diff --git a/.github/workflows/test_bazarr_execution.yml b/.github/workflows/test_bazarr_execution.yml index cca016553e..0272c6ebf5 100644 --- a/.github/workflows/test_bazarr_execution.yml +++ b/.github/workflows/test_bazarr_execution.yml @@ -13,7 +13,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: [ '3.8', '3.9', '3.10', '3.11', '3.12', '3.13', '3.14' ] + python-version: [ '3.12', '3.13', '3.14' ] name: Python ${{ matrix.python-version }} test steps: diff --git a/bazarr/api/subtitles/batch.py b/bazarr/api/subtitles/batch.py index b40377b3a8..f0f0615ff5 100644 --- a/bazarr/api/subtitles/batch.py +++ b/bazarr/api/subtitles/batch.py @@ -19,6 +19,7 @@ 'remove_tags': 'Removing Style Tags', 'fix_uppercase': 'Fixing Uppercase', 'reverse_rtl': 'Reversing RTL', + 'emoji': 'Removing Emoji', 'scan-disk': 'Scanning Disk', 'search-missing': 'Searching Missing Subtitles', 'upgrade': 'Upgrading Subtitles', @@ -60,7 +61,6 @@ class BatchOperation(Resource): }) @authenticate - @api_ns_batch.doc(body=post_request_model) @api_ns_batch.response(200, 'Success', post_response_model) @api_ns_batch.response(400, 'Bad Request') @api_ns_batch.response(401, 'Not Authenticated') @@ -159,7 +159,7 @@ def get_upgradable_media_ids(): TableHistoryMovie.timestamp > minimum_timestamp, or_( and_(TableHistoryMovie.score.is_(None), TableHistoryMovie.action == 6), - TableHistoryMovie.score < 117 + TableHistoryMovie.score < TableHistoryMovie.score_out_of - 3 ) )) ).all() @@ -188,7 +188,7 @@ def get_upgradable_media_ids(): TableHistory.timestamp > minimum_timestamp, or_( and_(TableHistory.score.is_(None), TableHistory.action == 6), - TableHistory.score < 357 + TableHistory.score < TableHistory.score_out_of - 3 ) )) ).all() diff --git a/bazarr/app/check_update.py b/bazarr/app/check_update.py index 63a83c78c6..c02872bb71 100644 --- a/bazarr/app/check_update.py +++ b/bazarr/app/check_update.py @@ -29,7 +29,7 @@ def deprecated_python_version(): # return True if Python version is deprecated - 
return sys.version_info.major == 2 or (sys.version_info.major == 3 and sys.version_info.minor < 8) + return sys.version_info.major == 2 or (sys.version_info.major == 3 and sys.version_info.minor < 10) def _fetch_repo_releases(repo, label=None): diff --git a/bazarr/app/get_providers.py b/bazarr/app/get_providers.py index 8d74f9e1cd..b794bc6739 100644 --- a/bazarr/app/get_providers.py +++ b/bazarr/app/get_providers.py @@ -3,7 +3,6 @@ import os import json import datetime -import pytz import logging import subliminal_patch import pretty @@ -13,6 +12,7 @@ import traceback import re +from zoneinfo import ZoneInfo from requests import ConnectionError from subzero.language import Language from subliminal_patch.exceptions import (TooManyRequests, APIThrottled, ParseResponseError, IPAddressBlocked, @@ -45,13 +45,13 @@ def time_until_midnight(timezone) -> datetime.timedelta: # Titulky resets its download limits at the start of a new day from its perspective - the Europe/Prague timezone # Needs to convert to offset-naive dt def titulky_limit_reset_timedelta(): - return time_until_midnight(timezone=pytz.timezone('Europe/Prague')) + return time_until_midnight(timezone=datetime.datetime.now(ZoneInfo('Europe/Prague'))) # LegendasDivx reset its searches limit at approximately midnight, Lisbon time, every day. We wait 1 more hours just # to be sure. 
def legendasdivx_limit_reset_timedelta(): - return time_until_midnight(timezone=pytz.timezone('Europe/Lisbon')) + datetime.timedelta(minutes=60) + return time_until_midnight(timezone=datetime.datetime.now(ZoneInfo('Europe/Lisbon'))) + datetime.timedelta(minutes=60) VALID_THROTTLE_EXCEPTIONS = (TooManyRequests, DownloadLimitExceeded, ServiceUnavailable, APIThrottled, diff --git a/bazarr/app/logger.py b/bazarr/app/logger.py index a5b998da1d..13422b18e0 100644 --- a/bazarr/app/logger.py +++ b/bazarr/app/logger.py @@ -9,7 +9,6 @@ from logging.handlers import TimedRotatingFileHandler from utilities.central import get_log_file_path -from pytz_deprecation_shim import PytzUsageWarning from .config import settings @@ -129,8 +128,6 @@ def filter(self, record): def configure_logging(debug=False): warnings.simplefilter('ignore', category=ResourceWarning) - warnings.simplefilter('ignore', category=PytzUsageWarning) - # warnings.simplefilter('ignore', category=SAWarning) if debug: log_level = logging.DEBUG diff --git a/bazarr/subtitles/mass_operations.py b/bazarr/subtitles/mass_operations.py index 3ec79beafe..d5dda58d85 100644 --- a/bazarr/subtitles/mass_operations.py +++ b/bazarr/subtitles/mass_operations.py @@ -22,12 +22,13 @@ VALID_ACTIONS = { 'sync', 'translate', 'OCR_fixes', 'common', 'remove_HI', - 'remove_tags', 'fix_uppercase', 'reverse_rtl', 'scan-disk', 'search-missing', 'upgrade', + 'remove_tags', 'fix_uppercase', 'reverse_rtl', 'emoji', + 'scan-disk', 'search-missing', 'upgrade', } MEDIA_ACTIONS = {'scan-disk', 'search-missing', 'upgrade'} -MOD_ACTIONS = {'OCR_fixes', 'common', 'remove_HI', 'remove_tags', 'fix_uppercase', 'reverse_rtl'} +MOD_ACTIONS = {'OCR_fixes', 'common', 'remove_HI', 'remove_tags', 'fix_uppercase', 'reverse_rtl', 'emoji'} def _parse_subtitles_column(subtitles_raw): diff --git a/custom_libs/subliminal/extensions.py b/custom_libs/subliminal/extensions.py index 61843ed410..f3b630b359 100644 --- a/custom_libs/subliminal/extensions.py +++ 
b/custom_libs/subliminal/extensions.py @@ -64,7 +64,7 @@ def register(self, entry_point): if ep.name in self.names(): raise ValueError('An extension with the same name already exist') - ext = self._load_one_plugin(ep, False, (), {}, False) + ext = self._load_one_plugin(ep, False, (), {}) self.extensions.append(ext) if self._extensions_by_name is not None: self._extensions_by_name[ext.name] = ext diff --git a/frontend/src/assets/_bazarr.scss b/frontend/src/assets/_bazarr.scss index d31f51aca8..a6e8ca052c 100644 --- a/frontend/src/assets/_bazarr.scss +++ b/frontend/src/assets/_bazarr.scss @@ -75,7 +75,8 @@ body::after { inset: 0; z-index: 9999; pointer-events: none; - background: radial-gradient( + background: + radial-gradient( ellipse at 15% 10%, rgba(230, 138, 0, 0.12) 0%, transparent 50% diff --git a/frontend/src/components/bazarr/LanguageSelector.tsx b/frontend/src/components/bazarr/LanguageSelector.tsx index 8954403bd2..2395098514 100644 --- a/frontend/src/components/bazarr/LanguageSelector.tsx +++ b/frontend/src/components/bazarr/LanguageSelector.tsx @@ -3,8 +3,10 @@ import { useLanguages } from "@/apis/hooks"; import { Selector, SelectorProps } from "@/components/inputs"; import { useSelectorOptions } from "@/utilities"; -interface LanguageSelectorProps - extends Omit, "options" | "getkey"> { +interface LanguageSelectorProps extends Omit< + SelectorProps, + "options" | "getkey" +> { enabled?: boolean; } diff --git a/frontend/src/types/react-table.d.ts b/frontend/src/types/react-table.d.ts index c05bcd9fd1..775d06537f 100644 --- a/frontend/src/types/react-table.d.ts +++ b/frontend/src/types/react-table.d.ts @@ -42,11 +42,13 @@ declare module "react-table" { } // eslint-disable-next-line @typescript-eslint/no-empty-interface - interface CustomTableProps> - extends useSelectionProps {} + interface CustomTableProps< + D extends Record, + > extends useSelectionProps {} export interface TableOptions> - extends UseExpandedOptions, + extends + UseExpandedOptions, // 
UseFiltersOptions, // UseGlobalFiltersOptions, UseGroupByOptions, @@ -61,14 +63,18 @@ declare module "react-table" { export interface Hooks< D extends Record = Record, - > extends UseExpandedHooks, + > + extends + UseExpandedHooks, UseGroupByHooks, UseRowSelectHooks, UseSortByHooks {} export interface TableInstance< D extends Record = Record, - > extends UseColumnOrderInstanceProps, + > + extends + UseColumnOrderInstanceProps, UseExpandedInstanceProps, // UseFiltersInstanceProps, // UseGlobalFiltersInstanceProps, @@ -81,7 +87,9 @@ declare module "react-table" { export interface TableState< D extends Record = Record, - > extends UseColumnOrderState, + > + extends + UseColumnOrderState, UseExpandedState, // UseFiltersState, // UseGlobalFiltersState, @@ -94,7 +102,9 @@ declare module "react-table" { export interface ColumnInterface< D extends Record = Record, - > extends UseFiltersColumnOptions, + > + extends + UseFiltersColumnOptions, // UseGlobalFiltersColumnOptions, UseGroupByColumnOptions, // UseResizeColumnsColumnOptions, @@ -102,7 +112,9 @@ declare module "react-table" { export interface ColumnInstance< D extends Record = Record, - > extends UseFiltersColumnProps, + > + extends + UseFiltersColumnProps, UseGroupByColumnProps, // UseResizeColumnsColumnProps, UseSortByColumnProps {} @@ -114,7 +126,9 @@ declare module "react-table" { export interface Row< D extends Record = Record, - > extends UseExpandedRowProps, + > + extends + UseExpandedRowProps, UseGroupByRowProps, UseRowSelectRowProps {} } diff --git a/frontend/src/utilities/routers.tsx b/frontend/src/utilities/routers.tsx index e6164ed9b6..834d88854e 100644 --- a/frontend/src/utilities/routers.tsx +++ b/frontend/src/utilities/routers.tsx @@ -18,13 +18,17 @@ export function usePrompt( const handleStay = useCallback(() => { modals.closeAll(); - blocker.reset?.(); + if (blocker.state === "blocked") { + blocker.reset?.(); + } requestAnimationFrame(() => previousFocus.current?.focus()); }, [blocker]); const 
handleDiscard = useCallback(() => { modals.closeAll(); - blocker.proceed?.(); + if (blocker.state === "blocked") { + blocker.proceed?.(); + } }, [blocker]); const handleSaveAndLeave = useCallback(async () => { @@ -32,7 +36,9 @@ export function usePrompt( await onSaveAndLeave(); } modals.closeAll(); - blocker.proceed?.(); + if (blocker.state === "blocked") { + blocker.proceed?.(); + } }, [blocker, onSaveAndLeave]); useEffect(() => { diff --git a/frontend/vite.config.ts b/frontend/vite.config.ts index b68ac4670c..ee6f35b559 100644 --- a/frontend/vite.config.ts +++ b/frontend/vite.config.ts @@ -133,6 +133,8 @@ export default defineConfig(({ mode, command }) => { globals: true, environment: "jsdom", setupFiles: "./src/tests/setup.tsx", + testTimeout: 20000, + pool: "forks", }, server: { proxy: { diff --git a/libs/APScheduler-3.10.4.dist-info/METADATA b/libs/APScheduler-3.10.4.dist-info/METADATA deleted file mode 100644 index 62df97e735..0000000000 --- a/libs/APScheduler-3.10.4.dist-info/METADATA +++ /dev/null @@ -1,138 +0,0 @@ -Metadata-Version: 2.1 -Name: APScheduler -Version: 3.10.4 -Summary: In-process task scheduler with Cron-like capabilities -Home-page: https://github.com/agronholm/apscheduler -Author: Alex Grönholm -Author-email: apscheduler@nextday.fi -License: MIT -Keywords: scheduling cron -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Requires-Python: >=3.6 -License-File: LICENSE.txt -Requires-Dist: six >=1.4.0 -Requires-Dist: pytz -Requires-Dist: tzlocal 
!=3.*,>=2.0 -Requires-Dist: importlib-metadata >=3.6.0 ; python_version < "3.8" -Provides-Extra: doc -Requires-Dist: sphinx ; extra == 'doc' -Requires-Dist: sphinx-rtd-theme ; extra == 'doc' -Provides-Extra: gevent -Requires-Dist: gevent ; extra == 'gevent' -Provides-Extra: mongodb -Requires-Dist: pymongo >=3.0 ; extra == 'mongodb' -Provides-Extra: redis -Requires-Dist: redis >=3.0 ; extra == 'redis' -Provides-Extra: rethinkdb -Requires-Dist: rethinkdb >=2.4.0 ; extra == 'rethinkdb' -Provides-Extra: sqlalchemy -Requires-Dist: sqlalchemy >=1.4 ; extra == 'sqlalchemy' -Provides-Extra: testing -Requires-Dist: pytest ; extra == 'testing' -Requires-Dist: pytest-asyncio ; extra == 'testing' -Requires-Dist: pytest-cov ; extra == 'testing' -Requires-Dist: pytest-tornado5 ; extra == 'testing' -Provides-Extra: tornado -Requires-Dist: tornado >=4.3 ; extra == 'tornado' -Provides-Extra: twisted -Requires-Dist: twisted ; extra == 'twisted' -Provides-Extra: zookeeper -Requires-Dist: kazoo ; extra == 'zookeeper' - -.. image:: https://github.com/agronholm/apscheduler/workflows/Python%20codeqa/test/badge.svg?branch=3.x - :target: https://github.com/agronholm/apscheduler/actions?query=workflow%3A%22Python+codeqa%2Ftest%22+branch%3A3.x - :alt: Build Status -.. image:: https://coveralls.io/repos/github/agronholm/apscheduler/badge.svg?branch=3.x - :target: https://coveralls.io/github/agronholm/apscheduler?branch=3.x - :alt: Code Coverage -.. image:: https://readthedocs.org/projects/apscheduler/badge/?version=3.x - :target: https://apscheduler.readthedocs.io/en/master/?badge=3.x - :alt: Documentation - -Advanced Python Scheduler (APScheduler) is a Python library that lets you schedule your Python code -to be executed later, either just once or periodically. You can add new jobs or remove old ones on -the fly as you please. If you store your jobs in a database, they will also survive scheduler -restarts and maintain their state. 
When the scheduler is restarted, it will then run all the jobs -it should have run while it was offline [#f1]_. - -Among other things, APScheduler can be used as a cross-platform, application specific replacement -to platform specific schedulers, such as the cron daemon or the Windows task scheduler. Please -note, however, that APScheduler is **not** a daemon or service itself, nor does it come with any -command line tools. It is primarily meant to be run inside existing applications. That said, -APScheduler does provide some building blocks for you to build a scheduler service or to run a -dedicated scheduler process. - -APScheduler has three built-in scheduling systems you can use: - -* Cron-style scheduling (with optional start/end times) -* Interval-based execution (runs jobs on even intervals, with optional start/end times) -* One-off delayed execution (runs jobs once, on a set date/time) - -You can mix and match scheduling systems and the backends where the jobs are stored any way you -like. Supported backends for storing jobs include: - -* Memory -* `SQLAlchemy `_ (any RDBMS supported by SQLAlchemy works) -* `MongoDB `_ -* `Redis `_ -* `RethinkDB `_ -* `ZooKeeper `_ - -APScheduler also integrates with several common Python frameworks, like: - -* `asyncio `_ (:pep:`3156`) -* `gevent `_ -* `Tornado `_ -* `Twisted `_ -* `Qt `_ (using either - `PyQt `_ , - `PySide6 `_ , - `PySide2 `_ or - `PySide `_) - -There are third party solutions for integrating APScheduler with other frameworks: - -* `Django `_ -* `Flask `_ - - -.. [#f1] The cutoff period for this is also configurable. - - -Documentation -------------- - -Documentation can be found `here `_. - - -Source ------- - -The source can be browsed at `Github `_. - - -Reporting bugs --------------- - -A `bug tracker `_ is provided by Github. 
- - -Getting help ------------- - -If you have problems or other questions, you can either: - -* Ask in the `apscheduler `_ room on Gitter -* Ask on the `APScheduler GitHub discussion forum `_, or -* Ask on `StackOverflow `_ and tag your - question with the ``apscheduler`` tag diff --git a/libs/APScheduler-3.10.4.dist-info/RECORD b/libs/APScheduler-3.10.4.dist-info/RECORD deleted file mode 100644 index 6a44be9239..0000000000 --- a/libs/APScheduler-3.10.4.dist-info/RECORD +++ /dev/null @@ -1,46 +0,0 @@ -APScheduler-3.10.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -APScheduler-3.10.4.dist-info/LICENSE.txt,sha256=YWP3mH37ONa8MgzitwsvArhivEESZRbVUu8c1DJH51g,1130 -APScheduler-3.10.4.dist-info/METADATA,sha256=ITYjDYv8SBO2ynuPiXmySCDJPjfvrFElLJoKQr58h8U,5695 -APScheduler-3.10.4.dist-info/RECORD,, -APScheduler-3.10.4.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -APScheduler-3.10.4.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPEjma1nZyJ0,92 -APScheduler-3.10.4.dist-info/entry_points.txt,sha256=KMxTUp2QykDNL6w-WBU5xrk8ebroCPEBN0eZtyL3x2w,1147 -APScheduler-3.10.4.dist-info/top_level.txt,sha256=O3oMCWxG-AHkecUoO6Ze7-yYjWrttL95uHO8-RFdYvE,12 -apscheduler/__init__.py,sha256=c_KXMg1QziacYqUpDuzLY5g1mcEZvBLq1dJY7NjLoKc,452 -apscheduler/events.py,sha256=KRMTDQUS6d2uVnrQvPoz3ZPV5V9XKsCAZLsgx913FFo,3593 -apscheduler/executors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -apscheduler/executors/asyncio.py,sha256=9m4wvRHSSYplllxAQyxWkPVcFdyFG5aZbHt5nfWKIAc,1859 -apscheduler/executors/base.py,sha256=hogiMc_t-huw6BMod0HEeY2FhRNmAAUyNNuBHvIX31M,5336 -apscheduler/executors/base_py3.py,sha256=8WOpTeX1NA-spdbEQ1oJMh5T2O_t2UdsaSnAh-iEWe0,1831 -apscheduler/executors/debug.py,sha256=15_ogSBzl8RRCfBYDnkIV2uMH8cLk1KImYmBa_NVGpc,573 -apscheduler/executors/gevent.py,sha256=aulrNmoefyBgrOkH9awRhFiXIDnSCnZ4U0o0_JXIXgc,777 -apscheduler/executors/pool.py,sha256=h4cYgKMRhjpNHmkhlogHLbmT4O_q6HePXVLmiJIHC3c,2484 
-apscheduler/executors/tornado.py,sha256=DU75VaQ9R6nBuy8lbPUvDKUgsuJcZqwAvURC5vg3r6w,1780 -apscheduler/executors/twisted.py,sha256=bRoU0C4BoVcS6_BjKD5wfUs0IJpGkmLsRAcMH2rJJss,778 -apscheduler/job.py,sha256=JCRERBpfWLuomPiNNHX-jrluEwfHkdscEmz4i0Y8rao,11216 -apscheduler/jobstores/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -apscheduler/jobstores/base.py,sha256=DXzSW9XscueHZHMvy1qFiG-vYqUl_MMv0n0uBSZWXGo,4523 -apscheduler/jobstores/memory.py,sha256=ZxWiKsqfsCHFvac-6X9BztuhnuSxlOYi1dhT6g-pjQo,3655 -apscheduler/jobstores/mongodb.py,sha256=r9t2neNuzfPuf_omDm0KdkLGPZXLksiH-U3j13MIBlM,5347 -apscheduler/jobstores/redis.py,sha256=kjQDIzPXz-Yq976U9HK3aMkcCI_QRLKgTADQWKewtik,5483 -apscheduler/jobstores/rethinkdb.py,sha256=k1rSLYJqejuhQxJY3pXwHAQYcpZ1QFJsoQ8n0oEu5MM,5863 -apscheduler/jobstores/sqlalchemy.py,sha256=LIA9iSGMvuPTVqGHdztgQs4YFmYN1xqXvpJauYNK470,6529 -apscheduler/jobstores/zookeeper.py,sha256=avGLXaJGjHD0F7uG6rLJ2gg_TXNqXDEM4PqOu56f-Xg,6363 -apscheduler/schedulers/__init__.py,sha256=jM63xA_K7GSToBenhsz-SCcqfhk1pdEVb6ajwoO5Kqg,406 -apscheduler/schedulers/asyncio.py,sha256=iJO6QUo1oW16giOU_nW8WMu2b9NTWT4Tg2gY586G08w,1994 -apscheduler/schedulers/background.py,sha256=751p-f5Di6pY4x6UXlZggpxQ5k2ObJ_Q5wSeWmKHS8o,1566 -apscheduler/schedulers/base.py,sha256=hCchDyhEXCoVmCfGgD3QMrKumYYLAUwY4456tQrukAY,43780 -apscheduler/schedulers/blocking.py,sha256=8nubfJ4PoUnAkEY6WRQG4COzG4SxGyW9PjuVPhDAbsk,985 -apscheduler/schedulers/gevent.py,sha256=csPBvV75FGcboXXsdex6fCD7J54QgBddYNdWj62ZO9g,1031 -apscheduler/schedulers/qt.py,sha256=jy58cP5roWOv68ytg8fiwtxMVnZKw7a8tkCHbLWeUs8,1329 -apscheduler/schedulers/tornado.py,sha256=D9Vaq3Ee9EFiXa1jDy9tedI048gR_YT_LAFUWqO_uEw,1926 -apscheduler/schedulers/twisted.py,sha256=D5EBjjMRtMBxy0_aAURcULAI8Ky2IvCTr9tK9sO1rYk,1844 -apscheduler/triggers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -apscheduler/triggers/base.py,sha256=BvBJdOnIeVClXPXeInzYK25cN64jAc4a9IiEQucSiVk,1355 
-apscheduler/triggers/combining.py,sha256=klaSoBp1kyrPX5D3gBpNTlsGKjks5QeKPW5JN_MVs30,3449 -apscheduler/triggers/cron/__init__.py,sha256=D39BQ63qWyk6XZcSuWth46ELQ3VIFpYjUHh7Kj65Z9M,9251 -apscheduler/triggers/cron/expressions.py,sha256=hu1kq0mKvivIw7U0D0Nnrbuk3q01dCuhZ7SHRPw6qhI,9184 -apscheduler/triggers/cron/fields.py,sha256=NWPClh1NgSOpTlJ3sm1TXM_ViC2qJGKWkd_vg0xsw7o,3510 -apscheduler/triggers/date.py,sha256=RrfB1PNO9G9e91p1BOf-y_TseVHQQR-KJPhNdPpAHcU,1705 -apscheduler/triggers/interval.py,sha256=ABjcZFaGYAAgdAaUQIuLr9_dLszIifu88qaXrJmdxQ4,4377 -apscheduler/util.py,sha256=aCLu_v8-c7rpY6sD7EKgxH2zYjZARiBdqKFZktaxO68,13260 diff --git a/libs/APScheduler-3.10.4.dist-info/WHEEL b/libs/APScheduler-3.10.4.dist-info/WHEEL deleted file mode 100644 index ba48cbcf92..0000000000 --- a/libs/APScheduler-3.10.4.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.41.3) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/libs/APScheduler-3.10.4.dist-info/entry_points.txt b/libs/APScheduler-3.10.4.dist-info/entry_points.txt deleted file mode 100644 index 0adfe3ead0..0000000000 --- a/libs/APScheduler-3.10.4.dist-info/entry_points.txt +++ /dev/null @@ -1,23 +0,0 @@ -[apscheduler.executors] -asyncio = apscheduler.executors.asyncio:AsyncIOExecutor [asyncio] -debug = apscheduler.executors.debug:DebugExecutor -gevent = apscheduler.executors.gevent:GeventExecutor [gevent] -processpool = apscheduler.executors.pool:ProcessPoolExecutor -threadpool = apscheduler.executors.pool:ThreadPoolExecutor -tornado = apscheduler.executors.tornado:TornadoExecutor [tornado] -twisted = apscheduler.executors.twisted:TwistedExecutor [twisted] - -[apscheduler.jobstores] -memory = apscheduler.jobstores.memory:MemoryJobStore -mongodb = apscheduler.jobstores.mongodb:MongoDBJobStore [mongodb] -redis = apscheduler.jobstores.redis:RedisJobStore [redis] -rethinkdb = apscheduler.jobstores.rethinkdb:RethinkDBJobStore [rethinkdb] -sqlalchemy = 
apscheduler.jobstores.sqlalchemy:SQLAlchemyJobStore [sqlalchemy] -zookeeper = apscheduler.jobstores.zookeeper:ZooKeeperJobStore [zookeeper] - -[apscheduler.triggers] -and = apscheduler.triggers.combining:AndTrigger -cron = apscheduler.triggers.cron:CronTrigger -date = apscheduler.triggers.date:DateTrigger -interval = apscheduler.triggers.interval:IntervalTrigger -or = apscheduler.triggers.combining:OrTrigger diff --git a/libs/Flask_Cors-5.0.0.dist-info/LICENSE b/libs/Flask_Cors-5.0.0.dist-info/LICENSE deleted file mode 100644 index 46d932f8d8..0000000000 --- a/libs/Flask_Cors-5.0.0.dist-info/LICENSE +++ /dev/null @@ -1,7 +0,0 @@ -Copyright (C) 2016 Cory Dolphin, Olin College - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
\ No newline at end of file diff --git a/libs/Flask_Cors-5.0.0.dist-info/METADATA b/libs/Flask_Cors-5.0.0.dist-info/METADATA deleted file mode 100644 index 99902fe9d6..0000000000 --- a/libs/Flask_Cors-5.0.0.dist-info/METADATA +++ /dev/null @@ -1,148 +0,0 @@ -Metadata-Version: 2.1 -Name: Flask-Cors -Version: 5.0.0 -Summary: A Flask extension adding a decorator for CORS support -Home-page: https://github.com/corydolphin/flask-cors -Author: Cory Dolphin -Author-email: corydolphin@gmail.com -License: MIT -Platform: any -Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Classifier: Topic :: Software Development :: Libraries :: Python Modules -License-File: LICENSE -Requires-Dist: Flask >=0.9 - -Flask-CORS -========== - -|Build Status| |Latest Version| |Supported Python versions| -|License| - -A Flask extension for handling Cross Origin Resource Sharing (CORS), making cross-origin AJAX possible. - -This package has a simple philosophy: when you want to enable CORS, you wish to enable it for all use cases on a domain. -This means no mucking around with different allowed headers, methods, etc. - -By default, submission of cookies across domains is disabled due to the security implications. -Please see the documentation for how to enable credential'ed requests, and please make sure you add some sort of `CSRF `__ protection before doing so! 
- -Installation ------------- - -Install the extension with using pip, or easy\_install. - -.. code:: bash - - $ pip install -U flask-cors - -Usage ------ - -This package exposes a Flask extension which by default enables CORS support on all routes, for all origins and methods. -It allows parameterization of all CORS headers on a per-resource level. -The package also contains a decorator, for those who prefer this approach. - -Simple Usage -~~~~~~~~~~~~ - -In the simplest case, initialize the Flask-Cors extension with default arguments in order to allow CORS for all domains on all routes. -See the full list of options in the `documentation `__. - -.. code:: python - - - from flask import Flask - from flask_cors import CORS - - app = Flask(__name__) - CORS(app) - - @app.route("/") - def helloWorld(): - return "Hello, cross-origin-world!" - -Resource specific CORS -^^^^^^^^^^^^^^^^^^^^^^ - -Alternatively, you can specify CORS options on a resource and origin level of granularity by passing a dictionary as the `resources` option, mapping paths to a set of options. -See the full list of options in the `documentation `__. - -.. code:: python - - app = Flask(__name__) - cors = CORS(app, resources={r"/api/*": {"origins": "*"}}) - - @app.route("/api/v1/users") - def list_users(): - return "user example" - -Route specific CORS via decorator -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -This extension also exposes a simple decorator to decorate flask routes with. -Simply add ``@cross_origin()`` below a call to Flask's ``@app.route(..)`` to allow CORS on a given route. -See the full list of options in the `decorator documentation `__. - -.. code:: python - - @app.route("/") - @cross_origin() - def helloWorld(): - return "Hello, cross-origin-world!" 
- -Documentation -------------- - -For a full list of options, please see the full `documentation `__ - -Troubleshooting ---------------- - -If things aren't working as you expect, enable logging to help understand what is going on under the hood, and why. - -.. code:: python - - logging.getLogger('flask_cors').level = logging.DEBUG - - -Tests ------ - -A simple set of tests is included in ``test/``. -To run, install nose, and simply invoke ``nosetests`` or ``python setup.py test`` to exercise the tests. - -If nosetests does not work for you, due to it no longer working with newer python versions. -You can use pytest to run the tests instead. - -Contributing ------------- - -Questions, comments or improvements? -Please create an issue on `Github `__, tweet at `@corydolphin `__ or send me an email. -I do my best to include every contribution proposed in any way that I can. - -Credits -------- - -This Flask extension is based upon the `Decorator for the HTTP Access Control `__ written by Armin Ronacher. - -.. |Build Status| image:: https://github.com/corydolphin/flask-cors/actions/workflows/unittests.yaml/badge.svg - :target: https://travis-ci.org/corydolphin/flask-cors -.. |Latest Version| image:: https://img.shields.io/pypi/v/Flask-Cors.svg - :target: https://pypi.python.org/pypi/Flask-Cors/ -.. |Supported Python versions| image:: https://img.shields.io/pypi/pyversions/Flask-Cors.svg - :target: https://img.shields.io/pypi/pyversions/Flask-Cors.svg -.. 
|License| image:: http://img.shields.io/:license-mit-blue.svg - :target: https://pypi.python.org/pypi/Flask-Cors/ diff --git a/libs/Flask_Cors-5.0.0.dist-info/RECORD b/libs/Flask_Cors-5.0.0.dist-info/RECORD deleted file mode 100644 index 5e942ce533..0000000000 --- a/libs/Flask_Cors-5.0.0.dist-info/RECORD +++ /dev/null @@ -1,12 +0,0 @@ -Flask_Cors-5.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -Flask_Cors-5.0.0.dist-info/LICENSE,sha256=bhob3FSDTB4HQMvOXV9vLK4chG_Sp_SCsRZJWU-vvV0,1069 -Flask_Cors-5.0.0.dist-info/METADATA,sha256=V2L_s849dFlZXsOhcgXVqv5Slj_JKSVuiiuRgDOft5s,5474 -Flask_Cors-5.0.0.dist-info/RECORD,, -Flask_Cors-5.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -Flask_Cors-5.0.0.dist-info/WHEEL,sha256=P2T-6epvtXQ2cBOE_U1K4_noqlJFN3tj15djMgEu4NM,110 -Flask_Cors-5.0.0.dist-info/top_level.txt,sha256=aWye_0QNZPp_QtPF4ZluLHqnyVLT9CPJsfiGhwqkWuo,11 -flask_cors/__init__.py,sha256=wZDCvPTHspA2g1VV7KyKN7R-uCdBnirTlsCzgPDcQtI,792 -flask_cors/core.py,sha256=y76xxLasWTdV_3ka19IxpdJPOgROBZQZ5L8t20IjqRA,14252 -flask_cors/decorator.py,sha256=BeJsyX1wYhVKWN04FAhb6z8YqffiRr7wKqwzHPap4bw,5009 -flask_cors/extension.py,sha256=gzv6zWUwSDYlGHBWzMuTI_hoQ7gQmp9DlcAcrKTVHdw,8602 -flask_cors/version.py,sha256=JzYPYpvaglqIJRGCDrh5-hYmXI0ISrDDed0V1QQZAGU,22 diff --git a/libs/Flask_Cors-5.0.0.dist-info/WHEEL b/libs/Flask_Cors-5.0.0.dist-info/WHEEL deleted file mode 100644 index f31e450fda..0000000000 --- a/libs/Flask_Cors-5.0.0.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.41.3) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/libs/Flask_Migrate-4.1.0.dist-info/METADATA b/libs/Flask_Migrate-4.1.0.dist-info/METADATA index ef8ede367d..0b89e51cd3 100644 --- a/libs/Flask_Migrate-4.1.0.dist-info/METADATA +++ b/libs/Flask_Migrate-4.1.0.dist-info/METADATA @@ -1,4 +1,4 @@ -Metadata-Version: 2.1 +Metadata-Version: 2.4 Name: Flask-Migrate Version: 4.1.0 
Summary: SQLAlchemy database migrations for Flask applications using Alembic. @@ -14,15 +14,16 @@ Classifier: Operating System :: OS Independent Requires-Python: >=3.6 Description-Content-Type: text/markdown License-File: LICENSE -Requires-Dist: Flask >=0.9 -Requires-Dist: Flask-SQLAlchemy >=1.0 -Requires-Dist: alembic >=1.9.0 +Requires-Dist: Flask>=0.9 +Requires-Dist: Flask-SQLAlchemy>=1.0 +Requires-Dist: alembic>=1.9.0 Provides-Extra: dev -Requires-Dist: tox ; extra == 'dev' -Requires-Dist: flake8 ; extra == 'dev' -Requires-Dist: pytest ; extra == 'dev' +Requires-Dist: tox; extra == "dev" +Requires-Dist: flake8; extra == "dev" +Requires-Dist: pytest; extra == "dev" Provides-Extra: docs -Requires-Dist: sphinx ; extra == 'docs' +Requires-Dist: sphinx; extra == "docs" +Dynamic: license-file Flask-Migrate ============= diff --git a/libs/Flask_Migrate-4.1.0.dist-info/RECORD b/libs/Flask_Migrate-4.1.0.dist-info/RECORD index d634fa09ab..3514c51736 100644 --- a/libs/Flask_Migrate-4.1.0.dist-info/RECORD +++ b/libs/Flask_Migrate-4.1.0.dist-info/RECORD @@ -1,10 +1,10 @@ -Flask_Migrate-4.1.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -Flask_Migrate-4.1.0.dist-info/LICENSE,sha256=kfkXGlJQvKy3Y__6tAJ8ynIp1HQfeROXhL8jZU1d-DI,1082 -Flask_Migrate-4.1.0.dist-info/METADATA,sha256=Oc_YNcJGhss0camLTDR64sz2RuLXAppze2rvHDzS8_0,3296 -Flask_Migrate-4.1.0.dist-info/RECORD,, -Flask_Migrate-4.1.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -Flask_Migrate-4.1.0.dist-info/WHEEL,sha256=P9jw-gEje8ByB7_hXoICnHtVCrEwMQh-630tKvQWehc,91 -Flask_Migrate-4.1.0.dist-info/top_level.txt,sha256=jLoPgiMG6oR4ugNteXn3IHskVVIyIXVStZOVq-AWLdU,14 +flask_migrate-4.1.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +flask_migrate-4.1.0.dist-info/METADATA,sha256=1bA492meS2k4iqK2vBX3qIbYKl4uAi7lsOztLMAStHw,3311 +flask_migrate-4.1.0.dist-info/RECORD,, 
+flask_migrate-4.1.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +flask_migrate-4.1.0.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92 +flask_migrate-4.1.0.dist-info/licenses/LICENSE,sha256=kfkXGlJQvKy3Y__6tAJ8ynIp1HQfeROXhL8jZU1d-DI,1082 +flask_migrate-4.1.0.dist-info/top_level.txt,sha256=jLoPgiMG6oR4ugNteXn3IHskVVIyIXVStZOVq-AWLdU,14 flask_migrate/__init__.py,sha256=JMySGA55Y8Gxy3HviWu7qq5rPUNQBWc2NID2OicpDyw,10082 flask_migrate/cli.py,sha256=IxrxBSC82S5sPfWac8Qg83_FVsRvqTYtCG7HRyMW8RU,11097 flask_migrate/templates/aioflask-multidb/README,sha256=Ek4cJqTaxneVjtkue--BXMlfpfp3MmJRjqoZvnSizww,43 diff --git a/libs/Flask_Migrate-4.1.0.dist-info/WHEEL b/libs/Flask_Migrate-4.1.0.dist-info/WHEEL index 9b78c44519..0885d05555 100644 --- a/libs/Flask_Migrate-4.1.0.dist-info/WHEEL +++ b/libs/Flask_Migrate-4.1.0.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: setuptools (75.3.0) +Generator: setuptools (80.10.2) Root-Is-Purelib: true Tag: py3-none-any diff --git a/libs/Flask_Migrate-4.1.0.dist-info/LICENSE b/libs/Flask_Migrate-4.1.0.dist-info/licenses/LICENSE similarity index 100% rename from libs/Flask_Migrate-4.1.0.dist-info/LICENSE rename to libs/Flask_Migrate-4.1.0.dist-info/licenses/LICENSE diff --git a/libs/Flask_SocketIO-5.5.1.dist-info/METADATA b/libs/Flask_SocketIO-5.5.1.dist-info/METADATA deleted file mode 100644 index 8676b6140c..0000000000 --- a/libs/Flask_SocketIO-5.5.1.dist-info/METADATA +++ /dev/null @@ -1,76 +0,0 @@ -Metadata-Version: 2.1 -Name: Flask-SocketIO -Version: 5.5.1 -Summary: Socket.IO integration for Flask applications -Author-email: Miguel Grinberg -Project-URL: Homepage, https://github.com/miguelgrinberg/flask-socketio -Project-URL: Bug Tracker, https://github.com/miguelgrinberg/flask-socketio/issues -Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers -Classifier: Programming Language :: Python :: 3 -Classifier: License :: OSI Approved :: MIT 
License -Classifier: Operating System :: OS Independent -Requires-Python: >=3.6 -Description-Content-Type: text/markdown -License-File: LICENSE -Requires-Dist: Flask >=0.9 -Requires-Dist: python-socketio >=5.12.0 -Provides-Extra: docs -Requires-Dist: sphinx ; extra == 'docs' - -Flask-SocketIO -============== - -[![Build status](https://github.com/miguelgrinberg/flask-socketio/workflows/build/badge.svg)](https://github.com/miguelgrinberg/Flask-SocketIO/actions) [![codecov](https://codecov.io/gh/miguelgrinberg/flask-socketio/branch/main/graph/badge.svg)](https://codecov.io/gh/miguelgrinberg/flask-socketio) - -Socket.IO integration for Flask applications. - -Sponsors --------- - -The following organizations are funding this project: - -![Socket.IO](https://images.opencollective.com/socketio/050e5eb/logo/64.png)
[Socket.IO](https://socket.io) | [Add your company here!](https://github.com/sponsors/miguelgrinberg)| --|- - -Many individual sponsors also support this project through small ongoing contributions. Why not [join them](https://github.com/sponsors/miguelgrinberg)? - -Installation ------------- - -You can install this package as usual with pip: - - pip install flask-socketio - -Example -------- - -```py -from flask import Flask, render_template -from flask_socketio import SocketIO, emit - -app = Flask(__name__) -app.config['SECRET_KEY'] = 'secret!' -socketio = SocketIO(app) - -@app.route('/') -def index(): - return render_template('index.html') - -@socketio.event -def my_event(message): - emit('my response', {'data': 'got it!'}) - -if __name__ == '__main__': - socketio.run(app) -``` - -Resources ---------- - -- [Tutorial](http://blog.miguelgrinberg.com/post/easy-websockets-with-flask-and-gevent) -- [Documentation](http://flask-socketio.readthedocs.io/en/latest/) -- [PyPI](https://pypi.python.org/pypi/Flask-SocketIO) -- [Change Log](https://github.com/miguelgrinberg/Flask-SocketIO/blob/main/CHANGES.md) -- Questions? See the [questions](https://stackoverflow.com/questions/tagged/flask-socketio) others have asked on Stack Overflow, or [ask](https://stackoverflow.com/questions/ask?tags=python+flask-socketio+python-socketio) your own question. 
- diff --git a/libs/Flask_SocketIO-5.5.1.dist-info/RECORD b/libs/Flask_SocketIO-5.5.1.dist-info/RECORD deleted file mode 100644 index 2b50d3dda3..0000000000 --- a/libs/Flask_SocketIO-5.5.1.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -Flask_SocketIO-5.5.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -Flask_SocketIO-5.5.1.dist-info/LICENSE,sha256=aNCWbkgKjS_T1cJtACyZbvCM36KxWnfQ0LWTuavuYKQ,1082 -Flask_SocketIO-5.5.1.dist-info/METADATA,sha256=6NSCK70GFvnCHNKwcr6lmffkRAKLd9dOnGq6TbAJlfs,2638 -Flask_SocketIO-5.5.1.dist-info/RECORD,, -Flask_SocketIO-5.5.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -Flask_SocketIO-5.5.1.dist-info/WHEEL,sha256=P9jw-gEje8ByB7_hXoICnHtVCrEwMQh-630tKvQWehc,91 -Flask_SocketIO-5.5.1.dist-info/top_level.txt,sha256=C1ugzQBJ3HHUJsWGzyt70XRVOX-y4CUAR8MWKjwJOQ8,15 -flask_socketio/__init__.py,sha256=5hN0LE0hfGMUDcX4FheZrtXERJ1IBEPagv0pgeqdtlU,54904 -flask_socketio/namespace.py,sha256=UkVryJvFYgnCMKWSF35GVfGdyh2cXRDyRbfmEPPchVA,2329 -flask_socketio/test_client.py,sha256=rClk02TSRqgidH8IyeohspKVKdpRx7gcZBjg1YUtZpA,11026 diff --git a/libs/Flask_SocketIO-5.5.1.dist-info/WHEEL b/libs/Flask_SocketIO-5.5.1.dist-info/WHEEL deleted file mode 100644 index 9b78c44519..0000000000 --- a/libs/Flask_SocketIO-5.5.1.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (75.3.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/libs/Js2Py-0.74.dist-info/METADATA b/libs/Js2Py-0.74.dist-info/METADATA index 4063e4ba92..a35be0b740 100644 --- a/libs/Js2Py-0.74.dist-info/METADATA +++ b/libs/Js2Py-0.74.dist-info/METADATA @@ -1,4 +1,4 @@ -Metadata-Version: 2.1 +Metadata-Version: 2.4 Name: Js2Py Version: 0.74 Summary: JavaScript to Python Translator & JavaScript interpreter written in 100% pure Python. 
@@ -7,9 +7,17 @@ Author: Piotr Dabkowski Author-email: piodrus@gmail.com License: MIT License-File: LICENSE.md -Requires-Dist: tzlocal >=1.2 -Requires-Dist: six >=1.10 -Requires-Dist: pyjsparser >=2.5.1 +Requires-Dist: tzlocal>=1.2 +Requires-Dist: six>=1.10 +Requires-Dist: pyjsparser>=2.5.1 +Dynamic: author +Dynamic: author-email +Dynamic: description +Dynamic: home-page +Dynamic: license +Dynamic: license-file +Dynamic: requires-dist +Dynamic: summary Translates JavaScript to Python code. Js2Py is able to translate and execute virtually any JavaScript code. diff --git a/libs/Js2Py-0.74.dist-info/RECORD b/libs/Js2Py-0.74.dist-info/RECORD index d14c3b06c8..cf583183c6 100644 --- a/libs/Js2Py-0.74.dist-info/RECORD +++ b/libs/Js2Py-0.74.dist-info/RECORD @@ -1,10 +1,10 @@ -Js2Py-0.74.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -Js2Py-0.74.dist-info/LICENSE.md,sha256=5HcnGlDEDJrDrVbQLnDH19l_ZscybPUk0TsS-IQsxOk,1088 -Js2Py-0.74.dist-info/METADATA,sha256=shZhquhN4nVuaurudCERGEnLk38BjT7grtZLNg4MABc,862 -Js2Py-0.74.dist-info/RECORD,, -Js2Py-0.74.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -Js2Py-0.74.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPEjma1nZyJ0,92 -Js2Py-0.74.dist-info/top_level.txt,sha256=Me1vDvBnqRgA6Jf96euhHjsa-dYkaXpr3Sm0RGPoGn8,6 +js2py-0.74.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +js2py-0.74.dist-info/METADATA,sha256=EsxDQrBB3A2LSeMzBE55q-4t-GTdP7AD1iLqykYiv2Q,1016 +js2py-0.74.dist-info/RECORD,, +js2py-0.74.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +js2py-0.74.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92 +js2py-0.74.dist-info/licenses/LICENSE.md,sha256=5HcnGlDEDJrDrVbQLnDH19l_ZscybPUk0TsS-IQsxOk,1088 +js2py-0.74.dist-info/top_level.txt,sha256=Me1vDvBnqRgA6Jf96euhHjsa-dYkaXpr3Sm0RGPoGn8,6 js2py/__init__.py,sha256=VlWswk9Sf3qBwxaJ1E9STDxUdTYh3PLKp6Kn1ws0STE,2886 
js2py/base.py,sha256=_h7HbsB30cybzGAU7XIX5tawMA4C7IHFsRi_ESRqzZc,115810 js2py/constructors/__init__.py,sha256=isKbPQhm2gf7O6f4aW0F9J0yBlds8jYDq5xA4u08xrE,30 diff --git a/libs/Js2Py-0.74.dist-info/WHEEL b/libs/Js2Py-0.74.dist-info/WHEEL index ba48cbcf92..0885d05555 100644 --- a/libs/Js2Py-0.74.dist-info/WHEEL +++ b/libs/Js2Py-0.74.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: bdist_wheel (0.41.3) +Generator: setuptools (80.10.2) Root-Is-Purelib: true Tag: py3-none-any diff --git a/libs/Js2Py-0.74.dist-info/LICENSE.md b/libs/Js2Py-0.74.dist-info/licenses/LICENSE.md similarity index 100% rename from libs/Js2Py-0.74.dist-info/LICENSE.md rename to libs/Js2Py-0.74.dist-info/licenses/LICENSE.md diff --git a/libs/Mako-1.3.8.dist-info/LICENSE b/libs/Mako-1.3.8.dist-info/LICENSE deleted file mode 100644 index 7cf3d43378..0000000000 --- a/libs/Mako-1.3.8.dist-info/LICENSE +++ /dev/null @@ -1,19 +0,0 @@ -Copyright 2006-2024 the Mako authors and contributors . - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/libs/Mako-1.3.8.dist-info/METADATA b/libs/Mako-1.3.8.dist-info/METADATA deleted file mode 100644 index bf0b1f45b9..0000000000 --- a/libs/Mako-1.3.8.dist-info/METADATA +++ /dev/null @@ -1,87 +0,0 @@ -Metadata-Version: 2.1 -Name: Mako -Version: 1.3.8 -Summary: A super-fast templating language that borrows the best ideas from the existing templating languages. -Home-page: https://www.makotemplates.org/ -Author: Mike Bayer -Author-email: mike@zzzcomputing.com -License: MIT -Project-URL: Documentation, https://docs.makotemplates.org -Project-URL: Issue Tracker, https://github.com/sqlalchemy/mako -Classifier: Development Status :: 5 - Production/Stable -Classifier: License :: OSI Approved :: MIT License -Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Requires-Python: >=3.8 -Description-Content-Type: text/x-rst -License-File: LICENSE -Requires-Dist: MarkupSafe>=0.9.2 -Provides-Extra: babel -Requires-Dist: Babel; extra == "babel" -Provides-Extra: lingua -Requires-Dist: lingua; extra == "lingua" -Provides-Extra: testing -Requires-Dist: pytest; extra == "testing" - -========================= -Mako Templates for Python 
-========================= - -Mako is a template library written in Python. It provides a familiar, non-XML -syntax which compiles into Python modules for maximum performance. Mako's -syntax and API borrows from the best ideas of many others, including Django -templates, Cheetah, Myghty, and Genshi. Conceptually, Mako is an embedded -Python (i.e. Python Server Page) language, which refines the familiar ideas -of componentized layout and inheritance to produce one of the most -straightforward and flexible models available, while also maintaining close -ties to Python calling and scoping semantics. - -Nutshell -======== - -:: - - <%inherit file="base.html"/> - <% - rows = [[v for v in range(0,10)] for row in range(0,10)] - %> - - % for row in rows: - ${makerow(row)} - % endfor -
- - <%def name="makerow(row)"> - - % for name in row: - ${name}\ - % endfor - - - -Philosophy -=========== - -Python is a great scripting language. Don't reinvent the wheel...your templates can handle it ! - -Documentation -============== - -See documentation for Mako at https://docs.makotemplates.org/en/latest/ - -License -======== - -Mako is licensed under an MIT-style license (see LICENSE). -Other incorporated projects may be licensed under different licenses. -All licenses allow for non-commercial and commercial use. diff --git a/libs/Mako-1.3.8.dist-info/RECORD b/libs/Mako-1.3.8.dist-info/RECORD deleted file mode 100644 index 7e79ae7aa2..0000000000 --- a/libs/Mako-1.3.8.dist-info/RECORD +++ /dev/null @@ -1,42 +0,0 @@ -../../bin/mako-render,sha256=NK39DgCmw8pz5T7ALDcW2MB6hFGNVOpWXAHq3-GKyss,236 -Mako-1.3.8.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -Mako-1.3.8.dist-info/LICENSE,sha256=FWJ7NrONBynN1obfmr9gZQPZnWJLL17FyyVKddWvqJE,1098 -Mako-1.3.8.dist-info/METADATA,sha256=YtMX8Z6wVX7TvuBzOsUAOAq_jdceHFW4rR6hwvMNZgE,2896 -Mako-1.3.8.dist-info/RECORD,, -Mako-1.3.8.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -Mako-1.3.8.dist-info/WHEEL,sha256=P9jw-gEje8ByB7_hXoICnHtVCrEwMQh-630tKvQWehc,91 -Mako-1.3.8.dist-info/entry_points.txt,sha256=LsKkUsOsJQYbJ2M72hZCm968wi5K8Ywb5uFxCuN8Obk,512 -Mako-1.3.8.dist-info/top_level.txt,sha256=LItdH8cDPetpUu8rUyBG3DObS6h9Gcpr9j_WLj2S-R0,5 -mako/__init__.py,sha256=sMLX8sANJQjjeIsZjbrwotWPXHEpRcKxELPgkx2Cyw8,242 -mako/_ast_util.py,sha256=CenxCrdES1irHDhOQU6Ldta4rdsytfYaMkN6s0TlveM,20247 -mako/ast.py,sha256=pY7MH-5cLnUuVz5YAwoGhWgWfgoVvLQkRDtc_s9qqw0,6642 -mako/cache.py,sha256=5DBBorj1NqiWDqNhN3ZJ8tMCm-h6Mew541276kdsxAU,7680 -mako/cmd.py,sha256=vP5M5g9yc5sjAT5owVTQu056YwyS-YkpulFSDb0IMGw,2813 -mako/codegen.py,sha256=XRhzcuGEleDUXTfmOjw4alb6TkczbmEfBCLqID8x4bA,47736 -mako/compat.py,sha256=wjVMf7uMg0TlC_aI5hdwWizza99nqJuGNdrnTNrZbt0,1820 
-mako/exceptions.py,sha256=pfdd5-1lCZ--I2YqQ_oHODZLmo62bn_lO5Kz_1__72w,12530 -mako/ext/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -mako/ext/autohandler.py,sha256=Tyz1CLRlG_C0EnKgjuHzqS4BBnpeA49O05x8rriNtTY,1885 -mako/ext/babelplugin.py,sha256=v10o5XQdgXwbr1bo0aL8VV3THm_84C_eeq6BmAGd3uA,2091 -mako/ext/beaker_cache.py,sha256=aAs9ELzO2WaiO5OppV4KDT6f7yNyn1BF1XHDQZwf0-E,2578 -mako/ext/extract.py,sha256=c3YuIN3Z5ZgS-xzX_gjKrEVQaptK3liXkm5j-Vq8yEM,4659 -mako/ext/linguaplugin.py,sha256=pzHlHC3-KlFeVAR4r8S1--_dfE5DcYmjLXtr0genBYU,1935 -mako/ext/preprocessors.py,sha256=zKQy42Ce6dOmU0Yk_rUVDAAn38-RUUfQolVKTJjLotA,576 -mako/ext/pygmentplugin.py,sha256=qBdsAhKktlQX7d5Yv1sAXufUNOZqcnJmKuC7V4D_srM,4753 -mako/ext/turbogears.py,sha256=0emY1WiMnuY8Pf6ARv5JBArKtouUdmuTljI-w6rE3J4,2141 -mako/filters.py,sha256=F7aDIKTUxnT-Og4rgboQtnML7Q87DJTHQyhi_dY_Ih4,4658 -mako/lexer.py,sha256=Xi6Lk8CnASf3UYAaPoYrfjuPkrYauNjvYvULCUkKYaY,16321 -mako/lookup.py,sha256=rkMvT5T7EOS5KRvPtgYii-sjh1nWWyKok_mEk-cEzrM,12428 -mako/parsetree.py,sha256=BHdZI9vyxKB27Q4hzym5TdZ_982_3k31_HMsGLz3Tlg,19021 -mako/pygen.py,sha256=d4f_ugRACCXuV9hJgEk6Ncoj38EaRHA3RTxkr_tK7UQ,10416 -mako/pyparser.py,sha256=eY_a94QDXaK3vIA2jZYT9so7oXKKJLT0SO_Yrl3IOb8,7478 -mako/runtime.py,sha256=ZsUEN22nX3d3dECQujF69mBKDQS6yVv2nvz_0eTvFGg,27804 -mako/template.py,sha256=4xQzwruZd5XzPw7iONZMZJj4SdFsctYYg4PfBYs2PLk,23857 -mako/testing/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -mako/testing/_config.py,sha256=k-qpnsnbXUoN-ykMN5BRpg84i1x0p6UsAddKQnrIytU,3566 -mako/testing/assertions.py,sha256=pfbGl84QlW7QWGg3_lo3wP8XnBAVo9AjzNp2ajmn7FA,5161 -mako/testing/config.py,sha256=wmYVZfzGvOK3mJUZpzmgO8-iIgvaCH41Woi4yDpxq6E,323 -mako/testing/exclusions.py,sha256=_t6ADKdatk3f18tOfHV_ZY6u_ZwQsKphZ2MXJVSAOcI,1553 -mako/testing/fixtures.py,sha256=nEp7wTusf7E0n3Q-BHJW2s_t1vx0KB9poadQ1BmIJzE,3044 -mako/testing/helpers.py,sha256=z4HAactwlht4ut1cbvxKt1QLb3yLPk1U7cnh5BwVUlc,1623 
-mako/util.py,sha256=dIFuchHfiNtRJJ99kEIRdHBkCZ3UmEvNO6l2ZQSCdVU,10638 diff --git a/libs/Mako-1.3.8.dist-info/WHEEL b/libs/Mako-1.3.8.dist-info/WHEEL deleted file mode 100644 index 9b78c44519..0000000000 --- a/libs/Mako-1.3.8.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (75.3.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/libs/Markdown-3.7.dist-info/METADATA b/libs/Markdown-3.7.dist-info/METADATA deleted file mode 100644 index 233bc55baa..0000000000 --- a/libs/Markdown-3.7.dist-info/METADATA +++ /dev/null @@ -1,146 +0,0 @@ -Metadata-Version: 2.1 -Name: Markdown -Version: 3.7 -Summary: Python implementation of John Gruber's Markdown. -Author: Manfred Stienstra, Yuri Takhteyev -Author-email: Waylan limberg -Maintainer: Isaac Muse -Maintainer-email: Waylan Limberg -License: BSD 3-Clause License - - Copyright 2007, 2008 The Python Markdown Project (v. 1.7 and later) - Copyright 2004, 2005, 2006 Yuri Takhteyev (v. 0.2-1.6b) - Copyright 2004 Manfred Stienstra (the original version) - - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions are met: - - 1. Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - - 2. Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - - 3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE - DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE - FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL - DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR - SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER - CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, - OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -Project-URL: Homepage, https://Python-Markdown.github.io/ -Project-URL: Documentation, https://Python-Markdown.github.io/ -Project-URL: Repository, https://github.com/Python-Markdown/markdown -Project-URL: Issue Tracker, https://github.com/Python-Markdown/markdown/issues -Project-URL: Changelog, https://python-markdown.github.io/changelog/ -Keywords: markdown,markdown-parser,python-markdown,markdown-to-html -Classifier: Development Status :: 5 - Production/Stable -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Topic :: Communications :: Email :: Filters -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries -Classifier: Topic :: Internet :: WWW/HTTP :: Site Management -Classifier: Topic :: Software Development :: Documentation -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: 
Topic :: Text Processing :: Filters -Classifier: Topic :: Text Processing :: Markup :: HTML -Classifier: Topic :: Text Processing :: Markup :: Markdown -Requires-Python: >=3.8 -Description-Content-Type: text/markdown -License-File: LICENSE.md -Requires-Dist: importlib-metadata >=4.4 ; python_version < "3.10" -Provides-Extra: docs -Requires-Dist: mkdocs >=1.5 ; extra == 'docs' -Requires-Dist: mkdocs-nature >=0.6 ; extra == 'docs' -Requires-Dist: mdx-gh-links >=0.2 ; extra == 'docs' -Requires-Dist: mkdocstrings[python] ; extra == 'docs' -Requires-Dist: mkdocs-gen-files ; extra == 'docs' -Requires-Dist: mkdocs-section-index ; extra == 'docs' -Requires-Dist: mkdocs-literate-nav ; extra == 'docs' -Provides-Extra: testing -Requires-Dist: coverage ; extra == 'testing' -Requires-Dist: pyyaml ; extra == 'testing' - -[Python-Markdown][] -=================== - -[![Build Status][build-button]][build] -[![Coverage Status][codecov-button]][codecov] -[![Latest Version][mdversion-button]][md-pypi] -[![Python Versions][pyversion-button]][md-pypi] -[![BSD License][bsdlicense-button]][bsdlicense] -[![Code of Conduct][codeofconduct-button]][Code of Conduct] - -[build-button]: https://github.com/Python-Markdown/markdown/workflows/CI/badge.svg?event=push -[build]: https://github.com/Python-Markdown/markdown/actions?query=workflow%3ACI+event%3Apush -[codecov-button]: https://codecov.io/gh/Python-Markdown/markdown/branch/master/graph/badge.svg -[codecov]: https://codecov.io/gh/Python-Markdown/markdown -[mdversion-button]: https://img.shields.io/pypi/v/Markdown.svg -[md-pypi]: https://pypi.org/project/Markdown/ -[pyversion-button]: https://img.shields.io/pypi/pyversions/Markdown.svg -[bsdlicense-button]: https://img.shields.io/badge/license-BSD-yellow.svg -[bsdlicense]: https://opensource.org/licenses/BSD-3-Clause -[codeofconduct-button]: https://img.shields.io/badge/code%20of%20conduct-contributor%20covenant-green.svg?style=flat-square -[Code of Conduct]: 
https://github.com/Python-Markdown/markdown/blob/master/CODE_OF_CONDUCT.md - -This is a Python implementation of John Gruber's [Markdown][]. -It is almost completely compliant with the reference implementation, -though there are a few known issues. See [Features][] for information -on what exactly is supported and what is not. Additional features are -supported by the [Available Extensions][]. - -[Python-Markdown]: https://Python-Markdown.github.io/ -[Markdown]: https://daringfireball.net/projects/markdown/ -[Features]: https://Python-Markdown.github.io#Features -[Available Extensions]: https://Python-Markdown.github.io/extensions - -Documentation -------------- - -```bash -pip install markdown -``` -```python -import markdown -html = markdown.markdown(your_text_string) -``` - -For more advanced [installation] and [usage] documentation, see the `docs/` directory -of the distribution or the project website at . - -[installation]: https://python-markdown.github.io/install/ -[usage]: https://python-markdown.github.io/reference/ - -See the change log at . - -Support -------- - -You may report bugs, ask for help, and discuss various other issues on the [bug tracker][]. - -[bug tracker]: https://github.com/Python-Markdown/markdown/issues - -Code of Conduct ---------------- - -Everyone interacting in the Python-Markdown project's code bases, issue trackers, -and mailing lists is expected to follow the [Code of Conduct]. 
diff --git a/libs/Markdown-3.7.dist-info/RECORD b/libs/Markdown-3.7.dist-info/RECORD deleted file mode 100644 index 34d25a0c7e..0000000000 --- a/libs/Markdown-3.7.dist-info/RECORD +++ /dev/null @@ -1,42 +0,0 @@ -../../bin/markdown_py,sha256=a0a3HrUHepb4z4hcrRdCfAEQ8SiB-QoWxf9g1e-KLv8,237 -Markdown-3.7.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -Markdown-3.7.dist-info/LICENSE.md,sha256=e6TrbRCzKy0R3OE4ITQDUc27swuozMZ4Qdsv_Ybnmso,1650 -Markdown-3.7.dist-info/METADATA,sha256=nY8sewcY6R1akyROqkyO-Jk_eUDY8am_C4MkRP79sWA,7040 -Markdown-3.7.dist-info/RECORD,, -Markdown-3.7.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -Markdown-3.7.dist-info/WHEEL,sha256=OVMc5UfuAQiSplgO0_WdW7vXVGAt9Hdd6qtN4HotdyA,91 -Markdown-3.7.dist-info/entry_points.txt,sha256=lMEyiiA_ZZyfPCBlDviBl-SiU0cfoeuEKpwxw361sKQ,1102 -Markdown-3.7.dist-info/top_level.txt,sha256=IAxs8x618RXoH1uCqeLLxXsDefJvE_mIibr_M4sOlyk,9 -markdown/__init__.py,sha256=dfzwwdpG9L8QLEPBpLFPIHx_BN056aZXp9xZifTxYIU,1777 -markdown/__main__.py,sha256=innFBxRqwPBNxG1zhKktJji4bnRKtVyYYd30ID13Tcw,5859 -markdown/__meta__.py,sha256=RhwfJ30zyGvJaJXLHwQdNH5jw69-5fVKu2p-CVaJz0U,1712 -markdown/blockparser.py,sha256=j4CQImVpiq7g9pz8wCxvzT61X_T2iSAjXupHJk8P3eA,5728 -markdown/blockprocessors.py,sha256=koY5rq8DixzBCHcquvZJp6x2JYyBGjrwxMWNZhd6D2U,27013 -markdown/core.py,sha256=DyyzDsmd-KcuEp8ZWUKJAeUCt7B7G3J3NeqZqp3LphI,21335 -markdown/extensions/__init__.py,sha256=9z1khsdKCVrmrJ_2GfxtPAdjD3FyMe5vhC7wmM4O9m0,4822 -markdown/extensions/abbr.py,sha256=Gqt9TUtLWez2cbsy3SQk5152RZekops2fUJj01bfkfw,6903 -markdown/extensions/admonition.py,sha256=Hqcn3I8JG0i-OPWdoqI189TmlQRgH6bs5PmpCANyLlg,6547 -markdown/extensions/attr_list.py,sha256=t3PrgAr5Ebldnq3nJNbteBt79bN0ccXS5RemmQfUZ9g,7820 -markdown/extensions/codehilite.py,sha256=ChlmpM6S--j-UK7t82859UpYjm8EftdiLqmgDnknyes,13503 -markdown/extensions/def_list.py,sha256=J3NVa6CllfZPsboJCEycPyRhtjBHnOn8ET6omEvVlDo,4029 
-markdown/extensions/extra.py,sha256=1vleT284kued4HQBtF83IjSumJVo0q3ng6MjTkVNfNQ,2163 -markdown/extensions/fenced_code.py,sha256=-fYSmRZ9DTYQ8HO9b_78i47kVyVu6mcVJlqVTMdzvo4,8300 -markdown/extensions/footnotes.py,sha256=bRFlmIBOKDI5efG1jZfDkMoV2osfqWip1rN1j2P-mMg,16710 -markdown/extensions/legacy_attrs.py,sha256=oWcyNrfP0F6zsBoBOaD5NiwrJyy4kCpgQLl12HA7JGU,2788 -markdown/extensions/legacy_em.py,sha256=-Z_w4PEGSS-Xg-2-BtGAnXwwy5g5GDgv2tngASnPgxg,1693 -markdown/extensions/md_in_html.py,sha256=y4HEWEnkvfih22fojcaJeAmjx1AtF8N-a_jb6IDFfts,16546 -markdown/extensions/meta.py,sha256=v_4Uq7nbcQ76V1YAvqVPiNLbRLIQHJsnfsk-tN70RmY,2600 -markdown/extensions/nl2br.py,sha256=9KKcrPs62c3ENNnmOJZs0rrXXqUtTCfd43j1_OPpmgU,1090 -markdown/extensions/sane_lists.py,sha256=ogAKcm7gEpcXV7fSTf8JZH5YdKAssPCEOUzdGM3C9Tw,2150 -markdown/extensions/smarty.py,sha256=yqT0OiE2AqYrqqZtcUFFmp2eJsQHomiKzgyG2JFb9rI,11048 -markdown/extensions/tables.py,sha256=oTDvGD1qp9xjVWPGYNgDBWe9NqsX5gS6UU5wUsQ1bC8,8741 -markdown/extensions/toc.py,sha256=PGg-EqbBubm3n0b633r8Xa9kc6JIdbo20HGAOZ6GEl8,18322 -markdown/extensions/wikilinks.py,sha256=j7D2sozica6sqXOUa_GuAXqIzxp-7Hi60bfXymiuma8,3285 -markdown/htmlparser.py,sha256=dEr6IE7i9b6Tc1gdCLZGeWw6g6-E-jK1Z4KPj8yGk8Q,14332 -markdown/inlinepatterns.py,sha256=7_HF5nTOyQag_CyBgU4wwmuI6aMjtadvGadyS9IP21w,38256 -markdown/postprocessors.py,sha256=eYi6eW0mGudmWpmsW45hduLwX66Zr8Bf44WyU9vKp-I,4807 -markdown/preprocessors.py,sha256=pq5NnHKkOSVQeIo-ajC-Yt44kvyMV97D04FBOQXctJM,3224 -markdown/serializers.py,sha256=YtAFYQoOdp_TAmYGow6nBo0eB6I-Sl4PTLdLDfQJHwQ,7174 -markdown/test_tools.py,sha256=MtN4cf3ZPDtb83wXLTol-3q3aIGRIkJ2zWr6fd-RgVE,8662 -markdown/treeprocessors.py,sha256=o4dnoZZsIeVV8qR45Njr8XgwKleWYDS5pv8dKQhJvv8,17651 -markdown/util.py,sha256=vJ1E0xjMzDAlTqLUSJWgdEvxdQfLXDEYUssOQMw9kPQ,13929 diff --git a/libs/Markdown-3.7.dist-info/WHEEL b/libs/Markdown-3.7.dist-info/WHEEL deleted file mode 100644 index da25d7b423..0000000000 --- a/libs/Markdown-3.7.dist-info/WHEEL +++ 
/dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (75.2.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/libs/MarkupSafe-2.1.5.dist-info/METADATA b/libs/MarkupSafe-2.1.5.dist-info/METADATA deleted file mode 100644 index dfe37d52df..0000000000 --- a/libs/MarkupSafe-2.1.5.dist-info/METADATA +++ /dev/null @@ -1,93 +0,0 @@ -Metadata-Version: 2.1 -Name: MarkupSafe -Version: 2.1.5 -Summary: Safely add untrusted strings to HTML/XML markup. -Home-page: https://palletsprojects.com/p/markupsafe/ -Maintainer: Pallets -Maintainer-email: contact@palletsprojects.com -License: BSD-3-Clause -Project-URL: Donate, https://palletsprojects.com/donate -Project-URL: Documentation, https://markupsafe.palletsprojects.com/ -Project-URL: Changes, https://markupsafe.palletsprojects.com/changes/ -Project-URL: Source Code, https://github.com/pallets/markupsafe/ -Project-URL: Issue Tracker, https://github.com/pallets/markupsafe/issues/ -Project-URL: Chat, https://discord.gg/pallets -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Classifier: Topic :: Text Processing :: Markup :: HTML -Requires-Python: >=3.7 -Description-Content-Type: text/x-rst -License-File: LICENSE.rst - -MarkupSafe -========== - -MarkupSafe implements a text object that escapes characters so it is -safe to use in HTML and XML. Characters that have special meanings are -replaced so that they display as the actual characters. This mitigates -injection attacks, meaning untrusted user input can safely be displayed -on a page. - - -Installing ----------- - -Install and update using `pip`_: - -.. code-block:: text - - pip install -U MarkupSafe - -.. 
_pip: https://pip.pypa.io/en/stable/getting-started/ - - -Examples --------- - -.. code-block:: pycon - - >>> from markupsafe import Markup, escape - - >>> # escape replaces special characters and wraps in Markup - >>> escape("") - Markup('<script>alert(document.cookie);</script>') - - >>> # wrap in Markup to mark text "safe" and prevent escaping - >>> Markup("Hello") - Markup('hello') - - >>> escape(Markup("Hello")) - Markup('hello') - - >>> # Markup is a str subclass - >>> # methods and operators escape their arguments - >>> template = Markup("Hello {name}") - >>> template.format(name='"World"') - Markup('Hello "World"') - - -Donate ------- - -The Pallets organization develops and supports MarkupSafe and other -popular packages. In order to grow the community of contributors and -users, and allow the maintainers to devote more time to the projects, -`please donate today`_. - -.. _please donate today: https://palletsprojects.com/donate - - -Links ------ - -- Documentation: https://markupsafe.palletsprojects.com/ -- Changes: https://markupsafe.palletsprojects.com/changes/ -- PyPI Releases: https://pypi.org/project/MarkupSafe/ -- Source Code: https://github.com/pallets/markupsafe/ -- Issue Tracker: https://github.com/pallets/markupsafe/issues/ -- Chat: https://discord.gg/pallets diff --git a/libs/MarkupSafe-2.1.5.dist-info/RECORD b/libs/MarkupSafe-2.1.5.dist-info/RECORD deleted file mode 100644 index 57cd62847c..0000000000 --- a/libs/MarkupSafe-2.1.5.dist-info/RECORD +++ /dev/null @@ -1,13 +0,0 @@ -MarkupSafe-2.1.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -MarkupSafe-2.1.5.dist-info/LICENSE.rst,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475 -MarkupSafe-2.1.5.dist-info/METADATA,sha256=2dRDPam6OZLfpX0wg1JN5P3u9arqACxVSfdGmsJU7o8,3003 -MarkupSafe-2.1.5.dist-info/RECORD,, -MarkupSafe-2.1.5.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 
-MarkupSafe-2.1.5.dist-info/WHEEL,sha256=EO1EUWjlSI9vqFKe-qOLBJFxSac53mP8l62vW3JFDec,109 -MarkupSafe-2.1.5.dist-info/top_level.txt,sha256=qy0Plje5IJuvsCBjejJyhDCjEAdcDLK_2agVcex8Z6U,11 -markupsafe/__init__.py,sha256=r7VOTjUq7EMQ4v3p4R1LoVOGJg6ysfYRncLr34laRBs,10958 -markupsafe/_native.py,sha256=GR86Qvo_GcgKmKreA1WmYN9ud17OFwkww8E-fiW-57s,1713 -markupsafe/_speedups.c,sha256=X2XvQVtIdcK4Usz70BvkzoOfjTCmQlDkkjYSn-swE0g,7083 -markupsafe/_speedups.cpython-38-darwin.so,sha256=1yfD14PZ-QrFSi3XHMHazowfHExBdp5WS7IC86gAuRc,18712 -markupsafe/_speedups.pyi,sha256=vfMCsOgbAXRNLUXkyuyonG8uEWKYU4PDqNuMaDELAYw,229 -markupsafe/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/libs/MarkupSafe-2.1.5.dist-info/WHEEL b/libs/MarkupSafe-2.1.5.dist-info/WHEEL deleted file mode 100644 index 9fd57fe24f..0000000000 --- a/libs/MarkupSafe-2.1.5.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.41.3) -Root-Is-Purelib: false -Tag: cp38-cp38-macosx_12_0_x86_64 - diff --git a/libs/PlexAPI-4.16.1.dist-info/METADATA b/libs/PlexAPI-4.16.1.dist-info/METADATA deleted file mode 100644 index 97bc91f4b1..0000000000 --- a/libs/PlexAPI-4.16.1.dist-info/METADATA +++ /dev/null @@ -1,282 +0,0 @@ -Metadata-Version: 2.1 -Name: PlexAPI -Version: 4.16.1 -Summary: Python bindings for the Plex API. -Author-email: Michael Shepanski -License: BSD-3-Clause -Project-URL: Homepage, https://github.com/pkkid/python-plexapi -Project-URL: Documentation, https://python-plexapi.readthedocs.io -Keywords: plex,api -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python :: 3 -Classifier: License :: OSI Approved :: BSD License -Requires-Python: >=3.9 -Description-Content-Type: text/x-rst -License-File: LICENSE.txt -License-File: AUTHORS.txt -Requires-Dist: requests -Provides-Extra: alert -Requires-Dist: websocket-client (>=1.3.3) ; extra == 'alert' - -Python-PlexAPI -============== -.. 
image:: https://github.com/pkkid/python-plexapi/workflows/CI/badge.svg - :target: https://github.com/pkkid/python-plexapi/actions?query=workflow%3ACI -.. image:: https://readthedocs.org/projects/python-plexapi/badge/?version=latest - :target: http://python-plexapi.readthedocs.io/en/latest/?badge=latest -.. image:: https://codecov.io/gh/pkkid/python-plexapi/branch/master/graph/badge.svg?token=fOECznuMtw - :target: https://codecov.io/gh/pkkid/python-plexapi -.. image:: https://img.shields.io/github/tag/pkkid/python-plexapi.svg?label=github+release - :target: https://github.com/pkkid/python-plexapi/releases -.. image:: https://badge.fury.io/py/PlexAPI.svg - :target: https://badge.fury.io/py/PlexAPI -.. image:: https://img.shields.io/github/last-commit/pkkid/python-plexapi.svg - :target: https://img.shields.io/github/last-commit/pkkid/python-plexapi.svg - - -Overview --------- -Unofficial Python bindings for the Plex API. Our goal is to match all capabilities of the official -Plex Web Client. A few of the many features we currently support are: - -* Navigate local or remote shared libraries. -* Perform library actions such as scan, analyze, empty trash. -* Remote control and play media on connected clients, including `Controlling Sonos speakers`_ -* Listen in on all Plex Server notifications. - - -Installation & Documentation ----------------------------- - -.. code-block:: python - - pip install plexapi - -*Install extra features:* - -.. code-block:: python - - pip install plexapi[alert] # Install with dependencies required for plexapi.alert - -Documentation_ can be found at Read the Docs. - -.. _Documentation: http://python-plexapi.readthedocs.io/en/latest/ - -Join our Discord_ for support and discussion. - -.. _Discord: https://discord.gg/GtAnnZAkuw - - -Getting a PlexServer Instance ------------------------------ - -There are two types of authentication. 
If you are running on a separate network -or using Plex Users you can log into MyPlex to get a PlexServer instance. An -example of this is below. NOTE: Servername below is the name of the server (not -the hostname and port). If logged into Plex Web you can see the server name in -the top left above your available libraries. - -.. code-block:: python - - from plexapi.myplex import MyPlexAccount - account = MyPlexAccount('', '') - plex = account.resource('').connect() # returns a PlexServer instance - -If you want to avoid logging into MyPlex and you already know your auth token -string, you can use the PlexServer object directly as above, by passing in -the baseurl and auth token directly. - -.. code-block:: python - - from plexapi.server import PlexServer - baseurl = 'http://plexserver:32400' - token = '2ffLuB84dqLswk9skLos' - plex = PlexServer(baseurl, token) - - -Usage Examples --------------- - -.. code-block:: python - - # Example 1: List all unwatched movies. - movies = plex.library.section('Movies') - for video in movies.search(unwatched=True): - print(video.title) - - -.. code-block:: python - - # Example 2: Mark all Game of Thrones episodes as played. - plex.library.section('TV Shows').get('Game of Thrones').markPlayed() - - -.. code-block:: python - - # Example 3: List all clients connected to the Server. - for client in plex.clients(): - print(client.title) - - -.. code-block:: python - - # Example 4: Play the movie Cars on another client. - # Note: Client must be on same network as server. - cars = plex.library.section('Movies').get('Cars') - client = plex.client("Michael's iPhone") - client.playMedia(cars) - - -.. code-block:: python - - # Example 5: List all content with the word 'Game' in the title. - for video in plex.search('Game'): - print(f'{video.title} ({video.TYPE})') - - -.. code-block:: python - - # Example 6: List all movies directed by the same person as Elephants Dream. 
- movies = plex.library.section('Movies') - elephants_dream = movies.get('Elephants Dream') - director = elephants_dream.directors[0] - for movie in movies.search(None, director=director): - print(movie.title) - - -.. code-block:: python - - # Example 7: List files for the latest episode of The 100. - last_episode = plex.library.section('TV Shows').get('The 100').episodes()[-1] - for part in last_episode.iterParts(): - print(part.file) - - -.. code-block:: python - - # Example 8: Get audio/video/all playlists - for playlist in plex.playlists(): - print(playlist.title) - - -.. code-block:: python - - # Example 9: Rate the 100 four stars. - plex.library.section('TV Shows').get('The 100').rate(8.0) - - -Controlling Sonos speakers --------------------------- - -To control Sonos speakers directly using Plex APIs, the following requirements must be met: - -1. Active Plex Pass subscription -2. Sonos account linked to Plex account -3. Plex remote access enabled - -Due to the design of Sonos music services, the API calls to control Sonos speakers route through https://sonos.plex.tv -and back via the Plex server's remote access. Actual media playback is local unless networking restrictions prevent the -Sonos speakers from connecting to the Plex server directly. - -.. code-block:: python - - from plexapi.myplex import MyPlexAccount - from plexapi.server import PlexServer - - baseurl = 'http://plexserver:32400' - token = '2ffLuB84dqLswk9skLos' - - account = MyPlexAccount(token) - server = PlexServer(baseurl, token) - - # List available speakers/groups - for speaker in account.sonos_speakers(): - print(speaker.title) - - # Obtain PlexSonosPlayer instance - speaker = account.sonos_speaker("Kitchen") - - album = server.library.section('Music').get('Stevie Wonder').album('Innervisions') - - # Speaker control examples - speaker.playMedia(album) - speaker.pause() - speaker.setVolume(10) - speaker.skipNext() - - -Running tests over PlexAPI --------------------------- - -Use: - -.. 
code-block:: bash - - tools/plex-boostraptest.py - -with appropriate -arguments and add this new server to a shared user which username is defined in environment variable `SHARED_USERNAME`. -It uses `official docker image`_ to create a proper instance. - -For skipping the docker and reuse a existing server use - -.. code-block:: bash - - python plex-bootstraptest.py --no-docker --username USERNAME --password PASSWORD --server-name NAME-OF-YOUR-SEVER - -Also in order to run most of the tests you have to provide some environment variables: - -* `PLEXAPI_AUTH_SERVER_BASEURL` containing an URL to your Plex instance, e.g. `http://127.0.0.1:32400` (without trailing - slash) -* `PLEXAPI_AUTH_MYPLEX_USERNAME` and `PLEXAPI_AUTH_MYPLEX_PASSWORD` with your MyPlex username and password accordingly - -After this step you can run tests with following command: - -.. code-block:: bash - - py.test tests -rxXs --ignore=tests/test_sync.py - -Some of the tests in main test-suite require a shared user in your account (e.g. `test_myplex_users`, -`test_myplex_updateFriend`, etc.), you need to provide a valid shared user's username to get them running you need to -provide the username of the shared user as an environment variable `SHARED_USERNAME`. You can enable a Guest account and -simply pass `Guest` as `SHARED_USERNAME` (or just create a user like `plexapitest` and play with it). - -To be able to run tests over Mobile Sync api you have to some some more environment variables, to following values -exactly: - -* PLEXAPI_HEADER_PROVIDES='controller,sync-target' -* PLEXAPI_HEADER_PLATFORM=iOS -* PLEXAPI_HEADER_PLATFORM_VERSION=11.4.1 -* PLEXAPI_HEADER_DEVICE=iPhone - -And finally run the sync-related tests: - -.. code-block:: bash - - py.test tests/test_sync.py -rxXs - -.. 
_official docker image: https://hub.docker.com/r/plexinc/pms-docker/ - -Common Questions ----------------- - -**Why are you using camelCase and not following PEP8 guidelines?** - -This API reads XML documents provided by MyPlex and the Plex Server. -We decided to conform to their style so that the API variable names directly -match with the provided XML documents. - - -**Why don't you offer feature XYZ?** - -This library is meant to be a wrapper around the XML pages the Plex -server provides. If we are not providing an API that is offered in the -XML pages, please let us know! -- Adding additional features beyond that -should be done outside the scope of this library. - - -**What are some helpful links if trying to understand the raw Plex API?** - -* https://github.com/plexinc/plex-media-player/wiki/Remote-control-API -* https://forums.plex.tv/discussion/104353/pms-web-api-documentation -* https://github.com/Arcanemagus/plex-api/wiki diff --git a/libs/PlexAPI-4.16.1.dist-info/RECORD b/libs/PlexAPI-4.16.1.dist-info/RECORD deleted file mode 100644 index 02ca75395e..0000000000 --- a/libs/PlexAPI-4.16.1.dist-info/RECORD +++ /dev/null @@ -1,31 +0,0 @@ -PlexAPI-4.16.1.dist-info/AUTHORS.txt,sha256=iEonabCDE0G6AnfT0tCcppsJ0AaTJZGhRjIM4lIIAck,228 -PlexAPI-4.16.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -PlexAPI-4.16.1.dist-info/LICENSE.txt,sha256=ZmoFInlwd6lOpMMQCWIbjLLtu4pwhwWArg_dnYS3X5A,1515 -PlexAPI-4.16.1.dist-info/METADATA,sha256=Wqd-vI8B0Geygwyrt4NqBcSsuUZxoDxqBHbLtKjz6Wc,9284 -PlexAPI-4.16.1.dist-info/RECORD,, -PlexAPI-4.16.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -PlexAPI-4.16.1.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92 -PlexAPI-4.16.1.dist-info/top_level.txt,sha256=PTwXHiZDiXtrZnSI7lpZkRz1oJs5DyYpiiu_FuhuSlk,8 -plexapi/__init__.py,sha256=rsy6uvdxBP64y4v5lC4yLTP3l5VY3S-Rsk8rE_gDPIM,2144 -plexapi/alert.py,sha256=pSAIwtzsOnY2b97137dG_8YZOpSBxmKJ_kRz0oZw5jA,4065 
-plexapi/audio.py,sha256=A6hI88X3nP2fTiXMMu7Y_-iRGOtr6K_iRJtw2yzuX6g,29505 -plexapi/base.py,sha256=aeCngmI8GHicvzIurfGoPFtAfFJXbMJzZ8b8Fi0d3yo,49100 -plexapi/client.py,sha256=IMbtTVes6_XFO6KBOtMqX4DDvY-iC-94lzb4wdIzYS8,27906 -plexapi/collection.py,sha256=Cv4xQQMY0YzfrD4FJirUwepPbq28CkJOiCPauNueaQQ,26267 -plexapi/config.py,sha256=1kiGaq-DooB9zy5KvMLls6_FyIyPIXwddh0zjcmpD-U,2696 -plexapi/const.py,sha256=0tyh_Wsx9JgzwWD0mCyTM6cLnFtukiMEhUqn9ibDSIU,239 -plexapi/exceptions.py,sha256=yQYnQk07EQcwvFGJ44rXPt9Q3L415BYqyxxOCj2R8CI,683 -plexapi/gdm.py,sha256=SVi6uZu5pCuLNUAPIm8WeIJy1J55NTtN-bsBnTvB6Ec,5066 -plexapi/library.py,sha256=ceJryNLApNus7MCmQ8nm4HuO2_UKBgTdD1EGVI-u6yA,143847 -plexapi/media.py,sha256=Gsx8IqSUF71Qg4fCVQiEdPcepc3-i0jlgowEVgZiAj0,56451 -plexapi/mixins.py,sha256=hICrNwbVznjPDibsQHiHXMQ2T4fDooszlR7U47wJ3QM,49090 -plexapi/myplex.py,sha256=AQR2ZHM045-OAF8JN-f4yKMwFFO0B1r_eQPBsgVW0ps,99769 -plexapi/photo.py,sha256=eOyn_0wbXLQ7r0zADWbRTfbuRv70_NNN1DViwG2nW24,15702 -plexapi/playlist.py,sha256=SABCcXfDs3fLE_N0rUwqAkKTbduscQ6cDGpGoutGsrU,24436 -plexapi/playqueue.py,sha256=MU8fZMyTNTZOIJuPkNSGXijDAGeAuAVAiurtGzVFxG0,12937 -plexapi/server.py,sha256=tojLUl4sJdu2qnCwu0f_kac5_LKVfEI9SN5qJ553tms,64062 -plexapi/settings.py,sha256=3suRjHsJUBeRG61WXLpjmNxoTiRFLJMcuZZbzkaDK_Q,7149 -plexapi/sonos.py,sha256=tIr216CC-o2Vk8GLxsNPkXeyq4JYs9pgz244wbbFfgA,5099 -plexapi/sync.py,sha256=1NK-oeUKVvNnLFIVAq8d8vy2jG8Nu4gkQB295Qx2xYE,13728 -plexapi/utils.py,sha256=BvcUNCm_lPnDo5ny4aRlLtVT6KobVG4EqwPjN4w3kAc,24246 -plexapi/video.py,sha256=9DUhtyA1KCwVN8IoJUfa_kUlZEbplWFCli6-D0nr__k,62939 diff --git a/libs/PlexAPI-4.16.1.dist-info/WHEEL b/libs/PlexAPI-4.16.1.dist-info/WHEEL deleted file mode 100644 index 57e3d840d5..0000000000 --- a/libs/PlexAPI-4.16.1.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.38.4) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/libs/PySocks-1.7.1.dist-info/METADATA b/libs/PySocks-1.7.1.dist-info/METADATA index 
ea990785ed..00e201fec4 100644 --- a/libs/PySocks-1.7.1.dist-info/METADATA +++ b/libs/PySocks-1.7.1.dist-info/METADATA @@ -1,4 +1,4 @@ -Metadata-Version: 2.1 +Metadata-Version: 2.4 Name: PySocks Version: 1.7.1 Summary: A Python SOCKS client module. See https://github.com/Anorov/PySocks for more information. @@ -16,6 +16,17 @@ Classifier: Programming Language :: Python :: 3.6 Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* Description-Content-Type: text/markdown License-File: LICENSE +Dynamic: author +Dynamic: author-email +Dynamic: classifier +Dynamic: description +Dynamic: description-content-type +Dynamic: home-page +Dynamic: keywords +Dynamic: license +Dynamic: license-file +Dynamic: requires-python +Dynamic: summary PySocks ======= diff --git a/libs/PySocks-1.7.1.dist-info/RECORD b/libs/PySocks-1.7.1.dist-info/RECORD index 3b5bbf3d16..6fc522a8b3 100644 --- a/libs/PySocks-1.7.1.dist-info/RECORD +++ b/libs/PySocks-1.7.1.dist-info/RECORD @@ -1,9 +1,9 @@ -PySocks-1.7.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -PySocks-1.7.1.dist-info/LICENSE,sha256=cCfiFOAU63i3rcwc7aWspxOnn8T2oMUsnaWz5wfm_-k,1401 -PySocks-1.7.1.dist-info/METADATA,sha256=RThVWnkrwm4fr1ITwGmvqqDXAYxHZG_WIoyRdQTBk4g,13237 -PySocks-1.7.1.dist-info/RECORD,, -PySocks-1.7.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -PySocks-1.7.1.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPEjma1nZyJ0,92 -PySocks-1.7.1.dist-info/top_level.txt,sha256=TKSOIfCFBoK9EY8FBYbYqC3PWd3--G15ph9n8-QHPDk,19 +pysocks-1.7.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pysocks-1.7.1.dist-info/METADATA,sha256=s0tCSaT0WJRL6NNS7WXhn9ua6ECBV8DbCiIuNMaA9gk,13468 +pysocks-1.7.1.dist-info/RECORD,, +pysocks-1.7.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pysocks-1.7.1.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92 
+pysocks-1.7.1.dist-info/licenses/LICENSE,sha256=cCfiFOAU63i3rcwc7aWspxOnn8T2oMUsnaWz5wfm_-k,1401 +pysocks-1.7.1.dist-info/top_level.txt,sha256=TKSOIfCFBoK9EY8FBYbYqC3PWd3--G15ph9n8-QHPDk,19 socks.py,sha256=xOYn27t9IGrbTBzWsUUuPa0YBuplgiUykzkOB5V5iFY,31086 sockshandler.py,sha256=2SYGj-pwt1kjgLoZAmyeaEXCeZDWRmfVS_QG6kErGtY,3966 diff --git a/libs/PySocks-1.7.1.dist-info/WHEEL b/libs/PySocks-1.7.1.dist-info/WHEEL index ba48cbcf92..0885d05555 100644 --- a/libs/PySocks-1.7.1.dist-info/WHEEL +++ b/libs/PySocks-1.7.1.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: bdist_wheel (0.41.3) +Generator: setuptools (80.10.2) Root-Is-Purelib: true Tag: py3-none-any diff --git a/libs/PySocks-1.7.1.dist-info/LICENSE b/libs/PySocks-1.7.1.dist-info/licenses/LICENSE similarity index 100% rename from libs/PySocks-1.7.1.dist-info/LICENSE rename to libs/PySocks-1.7.1.dist-info/licenses/LICENSE diff --git a/libs/PyYAML-6.0.2.dist-info/METADATA b/libs/PyYAML-6.0.2.dist-info/METADATA deleted file mode 100644 index db029b770c..0000000000 --- a/libs/PyYAML-6.0.2.dist-info/METADATA +++ /dev/null @@ -1,46 +0,0 @@ -Metadata-Version: 2.1 -Name: PyYAML -Version: 6.0.2 -Summary: YAML parser and emitter for Python -Home-page: https://pyyaml.org/ -Download-URL: https://pypi.org/project/PyYAML/ -Author: Kirill Simonov -Author-email: xi@resolvent.net -License: MIT -Project-URL: Bug Tracker, https://github.com/yaml/pyyaml/issues -Project-URL: CI, https://github.com/yaml/pyyaml/actions -Project-URL: Documentation, https://pyyaml.org/wiki/PyYAMLDocumentation -Project-URL: Mailing lists, http://lists.sourceforge.net/lists/listinfo/yaml-core -Project-URL: Source Code, https://github.com/yaml/pyyaml -Platform: Any -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Cython -Classifier: Programming Language :: 
Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3.13 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Text Processing :: Markup -Requires-Python: >=3.8 -License-File: LICENSE - -YAML is a data serialization format designed for human readability -and interaction with scripting languages. PyYAML is a YAML parser -and emitter for Python. - -PyYAML features a complete YAML 1.1 parser, Unicode support, pickle -support, capable extension API, and sensible error messages. PyYAML -supports standard YAML tags and provides Python-specific tags that -allow to represent an arbitrary Python object. - -PyYAML is applicable for a broad range of tasks from complex -configuration files to object serialization and persistence. 
diff --git a/libs/PyYAML-6.0.2.dist-info/RECORD b/libs/PyYAML-6.0.2.dist-info/RECORD deleted file mode 100644 index f01fe76227..0000000000 --- a/libs/PyYAML-6.0.2.dist-info/RECORD +++ /dev/null @@ -1,25 +0,0 @@ -PyYAML-6.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -PyYAML-6.0.2.dist-info/LICENSE,sha256=jTko-dxEkP1jVwfLiOsmvXZBAqcoKVQwfT5RZ6V36KQ,1101 -PyYAML-6.0.2.dist-info/METADATA,sha256=9-odFB5seu4pGPcEv7E8iyxNF51_uKnaNGjLAhz2lto,2060 -PyYAML-6.0.2.dist-info/RECORD,, -PyYAML-6.0.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -PyYAML-6.0.2.dist-info/WHEEL,sha256=39uaw0gKzAUihvDPhgMAk_aKXc5F8smdVlzAUVAVruU,109 -PyYAML-6.0.2.dist-info/top_level.txt,sha256=rpj0IVMTisAjh_1vG3Ccf9v5jpCQwAz6cD1IVU5ZdhQ,11 -_yaml/__init__.py,sha256=04Ae_5osxahpJHa3XBZUAf4wi6XX32gR8D6X6p64GEA,1402 -yaml/__init__.py,sha256=N35S01HMesFTe0aRRMWkPj0Pa8IEbHpE9FK7cr5Bdtw,12311 -yaml/composer.py,sha256=_Ko30Wr6eDWUeUpauUGT3Lcg9QPBnOPVlTnIMRGJ9FM,4883 -yaml/constructor.py,sha256=kNgkfaeLUkwQYY_Q6Ff1Tz2XVw_pG1xVE9Ak7z-viLA,28639 -yaml/cyaml.py,sha256=6ZrAG9fAYvdVe2FK_w0hmXoG7ZYsoYUwapG8CiC72H0,3851 -yaml/dumper.py,sha256=PLctZlYwZLp7XmeUdwRuv4nYOZ2UBnDIUy8-lKfLF-o,2837 -yaml/emitter.py,sha256=jghtaU7eFwg31bG0B7RZea_29Adi9CKmXq_QjgQpCkQ,43006 -yaml/error.py,sha256=Ah9z-toHJUbE9j-M8YpxgSRM5CgLCcwVzJgLLRF2Fxo,2533 -yaml/events.py,sha256=50_TksgQiE4up-lKo_V-nBy-tAIxkIPQxY5qDhKCeHw,2445 -yaml/loader.py,sha256=UVa-zIqmkFSCIYq_PgSGm4NSJttHY2Rf_zQ4_b1fHN0,2061 -yaml/nodes.py,sha256=gPKNj8pKCdh2d4gr3gIYINnPOaOxGhJAUiYhGRnPE84,1440 -yaml/parser.py,sha256=ilWp5vvgoHFGzvOZDItFoGjD6D42nhlZrZyjAwa0oJo,25495 -yaml/reader.py,sha256=0dmzirOiDG4Xo41RnuQS7K9rkY3xjHiVasfDMNTqCNw,6794 -yaml/representer.py,sha256=IuWP-cAW9sHKEnS0gCqSa894k1Bg4cgTxaDwIcbRQ-Y,14190 -yaml/resolver.py,sha256=9L-VYfm4mWHxUD1Vg4X7rjDRK_7VZd6b92wzq7Y2IKY,9004 -yaml/scanner.py,sha256=YEM3iLZSaQwXcQRg2l2R4MdT0zGP2F9eHkKGKnHyWQY,51279 
-yaml/serializer.py,sha256=ChuFgmhU01hj4xgI8GaKv6vfM2Bujwa9i7d2FAHj7cA,4165 -yaml/tokens.py,sha256=lTQIzSVw8Mg9wv459-TjiOQe6wVziqaRlqX2_89rp54,2573 diff --git a/libs/PyYAML-6.0.2.dist-info/WHEEL b/libs/PyYAML-6.0.2.dist-info/WHEEL deleted file mode 100644 index b8b9cfd4ca..0000000000 --- a/libs/PyYAML-6.0.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.44.0) -Root-Is-Purelib: false -Tag: cp38-cp38-macosx_12_0_x86_64 - diff --git a/libs/APScheduler-3.10.4.dist-info/INSTALLER b/libs/PyYAML-6.0.3.dist-info/INSTALLER similarity index 100% rename from libs/APScheduler-3.10.4.dist-info/INSTALLER rename to libs/PyYAML-6.0.3.dist-info/INSTALLER diff --git a/libs/PyYAML-6.0.2.dist-info/LICENSE b/libs/PyYAML-6.0.3.dist-info/LICENSE similarity index 100% rename from libs/PyYAML-6.0.2.dist-info/LICENSE rename to libs/PyYAML-6.0.3.dist-info/LICENSE diff --git a/libs/PyYAML-6.0.3.dist-info/METADATA b/libs/PyYAML-6.0.3.dist-info/METADATA new file mode 100644 index 0000000000..330ffe9df9 --- /dev/null +++ b/libs/PyYAML-6.0.3.dist-info/METADATA @@ -0,0 +1,59 @@ +Metadata-Version: 2.1 +Name: PyYAML +Version: 6.0.3 +Summary: YAML parser and emitter for Python +Home-page: https://pyyaml.org/ +Download-URL: https://pypi.org/project/PyYAML/ +Author: Kirill Simonov +Author-email: xi@resolvent.net +License: MIT +Project-URL: Bug Tracker, https://github.com/yaml/pyyaml/issues +Project-URL: CI, https://github.com/yaml/pyyaml/actions +Project-URL: Documentation, https://pyyaml.org/wiki/PyYAMLDocumentation +Project-URL: Mailing lists, http://lists.sourceforge.net/lists/listinfo/yaml-core +Project-URL: Source Code, https://github.com/yaml/pyyaml +Platform: Any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Cython +Classifier: Programming Language :: Python 
+Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: 3.14 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Text Processing :: Markup +Requires-Python: >=3.8 +License-File: LICENSE +Dynamic: author +Dynamic: author-email +Dynamic: classifier +Dynamic: description +Dynamic: download-url +Dynamic: home-page +Dynamic: license +Dynamic: license-file +Dynamic: platform +Dynamic: project-url +Dynamic: requires-python +Dynamic: summary + +YAML is a data serialization format designed for human readability +and interaction with scripting languages. PyYAML is a YAML parser +and emitter for Python. + +PyYAML features a complete YAML 1.1 parser, Unicode support, pickle +support, capable extension API, and sensible error messages. PyYAML +supports standard YAML tags and provides Python-specific tags that +allow to represent an arbitrary Python object. + +PyYAML is applicable for a broad range of tasks from complex +configuration files to object serialization and persistence. 
diff --git a/libs/PyYAML-6.0.3.dist-info/RECORD b/libs/PyYAML-6.0.3.dist-info/RECORD new file mode 100644 index 0000000000..f5113593a2 --- /dev/null +++ b/libs/PyYAML-6.0.3.dist-info/RECORD @@ -0,0 +1,25 @@ +PyYAML-6.0.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +PyYAML-6.0.3.dist-info/LICENSE,sha256=jTko-dxEkP1jVwfLiOsmvXZBAqcoKVQwfT5RZ6V36KQ,1101 +PyYAML-6.0.3.dist-info/METADATA,sha256=flU4VTkFLVOleKPdLjqz_9sAdVG3dXdBfUSdeA5B4Tk,2351 +PyYAML-6.0.3.dist-info/RECORD,, +PyYAML-6.0.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +PyYAML-6.0.3.dist-info/WHEEL,sha256=hFYUutZ2FUL_WAhfJuJKZaElTlo_M4xZvGUwp8hfNTA,110 +PyYAML-6.0.3.dist-info/top_level.txt,sha256=rpj0IVMTisAjh_1vG3Ccf9v5jpCQwAz6cD1IVU5ZdhQ,11 +_yaml/__init__.py,sha256=04Ae_5osxahpJHa3XBZUAf4wi6XX32gR8D6X6p64GEA,1402 +yaml/__init__.py,sha256=sZ38wzPWp139cwc5ARZFByUvJxtB07X32FUQAzoFR6c,12311 +yaml/composer.py,sha256=_Ko30Wr6eDWUeUpauUGT3Lcg9QPBnOPVlTnIMRGJ9FM,4883 +yaml/constructor.py,sha256=kNgkfaeLUkwQYY_Q6Ff1Tz2XVw_pG1xVE9Ak7z-viLA,28639 +yaml/cyaml.py,sha256=6ZrAG9fAYvdVe2FK_w0hmXoG7ZYsoYUwapG8CiC72H0,3851 +yaml/dumper.py,sha256=PLctZlYwZLp7XmeUdwRuv4nYOZ2UBnDIUy8-lKfLF-o,2837 +yaml/emitter.py,sha256=jghtaU7eFwg31bG0B7RZea_29Adi9CKmXq_QjgQpCkQ,43006 +yaml/error.py,sha256=Ah9z-toHJUbE9j-M8YpxgSRM5CgLCcwVzJgLLRF2Fxo,2533 +yaml/events.py,sha256=50_TksgQiE4up-lKo_V-nBy-tAIxkIPQxY5qDhKCeHw,2445 +yaml/loader.py,sha256=UVa-zIqmkFSCIYq_PgSGm4NSJttHY2Rf_zQ4_b1fHN0,2061 +yaml/nodes.py,sha256=gPKNj8pKCdh2d4gr3gIYINnPOaOxGhJAUiYhGRnPE84,1440 +yaml/parser.py,sha256=ilWp5vvgoHFGzvOZDItFoGjD6D42nhlZrZyjAwa0oJo,25495 +yaml/reader.py,sha256=0dmzirOiDG4Xo41RnuQS7K9rkY3xjHiVasfDMNTqCNw,6794 +yaml/representer.py,sha256=IuWP-cAW9sHKEnS0gCqSa894k1Bg4cgTxaDwIcbRQ-Y,14190 +yaml/resolver.py,sha256=9L-VYfm4mWHxUD1Vg4X7rjDRK_7VZd6b92wzq7Y2IKY,9004 +yaml/scanner.py,sha256=YEM3iLZSaQwXcQRg2l2R4MdT0zGP2F9eHkKGKnHyWQY,51279 
+yaml/serializer.py,sha256=ChuFgmhU01hj4xgI8GaKv6vfM2Bujwa9i7d2FAHj7cA,4165 +yaml/tokens.py,sha256=lTQIzSVw8Mg9wv459-TjiOQe6wVziqaRlqX2_89rp54,2573 diff --git a/libs/APScheduler-3.10.4.dist-info/REQUESTED b/libs/PyYAML-6.0.3.dist-info/REQUESTED similarity index 100% rename from libs/APScheduler-3.10.4.dist-info/REQUESTED rename to libs/PyYAML-6.0.3.dist-info/REQUESTED diff --git a/libs/PyYAML-6.0.3.dist-info/WHEEL b/libs/PyYAML-6.0.3.dist-info/WHEEL new file mode 100644 index 0000000000..4cfaefcad6 --- /dev/null +++ b/libs/PyYAML-6.0.3.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.44.0) +Root-Is-Purelib: false +Tag: cp310-cp310-macosx_14_0_arm64 + diff --git a/libs/PyYAML-6.0.2.dist-info/top_level.txt b/libs/PyYAML-6.0.3.dist-info/top_level.txt similarity index 100% rename from libs/PyYAML-6.0.2.dist-info/top_level.txt rename to libs/PyYAML-6.0.3.dist-info/top_level.txt diff --git a/libs/SQLAlchemy-2.0.37.dist-info/LICENSE b/libs/SQLAlchemy-2.0.37.dist-info/LICENSE deleted file mode 100644 index dfe1a4d815..0000000000 --- a/libs/SQLAlchemy-2.0.37.dist-info/LICENSE +++ /dev/null @@ -1,19 +0,0 @@ -Copyright 2005-2025 SQLAlchemy authors and contributors . - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/libs/SQLAlchemy-2.0.37.dist-info/METADATA b/libs/SQLAlchemy-2.0.37.dist-info/METADATA deleted file mode 100644 index e548f29da2..0000000000 --- a/libs/SQLAlchemy-2.0.37.dist-info/METADATA +++ /dev/null @@ -1,243 +0,0 @@ -Metadata-Version: 2.1 -Name: SQLAlchemy -Version: 2.0.37 -Summary: Database Abstraction Library -Home-page: https://www.sqlalchemy.org -Author: Mike Bayer -Author-email: mike_mp@zzzcomputing.com -License: MIT -Project-URL: Documentation, https://docs.sqlalchemy.org -Project-URL: Issue Tracker, https://github.com/sqlalchemy/sqlalchemy/ -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3.13 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Topic :: Database :: Front-Ends -Requires-Python: >=3.7 -Description-Content-Type: text/x-rst -License-File: LICENSE -Requires-Dist: typing-extensions >=4.6.0 -Requires-Dist: greenlet !=0.4.17 ; python_version < "3.14" and (platform_machine == "aarch64" or (platform_machine == "ppc64le" or (platform_machine == "x86_64" or (platform_machine == "amd64" 
or (platform_machine == "AMD64" or (platform_machine == "win32" or platform_machine == "WIN32")))))) -Requires-Dist: importlib-metadata ; python_version < "3.8" -Provides-Extra: aiomysql -Requires-Dist: greenlet !=0.4.17 ; extra == 'aiomysql' -Requires-Dist: aiomysql >=0.2.0 ; extra == 'aiomysql' -Provides-Extra: aioodbc -Requires-Dist: greenlet !=0.4.17 ; extra == 'aioodbc' -Requires-Dist: aioodbc ; extra == 'aioodbc' -Provides-Extra: aiosqlite -Requires-Dist: greenlet !=0.4.17 ; extra == 'aiosqlite' -Requires-Dist: aiosqlite ; extra == 'aiosqlite' -Requires-Dist: typing-extensions !=3.10.0.1 ; extra == 'aiosqlite' -Provides-Extra: asyncio -Requires-Dist: greenlet !=0.4.17 ; extra == 'asyncio' -Provides-Extra: asyncmy -Requires-Dist: greenlet !=0.4.17 ; extra == 'asyncmy' -Requires-Dist: asyncmy !=0.2.4,!=0.2.6,>=0.2.3 ; extra == 'asyncmy' -Provides-Extra: mariadb_connector -Requires-Dist: mariadb !=1.1.10,!=1.1.2,!=1.1.5,>=1.0.1 ; extra == 'mariadb_connector' -Provides-Extra: mssql -Requires-Dist: pyodbc ; extra == 'mssql' -Provides-Extra: mssql_pymssql -Requires-Dist: pymssql ; extra == 'mssql_pymssql' -Provides-Extra: mssql_pyodbc -Requires-Dist: pyodbc ; extra == 'mssql_pyodbc' -Provides-Extra: mypy -Requires-Dist: mypy >=0.910 ; extra == 'mypy' -Provides-Extra: mysql -Requires-Dist: mysqlclient >=1.4.0 ; extra == 'mysql' -Provides-Extra: mysql_connector -Requires-Dist: mysql-connector-python ; extra == 'mysql_connector' -Provides-Extra: oracle -Requires-Dist: cx-oracle >=8 ; extra == 'oracle' -Provides-Extra: oracle_oracledb -Requires-Dist: oracledb >=1.0.1 ; extra == 'oracle_oracledb' -Provides-Extra: postgresql -Requires-Dist: psycopg2 >=2.7 ; extra == 'postgresql' -Provides-Extra: postgresql_asyncpg -Requires-Dist: greenlet !=0.4.17 ; extra == 'postgresql_asyncpg' -Requires-Dist: asyncpg ; extra == 'postgresql_asyncpg' -Provides-Extra: postgresql_pg8000 -Requires-Dist: pg8000 >=1.29.1 ; extra == 'postgresql_pg8000' -Provides-Extra: postgresql_psycopg 
-Requires-Dist: psycopg >=3.0.7 ; extra == 'postgresql_psycopg' -Provides-Extra: postgresql_psycopg2binary -Requires-Dist: psycopg2-binary ; extra == 'postgresql_psycopg2binary' -Provides-Extra: postgresql_psycopg2cffi -Requires-Dist: psycopg2cffi ; extra == 'postgresql_psycopg2cffi' -Provides-Extra: postgresql_psycopgbinary -Requires-Dist: psycopg[binary] >=3.0.7 ; extra == 'postgresql_psycopgbinary' -Provides-Extra: pymysql -Requires-Dist: pymysql ; extra == 'pymysql' -Provides-Extra: sqlcipher -Requires-Dist: sqlcipher3-binary ; extra == 'sqlcipher' - -SQLAlchemy -========== - -|PyPI| |Python| |Downloads| - -.. |PyPI| image:: https://img.shields.io/pypi/v/sqlalchemy - :target: https://pypi.org/project/sqlalchemy - :alt: PyPI - -.. |Python| image:: https://img.shields.io/pypi/pyversions/sqlalchemy - :target: https://pypi.org/project/sqlalchemy - :alt: PyPI - Python Version - -.. |Downloads| image:: https://static.pepy.tech/badge/sqlalchemy/month - :target: https://pepy.tech/project/sqlalchemy - :alt: PyPI - Downloads - - -The Python SQL Toolkit and Object Relational Mapper - -Introduction -------------- - -SQLAlchemy is the Python SQL toolkit and Object Relational Mapper -that gives application developers the full power and -flexibility of SQL. SQLAlchemy provides a full suite -of well known enterprise-level persistence patterns, -designed for efficient and high-performing database -access, adapted into a simple and Pythonic domain -language. - -Major SQLAlchemy features include: - -* An industrial strength ORM, built - from the core on the identity map, unit of work, - and data mapper patterns. These patterns - allow transparent persistence of objects - using a declarative configuration system. - Domain models - can be constructed and manipulated naturally, - and changes are synchronized with the - current transaction automatically. 
-* A relationally-oriented query system, exposing - the full range of SQL's capabilities - explicitly, including joins, subqueries, - correlation, and most everything else, - in terms of the object model. - Writing queries with the ORM uses the same - techniques of relational composition you use - when writing SQL. While you can drop into - literal SQL at any time, it's virtually never - needed. -* A comprehensive and flexible system - of eager loading for related collections and objects. - Collections are cached within a session, - and can be loaded on individual access, all - at once using joins, or by query per collection - across the full result set. -* A Core SQL construction system and DBAPI - interaction layer. The SQLAlchemy Core is - separate from the ORM and is a full database - abstraction layer in its own right, and includes - an extensible Python-based SQL expression - language, schema metadata, connection pooling, - type coercion, and custom types. -* All primary and foreign key constraints are - assumed to be composite and natural. Surrogate - integer primary keys are of course still the - norm, but SQLAlchemy never assumes or hardcodes - to this model. -* Database introspection and generation. Database - schemas can be "reflected" in one step into - Python structures representing database metadata; - those same structures can then generate - CREATE statements right back out - all within - the Core, independent of the ORM. - -SQLAlchemy's philosophy: - -* SQL databases behave less and less like object - collections the more size and performance start to - matter; object collections behave less and less like - tables and rows the more abstraction starts to matter. - SQLAlchemy aims to accommodate both of these - principles. -* An ORM doesn't need to hide the "R". A relational - database provides rich, set-based functionality - that should be fully exposed. 
SQLAlchemy's - ORM provides an open-ended set of patterns - that allow a developer to construct a custom - mediation layer between a domain model and - a relational schema, turning the so-called - "object relational impedance" issue into - a distant memory. -* The developer, in all cases, makes all decisions - regarding the design, structure, and naming conventions - of both the object model as well as the relational - schema. SQLAlchemy only provides the means - to automate the execution of these decisions. -* With SQLAlchemy, there's no such thing as - "the ORM generated a bad query" - you - retain full control over the structure of - queries, including how joins are organized, - how subqueries and correlation is used, what - columns are requested. Everything SQLAlchemy - does is ultimately the result of a developer-initiated - decision. -* Don't use an ORM if the problem doesn't need one. - SQLAlchemy consists of a Core and separate ORM - component. The Core offers a full SQL expression - language that allows Pythonic construction - of SQL constructs that render directly to SQL - strings for a target database, returning - result sets that are essentially enhanced DBAPI - cursors. -* Transactions should be the norm. With SQLAlchemy's - ORM, nothing goes to permanent storage until - commit() is called. SQLAlchemy encourages applications - to create a consistent means of delineating - the start and end of a series of operations. -* Never render a literal value in a SQL statement. - Bound parameters are used to the greatest degree - possible, allowing query optimizers to cache - query plans effectively and making SQL injection - attacks a non-issue. - -Documentation -------------- - -Latest documentation is at: - -https://www.sqlalchemy.org/docs/ - -Installation / Requirements ---------------------------- - -Full documentation for installation is at -`Installation `_. 
- -Getting Help / Development / Bug reporting ------------------------------------------- - -Please refer to the `SQLAlchemy Community Guide `_. - -Code of Conduct ---------------- - -Above all, SQLAlchemy places great emphasis on polite, thoughtful, and -constructive communication between users and developers. -Please see our current Code of Conduct at -`Code of Conduct `_. - -License -------- - -SQLAlchemy is distributed under the `MIT license -`_. - diff --git a/libs/SQLAlchemy-2.0.37.dist-info/RECORD b/libs/SQLAlchemy-2.0.37.dist-info/RECORD deleted file mode 100644 index d9bb0e04fe..0000000000 --- a/libs/SQLAlchemy-2.0.37.dist-info/RECORD +++ /dev/null @@ -1,275 +0,0 @@ -SQLAlchemy-2.0.37.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -SQLAlchemy-2.0.37.dist-info/LICENSE,sha256=mCFyC1jUpWW2EyEAeorUOraZGjlZ5mzV203Z6uacffw,1100 -SQLAlchemy-2.0.37.dist-info/METADATA,sha256=UywKCGKcABKNtpI-G6qnmmxpFaI6iJcHIDeLUQ2RvWQ,9692 -SQLAlchemy-2.0.37.dist-info/RECORD,, -SQLAlchemy-2.0.37.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -SQLAlchemy-2.0.37.dist-info/WHEEL,sha256=j8_MCNBI7KgztMI2VCVmNiYcEly_P_7tu-qcbaOXbrw,108 -SQLAlchemy-2.0.37.dist-info/top_level.txt,sha256=rp-ZgB7D8G11ivXON5VGPjupT1voYmWqkciDt5Uaw_Q,11 -sqlalchemy/__init__.py,sha256=m8AoRzqL1l_3uFAeJ_vwtlAfXkboxLKJ3oL1RqFnXbM,13033 -sqlalchemy/connectors/__init__.py,sha256=YeSHsOB0YhdM6jZUvHFQFwKqNXO02MlklmGW0yCywjI,476 -sqlalchemy/connectors/aioodbc.py,sha256=KT9xi2xQ4AJgDiGPTV5h_5qi9dummmenKAvWelwza3w,5288 -sqlalchemy/connectors/asyncio.py,sha256=00claZADdFUh2iQmlpqoLhLTBxK0i79Mwd9WZqUtleM,6138 -sqlalchemy/connectors/pyodbc.py,sha256=GsW9bD0H30OMTbGDx9SdaTT_ujgpxP7TM4rfhIzD4mo,8501 -sqlalchemy/cyextension/__init__.py,sha256=4npVIjitKfUs0NQ6f3UdQBDq4ipJ0_ZNB2mpKqtc5ik,244 -sqlalchemy/cyextension/collections.cpython-38-darwin.so,sha256=sEF81qKQrUMYUNYDc5rR0o9KN5ntUi5sU3VFXT_Gbs4,233760 
-sqlalchemy/cyextension/collections.pyx,sha256=L7DZ3DGKpgw2MT2ZZRRxCnrcyE5pU1NAFowWgAzQPEc,12571 -sqlalchemy/cyextension/immutabledict.cpython-38-darwin.so,sha256=zPUP2rtm01kkegS3n-rYzu7VgmZNo1hQS65vgIUme4U,94672 -sqlalchemy/cyextension/immutabledict.pxd,sha256=3x3-rXG5eRQ7bBnktZ-OJ9-6ft8zToPmTDOd92iXpB0,291 -sqlalchemy/cyextension/immutabledict.pyx,sha256=KfDTYbTfebstE8xuqAtuXsHNAK0_b5q_ymUiinUe_xs,3535 -sqlalchemy/cyextension/processors.cpython-38-darwin.so,sha256=ZXviRx-dUBLWF5Z2xb2ImYGZR2s1VEEX0rvOvBihfYk,75864 -sqlalchemy/cyextension/processors.pyx,sha256=R1rHsGLEaGeBq5VeCydjClzYlivERIJ9B-XLOJlf2MQ,1792 -sqlalchemy/cyextension/resultproxy.cpython-38-darwin.so,sha256=uA4vRLlgWg8sEjeTXmJ5v4DAxpCubWTNMJGDRgtlgzY,78016 -sqlalchemy/cyextension/resultproxy.pyx,sha256=eWLdyBXiBy_CLQrF5ScfWJm7X0NeelscSXedtj1zv9Q,2725 -sqlalchemy/cyextension/util.cpython-38-darwin.so,sha256=BA75sGnaj5sISpo7_70Id7LycML4iHxagdVolVa8Mf4,93008 -sqlalchemy/cyextension/util.pyx,sha256=B85orxa9LddLuQEaDoVSq1XmAXIbLKxrxpvuB8ogV_o,2530 -sqlalchemy/dialects/__init__.py,sha256=4jxiSgI_fVCNXcz42gQYKEp0k07RAHyQN4ZpjaNsFUI,1770 -sqlalchemy/dialects/_typing.py,sha256=8YwrkOa8IvmBojwwegbL5mL_0UAuzdqYiKHKANpvHMw,971 -sqlalchemy/dialects/mssql/__init__.py,sha256=6t_aNpgbMLdPE9gpHYTf9o6QfVavncztRLbr21l2NaY,1880 -sqlalchemy/dialects/mssql/aioodbc.py,sha256=4CmhwIkZrabpG-r7_ogRVajD-nhRZSFJ0Swz2d0jIHM,2021 -sqlalchemy/dialects/mssql/base.py,sha256=2UCotpN3WBPgMddhXVP6Epc-srvNrYHCnK4kcEbjW6w,132713 -sqlalchemy/dialects/mssql/information_schema.py,sha256=v5MZz1FN72THEwF_u3Eh_2vnWdFE13RYydOioMMcuvU,8084 -sqlalchemy/dialects/mssql/json.py,sha256=F53pibuOVRzgDtjoclOI7LnkKXNVsaVfJyBH1XAhyDo,4756 -sqlalchemy/dialects/mssql/provision.py,sha256=P1tqxZ4f6Oeqn2gNi7dXl82LRLCg1-OB4eWiZc6CHek,5593 -sqlalchemy/dialects/mssql/pymssql.py,sha256=C7yAs3Pw81W1KTVNc6_0sHQuYlJ5iH82vKByY4TkB1g,4097 -sqlalchemy/dialects/mssql/pyodbc.py,sha256=CnO7KDWxbxb7AoZhp_PMDBvVSMuzwq1h4Cav2IWFWDo,27173 
-sqlalchemy/dialects/mysql/__init__.py,sha256=ropOMUWrAcL-Q7h-9jQ_tb3ISAFIsNRQ8YVXvn0URl0,2206 -sqlalchemy/dialects/mysql/aiomysql.py,sha256=yrujoFtAG0QvtVlgbGBUMg3kXeXlIH62tvyYTCMUfnE,10013 -sqlalchemy/dialects/mysql/asyncmy.py,sha256=rmVSf86VYxgAUROIKfVtvS-grG9aPBiLY_Gu0KJMjuo,10081 -sqlalchemy/dialects/mysql/base.py,sha256=LkGJ6G1U2xygOawOtQYBfTipGh8MuiE1kNxaD7S9UIY,123432 -sqlalchemy/dialects/mysql/cymysql.py,sha256=KwxSsF4a6uUd6yblhSns8uj4hgmhv4hFInTZNdmRixA,2300 -sqlalchemy/dialects/mysql/dml.py,sha256=VjnTobe_SBNF2RN6tvqa5LOn-9x4teVUyzUedZkOmdc,7768 -sqlalchemy/dialects/mysql/enumerated.py,sha256=qI5gnBYhxk9dhPeUfGiijp0qT2Puazdp27-ba_38uWQ,8447 -sqlalchemy/dialects/mysql/expression.py,sha256=3PEKPwYIZ8mVXkjUgHaj_efPBYuBNWZSnfUcJuoZddA,4121 -sqlalchemy/dialects/mysql/json.py,sha256=W31DojiRypifXKVh3PJSWP7IHqFoeKwzLl-0CJH6QRI,2269 -sqlalchemy/dialects/mysql/mariadb.py,sha256=g4v4WQuXHn556Nn6k-RgvPrmfCql1R46fIEk6UEx0U8,1450 -sqlalchemy/dialects/mysql/mariadbconnector.py,sha256=t4m6kfYBoURjNXRxlEsRajjvArNDc4lmaFGxHQh7VTo,8623 -sqlalchemy/dialects/mysql/mysqlconnector.py,sha256=gdNOGdRqvnCbLZpKjpubu_0tGRQ5Tn_2TZvbp3v9rX0,5729 -sqlalchemy/dialects/mysql/mysqldb.py,sha256=5ME7B0WI9G8tw5482YBejDg38uVMXR2oUasNDOCsAqQ,9526 -sqlalchemy/dialects/mysql/provision.py,sha256=5LCeInPvyEbGuzxSs9rnnLYkMsFpW3IJ8lC-sjTfKnk,3575 -sqlalchemy/dialects/mysql/pymysql.py,sha256=osp0em1s3Cip5Vpcj-PeaH7btHEInorO-qs351muw3Q,4082 -sqlalchemy/dialects/mysql/pyodbc.py,sha256=ZiFNJQq2qiOTzTZLmNJQ938EnS1ItVsNDa3fvNEDqnI,4298 -sqlalchemy/dialects/mysql/reflection.py,sha256=eGV9taua0nZS_HsHyAy6zjcHEHFPXmFdux-bUmtOeWs,22834 -sqlalchemy/dialects/mysql/reserved_words.py,sha256=C9npWSuhsxoVCqETxCQ1zE_UEgy4gfiHw9zI5dPkjWI,9258 -sqlalchemy/dialects/mysql/types.py,sha256=w68OASMw04xkyAc0_GtXkuEhhVqlR6LTwaOch4KaAFQ,24343 -sqlalchemy/dialects/oracle/__init__.py,sha256=rp9qPRNQAk1Yq_Zhe7SsUH8EvFgNOAh8XOF17Lkxpyo,1493 
-sqlalchemy/dialects/oracle/base.py,sha256=_JF4OwXmXjAsXj8wXq2m8M2vtMjoxdlOwg1hfcgn3bc,123096 -sqlalchemy/dialects/oracle/cx_oracle.py,sha256=ohENTgLxGUfobRH3K8KdeZgBRPG1rX3vY-ph9blj-2g,56612 -sqlalchemy/dialects/oracle/dictionary.py,sha256=J7tGVE0KyUPZKpPLOary3HdDq1DWd29arF5udLgv8_o,19519 -sqlalchemy/dialects/oracle/oracledb.py,sha256=veqto1AUIbSxRmpUQin0ysMV8Y6sWAkzXt7W8IIl118,33771 -sqlalchemy/dialects/oracle/provision.py,sha256=ga1gNQZlXZKk7DYuYegllUejJxZXRKDGa7dbi_S_poc,8313 -sqlalchemy/dialects/oracle/types.py,sha256=axN6Yidx9tGRIUAbDpBrhMWXE-C8jSllFpTghpGOOzU,9058 -sqlalchemy/dialects/postgresql/__init__.py,sha256=kD8W-SV5e2CesvWg2MQAtncXuZFwGPfR_UODvmRXE08,3892 -sqlalchemy/dialects/postgresql/_psycopg_common.py,sha256=szME-lCjVwqnW9-USA6e8ke8N_bN3IbqnIm_oZruvqc,5696 -sqlalchemy/dialects/postgresql/array.py,sha256=28kndSQwgvNWlO4z6MUh5WYAtNSgkgBa6qSEQCIflks,13856 -sqlalchemy/dialects/postgresql/asyncpg.py,sha256=ysIDXcGT3OG2lu0YdiIn-_pzfL0uDe-tmHs70fOWVVE,41283 -sqlalchemy/dialects/postgresql/base.py,sha256=otAswEHqeRhbN9_AGMxnwDo6r872ECkiJ5FMetXfS0k,179452 -sqlalchemy/dialects/postgresql/dml.py,sha256=2SmyMeYveAgm7OnT_CJvwad2nh8BP37yT6gFs8dBYN8,12126 -sqlalchemy/dialects/postgresql/ext.py,sha256=MtN4IU5sRYvoY-E8PTltJ1CuIGb-aCwY2pHMPJcTboA,16318 -sqlalchemy/dialects/postgresql/hstore.py,sha256=wR4gmvfQWPssHwYTXEsPJTb4LkBS6x4e4XXE6smtDH4,11934 -sqlalchemy/dialects/postgresql/json.py,sha256=9sHFGTRFyNbLsANrVYookw9NOJwIPTsEBRNIOUOzOGw,11612 -sqlalchemy/dialects/postgresql/named_types.py,sha256=TEWaBCjuHM2WJoQNrQErQ6f_bUkWypGJfW71wzVJXWc,17572 -sqlalchemy/dialects/postgresql/operators.py,sha256=ay3ckNsWtqDjxDseTdKMGGqYVzST6lmfhbbYHG_bxCw,2808 -sqlalchemy/dialects/postgresql/pg8000.py,sha256=RAykzZuO3Anr6AsyK2JYr7CPb2pru6WtkrX2phCyCGU,18638 -sqlalchemy/dialects/postgresql/pg_catalog.py,sha256=lgJMn7aDuJI2XeHddLkge5NFy6oB2-aDSn8A47QpwAU,9254 -sqlalchemy/dialects/postgresql/provision.py,sha256=7pg9-nOnaK5XBzqByXNPuvi3rxtnRa3dJxdSPVq4eeA,5770 
-sqlalchemy/dialects/postgresql/psycopg.py,sha256=k7zXsJj35aOXCrhsbMxwTQX5JWegrqirFJ1Hgbq-GjQ,23326 -sqlalchemy/dialects/postgresql/psycopg2.py,sha256=1KXw9RzsQEAXJazCBywdP5CwLu-HsCSDAD_Khc_rPTM,32032 -sqlalchemy/dialects/postgresql/psycopg2cffi.py,sha256=nKilJfvO9mJwk5NRw5iZDekKY5vi379tvdUJ2vn5eyQ,1756 -sqlalchemy/dialects/postgresql/ranges.py,sha256=fnaj4YgCQGO-G_S4k5ea8bYMH7SzggKJdUX5qfaNp4Y,32978 -sqlalchemy/dialects/postgresql/types.py,sha256=sjb-m-h49lbLBFh0P30G8BWgf_aKNiNyVwWEktugwRw,7286 -sqlalchemy/dialects/sqlite/__init__.py,sha256=6Xcz3nPsl8lqCcZ4-VzPRmkMrkKgAp2buKsClZelU7c,1182 -sqlalchemy/dialects/sqlite/aiosqlite.py,sha256=FWS-Nn2jnpITQKGd4xOZCYEW-l1C_erQ3IdDJC855t8,12348 -sqlalchemy/dialects/sqlite/base.py,sha256=PvwPzukomHAkufUzSqgfJcbKC2ZJAkJbVnW2BQB2T58,98271 -sqlalchemy/dialects/sqlite/dml.py,sha256=4N8qh06RuMphLoQgWw7wv5nXIrka57jIFvK2x9xTZqg,9138 -sqlalchemy/dialects/sqlite/json.py,sha256=A62xPyLRZxl2hvgTMM92jd_7jlw9UE_4Y6Udqt-8g04,2777 -sqlalchemy/dialects/sqlite/provision.py,sha256=iLJyeQSy8pfr9lwEu4_d4O_CI4OavAtkNeRi3qqys1U,5632 -sqlalchemy/dialects/sqlite/pysqlcipher.py,sha256=di8rYryfL0KAn3pRGepmunHyIRGy-4Hhr-2q_ehPzss,5371 -sqlalchemy/dialects/sqlite/pysqlite.py,sha256=rg7F1S2UOhUu6Y1xNVaqF8VbA-FsRY_Y_XpGTpkKpGs,28087 -sqlalchemy/dialects/type_migration_guidelines.txt,sha256=-uHNdmYFGB7bzUNT6i8M5nb4j6j9YUKAtW4lcBZqsMg,8239 -sqlalchemy/engine/__init__.py,sha256=EF4haWCPu95WtWx1GzcHRJ_bBmtJMznno3I2TQ-ZIHE,2818 -sqlalchemy/engine/_py_processors.py,sha256=7QxgkVOd5h1Qd22qFh-pPZdM7RBRzNjj8lWAMWrilcI,3744 -sqlalchemy/engine/_py_row.py,sha256=yNdrZe36yw6mO7x0OEbG0dGojH7CQkNReIwn9LMUPUs,3787 -sqlalchemy/engine/_py_util.py,sha256=LdpbNRQIrJo3EkmiwNkM5bxGUf4uWuL5uS_u-zHadWc,2484 -sqlalchemy/engine/base.py,sha256=9kCWrDp3ECOlQ7BHK_efYAILo3-emcPSk4F8AFRgN7E,122901 -sqlalchemy/engine/characteristics.py,sha256=PepmGApo1sL01dS1qtSbmHplu9ZCdtuSegiGI7L7NZY,4765 -sqlalchemy/engine/create.py,sha256=4gFkqV7fgJbI1906DC4zDgFFX1-xJQ96GIHIrQuc-w4,33217 
-sqlalchemy/engine/cursor.py,sha256=6KIZqlwWMUMv02w_el4uNYFMYcfc7eWbkAxW27UyDLE,76305 -sqlalchemy/engine/default.py,sha256=SHM6boxcDNk7MW_Eyd0zCb557Eqf8KTdX1iTUbS0DLw,84705 -sqlalchemy/engine/events.py,sha256=4_e6Ip32ar2Eb27R4ipamiKC-7Tpg4lVz3txabhT5Rc,37400 -sqlalchemy/engine/interfaces.py,sha256=fGmcrBt8yT78ty0R3e3XUvsPh7XYDU_b1JW3QhK_MwY,113029 -sqlalchemy/engine/mock.py,sha256=_aXG1xzj_TO5UWdz8IthPj1ZJ8IlhsKw6D9mmFN_frQ,4181 -sqlalchemy/engine/processors.py,sha256=XK32bULBkuVVRa703u4-SrTCDi_a18Dxq1M09QFBEPw,2379 -sqlalchemy/engine/reflection.py,sha256=_v9zCy3h28hN4KKIUTc5_7KJv7argSgi8A011b_iCdc,75383 -sqlalchemy/engine/result.py,sha256=rgny4qFLmpj80GSdFK35Dpgc3Qk2tc3eJPpahGWVR-M,77622 -sqlalchemy/engine/row.py,sha256=BPtAwsceiRxB9ANpDNM24uQ1M_Zs0xFkSXoKR_I8xyY,12031 -sqlalchemy/engine/strategies.py,sha256=-0rieXY-iXgV83OrJZr-wozFFQn3amKKHchQ6kL-r7A,442 -sqlalchemy/engine/url.py,sha256=gaEeSEJCD0nVEb8J02rIMASrd5L2wYdq5ZXJaj7szVI,31069 -sqlalchemy/engine/util.py,sha256=4OmXwFlmnq6_vBlfUBHnz5LrI_8bT3TwgynX4wcJfnw,5682 -sqlalchemy/event/__init__.py,sha256=ZjVxFGbt9neH5AC4GFiUN5IG2O4j6Z9v2LdmyagJi9w,997 -sqlalchemy/event/api.py,sha256=NetgcQfbURaZzoxus7_801YDG_LJ7PYqaC3T1lws114,8111 -sqlalchemy/event/attr.py,sha256=YhPXVBPj63Cfyn0nS6h8Ljq0SEbD3mtAZn9HYlzGbtw,20751 -sqlalchemy/event/base.py,sha256=OevVb82IrUoVgFRrjH4b5GquS5pjFHOgzWAxPwwTKMY,15127 -sqlalchemy/event/legacy.py,sha256=lGafKAOF6PY8Bz0AqhN9Q6n-lpXqFLwdv-0T6-UBpow,8227 -sqlalchemy/event/registry.py,sha256=MNEMyR8HZhzQFgxk4Jk_Em6nXTihmGXiSIwPdUnalPM,11144 -sqlalchemy/events.py,sha256=VBRvtckn9JS3tfUfi6UstqUrvQ15J2xamcDByFysIrI,525 -sqlalchemy/exc.py,sha256=AjFBCrOl_V4vQdGegn72Y951RSRMPL6T5qjxnFTGFbM,23978 -sqlalchemy/ext/__init__.py,sha256=BkTNuOg454MpCY9QA3FLK8td7KQhD1W74fOEXxnWibE,322 -sqlalchemy/ext/associationproxy.py,sha256=VhOFB1vB8hmDYQP90_VdpPI9IFzP3NENkG_eDKziVoI,66062 -sqlalchemy/ext/asyncio/__init__.py,sha256=kTIfpwsHWhqZ-VMOBZFBq66kt1XeF0hNuwOToEDe4_Y,1317 
-sqlalchemy/ext/asyncio/base.py,sha256=2YQ-nKaHbAm--7q6vbxbznzdwT8oPwetwAarKyu2O8E,8930 -sqlalchemy/ext/asyncio/engine.py,sha256=fe_RZrO-5DiiEgMZ3g-Lti-fdaR7z_Q8gDfPUf-30EY,48198 -sqlalchemy/ext/asyncio/exc.py,sha256=npijuILDXH2p4Q5RzhHzutKwZ5CjtqTcP-U0h9TZUmk,639 -sqlalchemy/ext/asyncio/result.py,sha256=zhhXe13vMT7OfdfGXapgtn4crtiqqctRLb3ka4mmGXY,30477 -sqlalchemy/ext/asyncio/scoping.py,sha256=4f7MX3zUd-4rA8A5wd7j0_GlqCSUxdOPfYd7BBIxkJI,52587 -sqlalchemy/ext/asyncio/session.py,sha256=2wxu06UtJGyf-be2edMFkcK4eLMh8xuGmsAlGRj0YPM,63166 -sqlalchemy/ext/automap.py,sha256=n88mktqvExwjqfsDu3yLIA4wbOIWUpQ1S35Uw3X6ffQ,61675 -sqlalchemy/ext/baked.py,sha256=w3SeRoqnPkIhPL2nRAxfVhyir2ypsiW4kmtmUGKs8qo,17753 -sqlalchemy/ext/compiler.py,sha256=f7o4qhUUldpsx4F1sQoUvdVaT2BhiemqNBCF4r_uQUo,20889 -sqlalchemy/ext/declarative/__init__.py,sha256=SuVflXOGDxx2sB2QSTqNEvqS0fyhOkh3-sy2lRsSOLA,1818 -sqlalchemy/ext/declarative/extensions.py,sha256=yHUPcztU-5E1JrNyELDFWKchAnaYK6Y9-dLcqyc1nUI,19531 -sqlalchemy/ext/horizontal_shard.py,sha256=vouIehpQAuwT0HXyWyynTL3m_gcBuLcB-X8lDB0uQ8U,16691 -sqlalchemy/ext/hybrid.py,sha256=DkvNGtiQYzlEBvs1rYEDXhM8vJEXXh_6DMigsHH9w4k,52531 -sqlalchemy/ext/indexable.py,sha256=_dTOgCS96jURcQd9L-hnUMIJDe9KUMyd9gfH57vs078,11065 -sqlalchemy/ext/instrumentation.py,sha256=iCp89rvfK7buW0jJyzKTBDKyMsd06oTRJDItOk4OVSw,15707 -sqlalchemy/ext/mutable.py,sha256=7Zyh2kQq2gm3J_JwsddinIXk7qUuKWbPzRZOmTultEk,37560 -sqlalchemy/ext/mypy/__init__.py,sha256=yVNtoBDNeTl1sqRoA_fSY3o1g6M8NxqUVvAHPRLmFTw,241 -sqlalchemy/ext/mypy/apply.py,sha256=v_Svc1WiBz9yBXqBVBKoCuPGN286TfVmuuCVZPlbyzo,10591 -sqlalchemy/ext/mypy/decl_class.py,sha256=Nuca4ofHkASAkdqEQlULYB7iLm_KID7Mp384seDhVGg,17384 -sqlalchemy/ext/mypy/infer.py,sha256=29vgn22Hi8E8oIZL6UJCBl6oipiPSAQjxccCEkVb410,19367 -sqlalchemy/ext/mypy/names.py,sha256=hn889DD1nlF0f3drsKi5KSGTG-JefJ2UJrrIQ4L8QWA,10479 -sqlalchemy/ext/mypy/plugin.py,sha256=9YHBp0Bwo92DbDZIUWwIr0hwXPcE4XvHs0-xshvSwUw,9750 
-sqlalchemy/ext/mypy/util.py,sha256=CuW2fJ-g9YtkjcypzmrPRaFc-rAvQTzW5A2-w5VTANg,9960 -sqlalchemy/ext/orderinglist.py,sha256=MROa19cm4RZkWXuUuqc1029r7g4HrAJRc17fTHeThvI,14431 -sqlalchemy/ext/serializer.py,sha256=_z95wZMTn3G3sCGN52gwzD4CuKjrhGMr5Eu8g9MxQNg,6169 -sqlalchemy/future/__init__.py,sha256=R1h8VBwMiIUdP3QHv_tFNby557425FJOAGhUoXGvCmc,512 -sqlalchemy/future/engine.py,sha256=2nJFBQAXAE8pqe1cs-D3JjC6wUX2ya2h2e_tniuaBq0,495 -sqlalchemy/inspection.py,sha256=qKEKG37N1OjxpQeVzob1q9VwWjBbjI1x0movJG7fYJ4,5063 -sqlalchemy/log.py,sha256=e_ztNUfZM08FmTWeXN9-doD5YKW44nXxgKCUxxNs6Ow,8607 -sqlalchemy/orm/__init__.py,sha256=BICvTXpLaTNe2AiUaxnZHWzjL5miT9fd_IU-ip3OFNk,8463 -sqlalchemy/orm/_orm_constructors.py,sha256=NiAagQ1060QYS9n5y_gzPvHQQz44EN1dVtamGVtde6E,103626 -sqlalchemy/orm/_typing.py,sha256=vaYRl4_K3n-sjc9u0Rb4eWWpBOoOi92--OHqaGogRvA,4973 -sqlalchemy/orm/attributes.py,sha256=e_U0A4TGWAzL3yXVvk9YVhIRjKM4RTsIE2PNRLn8LbU,92534 -sqlalchemy/orm/base.py,sha256=oCgscNoRrqHwYvc1Iz8ZFhoVXhalu45g9z0m_7_ldaE,27502 -sqlalchemy/orm/bulk_persistence.py,sha256=Ciea9MhJ6ZbAi-uGy5-Kj6lodO9bfRqPq8GSf2qFshE,72663 -sqlalchemy/orm/clsregistry.py,sha256=syn6bB-Ylx-juh5GDCmNrPZ58C-z6sdwRkbZFeKysQU,17974 -sqlalchemy/orm/collections.py,sha256=XxZC8d9UX9E2R-WlNH198OPWRPmpLuYt0Y26LrdbuHc,52252 -sqlalchemy/orm/context.py,sha256=eyh7xTo3SyxIHl8_NBUqJ_GpJ0kZtmnTt32Z67cfqgs,112973 -sqlalchemy/orm/decl_api.py,sha256=SJ25fQjjKyWZDQbq5S69eiybpOzns0LkRziP10iW5-E,64969 -sqlalchemy/orm/decl_base.py,sha256=ZlZmyNVOsCPA_pThMeXuWmAhlJwlvTxdGXhnARsKxhk,83288 -sqlalchemy/orm/dependency.py,sha256=4NMhoogevOiX1Wm5B1_yY2u9MHYlIjJNNoEVRE0yLwA,47631 -sqlalchemy/orm/descriptor_props.py,sha256=LgfdiO_U5uznq5ImenfbWGV5T47bH4b_ztbzB4B7FsU,37231 -sqlalchemy/orm/dynamic.py,sha256=Z4GpcVL8rM8gi0bytQOZXw-_kKi-sExbRWGjU30dK3g,9816 -sqlalchemy/orm/evaluator.py,sha256=PKrUW1zEOvmv1XEgc_hBdYqNcyk4zjWr_rJhCEQBFIc,12353 -sqlalchemy/orm/events.py,sha256=OZtTCpI-DVaE6CY16e42GUVpci1U1GjdNO76xU-Tj5Y,127781 
-sqlalchemy/orm/exc.py,sha256=zJgAIofYsWKjktqO5MFxze95GlJASziEOJJx-P5_wOU,7413 -sqlalchemy/orm/identity.py,sha256=5NFtF9ZPZWAOmtOqCPyVX2-_pQq9A5XeN2ns3Wirpv8,9249 -sqlalchemy/orm/instrumentation.py,sha256=WhElvvOWOn3Fuc-Asc5HmcKDX6EzFtBleLJKPZEc5A0,24321 -sqlalchemy/orm/interfaces.py,sha256=W6ADDLOixmm4tnSnUP_I9HFLj9MCO2bODk_WTNjkZGA,48797 -sqlalchemy/orm/loading.py,sha256=6Rd1hWtBPm7SfCUpjPQrcoUg_DSCcfhO8Qhz7SScjRE,58277 -sqlalchemy/orm/mapped_collection.py,sha256=FAqaTlOUCYqdws2KR_fW0T8mMWIrLuAxJGU5f4W1aGs,19682 -sqlalchemy/orm/mapper.py,sha256=-gkJKHeAJmIFT153WFIIySduyyLGbT5plCgSfnsa0I0,171668 -sqlalchemy/orm/path_registry.py,sha256=-aAEhGkDf_2ZUXmHQICQNOa4Z5xhTlhlYLag7eoVpxE,25920 -sqlalchemy/orm/persistence.py,sha256=Uz45Cwxi7FnNiSk2crbh3TzV7b9kb85vmcvOwy5NVmw,61701 -sqlalchemy/orm/properties.py,sha256=vbx_YiSjj3tI94-G-_ghbyWYcIIJQQeGG1P-0RC8Jv4,29065 -sqlalchemy/orm/query.py,sha256=GI_go9ErXYK1BteCmIh5E9iv-jfMJkRBVIlw0XmnYyk,118540 -sqlalchemy/orm/relationships.py,sha256=C40n_-oliMgJJ0FHfwsi1-dm963CrYeKJ5HEYjLdg_o,128899 -sqlalchemy/orm/scoping.py,sha256=-SNRAewfMJ4x4Um8X-yv0k1Thz8E1_kCBmbmG1l1auo,78617 -sqlalchemy/orm/session.py,sha256=1fzksIcb9DtKcwqkS1KkZngkrEYGUHmoNW_o6l8IXQ4,196114 -sqlalchemy/orm/state.py,sha256=1vtlz674sGFmwZ8Ih9TdrslA-0nhU2G52WgV-FoG2j0,37670 -sqlalchemy/orm/state_changes.py,sha256=XJLYYhTZu7nA6uD7xupbLZ9XSzqLYwrDJgW0ZAWvVGE,6815 -sqlalchemy/orm/strategies.py,sha256=qziXv4z2bJeF2qFSj6wogc9BLlxuOnT8nOcEvocVf88,119866 -sqlalchemy/orm/strategy_options.py,sha256=wMYd4E_nRb5ei8Fr3jWeSewNY2k1-AfqtYRGOLiHOFA,85043 -sqlalchemy/orm/sync.py,sha256=RdoxnhvgNjn3Lhtoq4QjvXpj8qfOz__wyibh0FMON0A,5779 -sqlalchemy/orm/unitofwork.py,sha256=hkSIcVonoSt0WWHk019bCDEw0g2o2fg4m4yqoTGyAoo,27033 -sqlalchemy/orm/util.py,sha256=rtClCjtg0eSSC8k-L30W0v6BauJaJuh9Nf-MSqofWuQ,80831 -sqlalchemy/orm/writeonly.py,sha256=R-MVxYDw0ZQ795H21yBtgGSZXWUzSovcb_SO1mv5hoI,22305 -sqlalchemy/pool/__init__.py,sha256=niqzCv2uOZT07DOiV2inlmjrW3lZyqDXGCjnOl1IqJ4,1804 
-sqlalchemy/pool/base.py,sha256=mT-PHTlVUGcYRVsMB9LQwNgndjhOTOorWX5-hNRi2FM,52236 -sqlalchemy/pool/events.py,sha256=wdFfvat0fSrVF84Zzsz5E3HnVY0bhL7MPsGME-b2qa8,13149 -sqlalchemy/pool/impl.py,sha256=MLSh83SGNNtZZgZvA-5tvTIT8Dz7U95Bgt8HO_oR1Ps,18944 -sqlalchemy/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -sqlalchemy/schema.py,sha256=yt4dcuMAKMleUHVidsAVAsm-JPpASFZXP2xM3pmzYHY,3194 -sqlalchemy/sql/__init__.py,sha256=Y-bZ25Zf-bxqsF2zUkpRGTjFuozNNVQHxUJV3Qmaq2M,5820 -sqlalchemy/sql/_dml_constructors.py,sha256=JF_XucNTfAk6Vz9fYiPWOgpIGtUkDj6VPILysLcrVhk,3795 -sqlalchemy/sql/_elements_constructors.py,sha256=eoQhkoRH0qox171ZSODyxxhj_HZEhO64rSowaN-I-v4,62630 -sqlalchemy/sql/_orm_types.py,sha256=0zeMit-V4rYZe-bB9X3xugnjFnPXH0gmeqkJou9Fows,625 -sqlalchemy/sql/_py_util.py,sha256=4KFXNvBq3hhfrr-A1J1uBml3b3CGguIf1dat9gsEHqE,2173 -sqlalchemy/sql/_selectable_constructors.py,sha256=fwVBsDHHWhngodBG205nvhM-Tb3uR1srbCnN3mPgrjA,18785 -sqlalchemy/sql/_typing.py,sha256=zYKlxXnUW_KIkGuBmBnzj-vFG1QON8_F9JN1dl9KSiM,12771 -sqlalchemy/sql/annotation.py,sha256=qHUEwbdmMD3Ybr0ez-Dyiw9l9UB_RUMHWAUIeO_r3gE,18245 -sqlalchemy/sql/base.py,sha256=kfmVNRimU5z6X6OKqMLMs1bDCFQ47BeyF_MZc23nkjY,73848 -sqlalchemy/sql/cache_key.py,sha256=ET2OIQ6jZK2FSxsdnCvhLCrNJ2Fp3zipQ-gvINgAjhQ,33668 -sqlalchemy/sql/coercions.py,sha256=lRciS5agnpVvx_vHYxJV-aN6QOVb_O4yCnMZ0s07GUE,40750 -sqlalchemy/sql/compiler.py,sha256=eT_zrKvApimVfycvcTdubQK8-QAzGHm5xWKdhOgnWUY,274965 -sqlalchemy/sql/crud.py,sha256=vFegNw5557ayS4kv761zh0bx0yikEKh1ovMrhErHelg,56514 -sqlalchemy/sql/ddl.py,sha256=rfb7gDvLmn_ktgH2xiXLRTczqnMOED1eakXuGuRPklg,45641 -sqlalchemy/sql/default_comparator.py,sha256=uXLr8B-X6KbybwTjLjZ2hN-WZAvqoMhZ-DDHJX7rAUw,16707 -sqlalchemy/sql/dml.py,sha256=oTW8PB-55qf6crAkbxh2JD-TvkT3MO1zqkKDrt5-2c8,65611 -sqlalchemy/sql/elements.py,sha256=RYq5N-IEPnhcDKtokeaCDIGZiUex8oDgwRLCDqjkk_g,176482 -sqlalchemy/sql/events.py,sha256=iWjc_nm1vClDBLg4ZhDnY75CkBdnlDPSPe0MGBSmbiM,18312 
-sqlalchemy/sql/expression.py,sha256=rw5tAm8vbd5Vm4MofTZ0ZcXsphz4z9xO_exy-gem6TM,7586 -sqlalchemy/sql/functions.py,sha256=tbBxIeAqLV3kc1YDxyt68mxw0fFy6e93ctRUZSuuf3I,63858 -sqlalchemy/sql/lambdas.py,sha256=h9sPCETBgAanLtVHQsRPHeY-hTEjM5nscq3m4bDstwM,49196 -sqlalchemy/sql/naming.py,sha256=BU0ZdSzXXKHTPhoaKMWJ3gPMoeZSJJe9-3YDYflmjJw,6858 -sqlalchemy/sql/operators.py,sha256=h5bgu31gukGdsYsN_0-1C7IGAdSCFpBxuRjOUnu1Two,76792 -sqlalchemy/sql/roles.py,sha256=drAeWbevjgFAKNcMrH_EuJ-9sSvcq4aeXwAqMXXZGYw,7662 -sqlalchemy/sql/schema.py,sha256=WKKwxkC9oNRHN-B4s35NkWcr5dvavccKf-_1t35Do8A,229896 -sqlalchemy/sql/selectable.py,sha256=5Za7eh4USrgVwJgQGVX1bb2w1qXcy-hGzGpWNPbhf68,237610 -sqlalchemy/sql/sqltypes.py,sha256=yXHvZXfZJmaRvMoX4_jXqazAev33pk0Ltwl5c-D5Ha4,128609 -sqlalchemy/sql/traversals.py,sha256=7GALHt5mFceUv2SMUikIdAb9SUcSbACqhwoei5rPkxc,33664 -sqlalchemy/sql/type_api.py,sha256=wdi3nmOBRdhG6L1z21V_PwQGB8CIRouMdNKoIzJA4Zo,84440 -sqlalchemy/sql/util.py,sha256=G-2ZI6rZ7XxVu5YXaVvLrApeAk5VwSG4C--lqtglgGE,48086 -sqlalchemy/sql/visitors.py,sha256=URpw-GxxUkwjEDbD2xXJGyFJavG5lN6ISoY34JlYRS8,36319 -sqlalchemy/testing/__init__.py,sha256=GgUEqxUNCxg-92_GgBDnljUHsdCxaGPMG1TWy5tjwgk,3160 -sqlalchemy/testing/assertions.py,sha256=RFTkxGq-kDvn3JSUuT_6bU1y0vtoI6pE6ryZgV2YEx4,31439 -sqlalchemy/testing/assertsql.py,sha256=cmhtZrgPBjrqIfzFz3VBWxVNvxWoRllvmoWcUCoqsio,16817 -sqlalchemy/testing/asyncio.py,sha256=QsMzDWARFRrpLoWhuYqzYQPTUZ80fymlKrqOoDkmCmQ,3830 -sqlalchemy/testing/config.py,sha256=HySdB5_FgCW1iHAJVxYo-4wq5gUAEi0N8E93IC6M86Q,12058 -sqlalchemy/testing/engines.py,sha256=c1gFXfpo5S1dvNjGIL03mbW2eVYtUD_9M_ZEfQO2ArM,13414 -sqlalchemy/testing/entities.py,sha256=KdgTVPSALhi9KkAXj2giOYl62ld-1yZziIDBSV8E3vw,3354 -sqlalchemy/testing/exclusions.py,sha256=jzVrBXqyQlyMgvfChMjJOd0ZtReKgkJ4Ik-0mkWe6KM,12460 -sqlalchemy/testing/fixtures/__init__.py,sha256=e5YtfSlkKDRuyIZhEKBCycMX5BOO4MZ-0d97l1JDhJE,1198 
-sqlalchemy/testing/fixtures/base.py,sha256=y5iEEdUZIft06fvAOXwKU73ciIFTO5AVgDDGzYD9nOY,12256 -sqlalchemy/testing/fixtures/mypy.py,sha256=9fuJ90F9LBki26dVEVOEtRVXG2koaK803k4nukTnA8o,11973 -sqlalchemy/testing/fixtures/orm.py,sha256=3JJoYdI2tj5-LL7AN8bVa79NV3Guo4d9p6IgheHkWGc,6095 -sqlalchemy/testing/fixtures/sql.py,sha256=ht-OD6fMZ0inxucRzRZG4kEMNicqY8oJdlKbZzHhAJc,15900 -sqlalchemy/testing/pickleable.py,sha256=G3L0xL9OtbX7wThfreRjWd0GW7q0kUKcTUuCN5ETGno,2833 -sqlalchemy/testing/plugin/__init__.py,sha256=vRfF7M763cGm9tLQDWK6TyBNHc80J1nX2fmGGxN14wY,247 -sqlalchemy/testing/plugin/bootstrap.py,sha256=VYnVSMb-u30hGY6xGn6iG-LqiF0CubT90AJPFY_6UiY,1685 -sqlalchemy/testing/plugin/plugin_base.py,sha256=TBWdg2XgXB6QgUUFdKLv1O9-SXMitjHLm2rNNIzXZhQ,21578 -sqlalchemy/testing/plugin/pytestplugin.py,sha256=0rRCp7RlnhJBg3gJEq0t0kJ-BCTQ34bqBE_lEQk5U3U,27656 -sqlalchemy/testing/profiling.py,sha256=SWhWiZImJvDsNn0rQyNki70xdNxZL53ZI98ihxiykbQ,10148 -sqlalchemy/testing/provision.py,sha256=6r2FTnm-t7u8MMbWo7eMhAH3qkL0w0WlmE29MUSEIu4,14702 -sqlalchemy/testing/requirements.py,sha256=MVuTKtZjeTZaYlrAU8XFIB1bhJA_AedqL_q7NwVEGiw,52956 -sqlalchemy/testing/schema.py,sha256=IImFumAdpzOyoKAs0WnaGakq8D3sSU4snD9W4LVOV3s,6513 -sqlalchemy/testing/suite/__init__.py,sha256=S8TLwTiif8xX67qlZUo5I9fl9UjZAFGSzvlptp2WoWc,722 -sqlalchemy/testing/suite/test_cte.py,sha256=d3OWDBNhnAwlyAz_QhFk-vKSWaAI3mADVnqdtTWOuwI,6451 -sqlalchemy/testing/suite/test_ddl.py,sha256=MItp-votCzvahlRqHRagte2Omyq9XUOFdFsgzCb6_-g,12031 -sqlalchemy/testing/suite/test_deprecations.py,sha256=7C6IbxRmq7wg_DLq56f1V5RCS9iVrAv3epJZQTB-dOo,5337 -sqlalchemy/testing/suite/test_dialect.py,sha256=eGJFZCwKmLrIl66ZlkLLZf5Fq6bzWI174gQsJt2bY2c,22923 -sqlalchemy/testing/suite/test_insert.py,sha256=pR0VWMQ9JJPbnANE6634PzR0VFmWMF8im6OTahc4vsQ,18824 -sqlalchemy/testing/suite/test_reflection.py,sha256=EJvTjRDimw9k90zlI5VCkmCzf7Tv5VF9y4O3D8SZMFU,109648 -sqlalchemy/testing/suite/test_results.py,sha256=9FFBNLeXcNRIC9FHfEjFKwfV6w2Bb58ulml_M8Zdokg,16914 
-sqlalchemy/testing/suite/test_rowcount.py,sha256=UVyHHQsU0TxkzV_dqCOKR1aROvIq7frKYMVjwUqLWfE,7900 -sqlalchemy/testing/suite/test_select.py,sha256=S81w-Dox6W29Tjmi6LIBJ4HuB5E8dDAzmePDm0PKTYo,61732 -sqlalchemy/testing/suite/test_sequence.py,sha256=DMqyJkL1o4GClrNjzoy7GDn_jPNPTZNvk9t5e-MVXeo,9923 -sqlalchemy/testing/suite/test_types.py,sha256=gPA6t-90Icnpj2ZzITwbqka1DB-rNOoh6_xS9dC-4HU,67805 -sqlalchemy/testing/suite/test_unicode_ddl.py,sha256=0zVc2e3zbCQag_xL4b0i7F062HblHwV46JHLMweYtcE,6141 -sqlalchemy/testing/suite/test_update_delete.py,sha256=_OxH0wggHUqPImalGEPI48RiRx6mO985Om1PtRYOCzA,3994 -sqlalchemy/testing/util.py,sha256=KsUInolFBXUPIXVZKAdb_8rQrW8yW8OCtiA3GXuYRvA,14571 -sqlalchemy/testing/warnings.py,sha256=sj4vfTtjodcfoX6FPH_Zykb4fomjmgqIYj81QPpSwH8,1546 -sqlalchemy/types.py,sha256=m3I9h6xoyT7cjeUx5XCzmaE-GHT2sJVwECiuSJl75Ss,3168 -sqlalchemy/util/__init__.py,sha256=tYWkZV6PYVfEW32zt48FCLH12VyV_kaNUa3KBAOYpSM,8312 -sqlalchemy/util/_collections.py,sha256=RbP4UixqNtRBUrl_QqYDiadVmELSVxxXm2drhvQaIKo,20078 -sqlalchemy/util/_concurrency_py3k.py,sha256=UtPDkb67OOVWYvBqYaQgENg0k_jOA2mQOE04XmrbYq0,9170 -sqlalchemy/util/_has_cy.py,sha256=3oh7s5iQtW9qcI8zYunCfGAKG6fzo2DIpzP5p1BnE8Q,1247 -sqlalchemy/util/_py_collections.py,sha256=irOg3nkzxmtdYfIS46un2cp0JqSiACI7WGQBg-BaEXU,16714 -sqlalchemy/util/compat.py,sha256=TdDfvL21VnBEdSUnjcx-F8XhmVFg9Mvyr67a4omWZAM,8760 -sqlalchemy/util/concurrency.py,sha256=eQVS3YDH3GwB3Uw5pbzmqEBSYTK90EbnE5mQ05fHERg,3304 -sqlalchemy/util/deprecations.py,sha256=L7D4GqeIozpjO8iVybf7jL9dDlgfTbAaQH4TQAX74qE,12012 -sqlalchemy/util/langhelpers.py,sha256=G67avnsStFbslILlbCHmsyAMnShS7RYftFr9a8uFDL8,65140 -sqlalchemy/util/preloaded.py,sha256=RMarsuhtMW8ZuvqLSuR0kwbp45VRlzKpJMLUe7p__qY,5904 -sqlalchemy/util/queue.py,sha256=w1ufhuiC7lzyiZDhciRtRz1uyxU72jRI7SWhhL-p600,10185 -sqlalchemy/util/tool_support.py,sha256=e7lWu6o1QlKq4e6c9PyDsuyFyiWe79vO72UQ_YX2pUA,6135 -sqlalchemy/util/topological.py,sha256=HcJgdCeU0XFIskgIBnTaHXfRXaulaEJRYRwKv4yPNek,3458 
-sqlalchemy/util/typing.py,sha256=C4jF7QTNo0w0bjvcIqSSTOvoy8FttuZtyTzjiyoIzQQ,20920 diff --git a/libs/SQLAlchemy-2.0.37.dist-info/WHEEL b/libs/SQLAlchemy-2.0.37.dist-info/WHEEL deleted file mode 100644 index 42e750dfb3..0000000000 --- a/libs/SQLAlchemy-2.0.37.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (75.3.0) -Root-Is-Purelib: false -Tag: cp38-cp38-macosx_12_0_x86_64 - diff --git a/libs/Unidecode-1.3.8.dist-info/METADATA b/libs/Unidecode-1.3.8.dist-info/METADATA deleted file mode 100644 index 5c7086b9e8..0000000000 --- a/libs/Unidecode-1.3.8.dist-info/METADATA +++ /dev/null @@ -1,310 +0,0 @@ -Metadata-Version: 2.1 -Name: Unidecode -Version: 1.3.8 -Summary: ASCII transliterations of Unicode text -Author: Tomaz Solc -Author-email: tomaz.solc@tablix.org -License: GPL -Classifier: License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+) -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Topic :: Text Processing -Classifier: Topic :: Text Processing :: Filters -Requires-Python: >=3.5 -License-File: LICENSE - -Unidecode, lossy ASCII transliterations of Unicode text -======================================================= - -It often happens that you have text data in Unicode, but you need to -represent it in ASCII. 
For example when integrating with legacy code that -doesn't support Unicode, or for ease of entry of non-Roman names on a US -keyboard, or when constructing ASCII machine identifiers from human-readable -Unicode strings that should still be somewhat intelligible. A popular example -of this is when making an URL slug from an article title. - -**Unidecode is not a replacement for fully supporting Unicode for strings in -your program. There are a number of caveats that come with its use, -especially when its output is directly visible to users. Please read the rest -of this README before using Unidecode in your project.** - -In most of examples listed above you could represent Unicode characters as -``???`` or ``\\15BA\\15A0\\1610``, to mention two extreme cases. But that's -nearly useless to someone who actually wants to read what the text says. - -What Unidecode provides is a middle road: the function ``unidecode()`` takes -Unicode data and tries to represent it in ASCII characters (i.e., the -universally displayable characters between 0x00 and 0x7F), where the -compromises taken when mapping between two character sets are chosen to be -near what a human with a US keyboard would choose. - -The quality of resulting ASCII representation varies. For languages of -western origin it should be between perfect and good. On the other hand -transliteration (i.e., conveying, in Roman letters, the pronunciation -expressed by the text in some other writing system) of languages like -Chinese, Japanese or Korean is a very complex issue and this library does -not even attempt to address it. It draws the line at context-free -character-by-character mapping. So a good rule of thumb is that the further -the script you are transliterating is from Latin alphabet, the worse the -transliteration will be. - -Generally Unidecode produces better results than simply stripping accents from -characters (which can be done in Python with built-in functions). 
It is based -on hand-tuned character mappings that for example also contain ASCII -approximations for symbols and non-Latin alphabets. - -**Note that some people might find certain transliterations offending.** Most -common examples include characters that are used in multiple languages. A user -expects a character to be transliterated in their language but Unidecode uses a -transliteration for a different language. It's best to not use Unidecode for -strings that are directly visible to users of your application. See also the -*Frequently Asked Questions* section for more info on common problems. - -This is a Python port of ``Text::Unidecode`` Perl module by Sean M. Burke -. - - -Module content --------------- - -This library contains a function that takes a string object, possibly -containing non-ASCII characters, and returns a string that can be safely -encoded to ASCII:: - - >>> from unidecode import unidecode - >>> unidecode('kožuÅ¡Äek') - 'kozuscek' - >>> unidecode('30 \U0001d5c4\U0001d5c6/\U0001d5c1') - '30 km/h' - >>> unidecode('\u5317\u4EB0') - 'Bei Jing ' - -You can also specify an *errors* argument to ``unidecode()`` that determines -what Unidecode does with characters that are not present in its transliteration -tables. The default is ``'ignore'`` meaning that Unidecode will ignore those -characters (replace them with an empty string). ``'strict'`` will raise a -``UnidecodeError``. The exception object will contain an *index* attribute that -can be used to find the offending character. ``'replace'`` will replace them -with ``'?'`` (or another string, specified in the *replace_str* argument). -``'preserve'`` will keep the original, non-ASCII character in the string. 
Note -that if ``'preserve'`` is used the string returned by ``unidecode()`` will not -be ASCII-encodable!:: - - >>> unidecode('\ue000') # unidecode does not have replacements for Private Use Area characters - '' - >>> unidecode('\ue000', errors='strict') - Traceback (most recent call last): - ... - unidecode.UnidecodeError: no replacement found for character '\ue000' in position 0 - -A utility is also included that allows you to transliterate text from the -command line in several ways. Reading from standard input:: - - $ echo hello | unidecode - hello - -from a command line argument:: - - $ unidecode -c hello - hello - -or from a file:: - - $ unidecode hello.txt - hello - -The default encoding used by the utility depends on your system locale. You can -specify another encoding with the ``-e`` argument. See ``unidecode --help`` for -a full list of available options. - -Requirements ------------- - -Nothing except Python itself. Unidecode supports Python 3.5 or later. - -You need a Python build with "wide" Unicode characters (also called "UCS-4 -build") in order for Unidecode to work correctly with characters outside of -Basic Multilingual Plane (BMP). Common characters outside BMP are bold, italic, -script, etc. variants of the Latin alphabet intended for mathematical notation. -Surrogate pair encoding of "narrow" builds is not supported in Unidecode. - -If your Python build supports "wide" Unicode the following expression will -return True:: - - >>> import sys - >>> sys.maxunicode > 0xffff - True - -See `PEP 261 `_ for details -regarding support for "wide" Unicode characters in Python. 
- - -Installation ------------- - -To install the latest version of Unidecode from the Python package index, use -these commands:: - - $ pip install unidecode - -To install Unidecode from the source distribution and run unit tests, use:: - - $ python setup.py install - $ python setup.py test - -Frequently asked questions --------------------------- - -German umlauts are transliterated incorrectly - Latin letters "a", "o" and "u" with diaeresis are transliterated by - Unidecode as "a", "o", "u", *not* according to German rules "ae", "oe", - "ue". This is intentional and will not be changed. Rationale is that these - letters are used in languages other than German (for example, Finnish and - Turkish). German text transliterated without the extra "e" is much more - readable than other languages transliterated using German rules. A - workaround is to do your own replacements of these characters before - passing the string to ``unidecode()``. - -Japanese Kanji is transliterated as Chinese - Same as with Latin letters with accents discussed in the answer above, the - Unicode standard encodes letters, not letters in a certain language or - their meaning. With Japanese and Chinese this is even more evident because - the same letter can have very different transliterations depending on the - language it is used in. Since Unidecode does not do language-specific - transliteration (see next question), it must decide on one. For certain - characters that are used in both Japanese and Chinese the decision was to - use Chinese transliterations. If you intend to transliterate Japanese, - Chinese or Korean text please consider using other libraries which do - language-specific transliteration, such as `Unihandecode - `_. - -Unidecode should support localization (e.g. a language or country parameter, inspecting system locale, etc.) - Language-specific transliteration is a complicated problem and beyond the - scope of this library. Changes related to this will not be accepted. 
Please - consider using other libraries which do provide this capability, such as - `Unihandecode `_. - -Unidecode should automatically detect the language of the text being transliterated - Language detection is a completely separate problem and beyond the scope of - this library. - -Unidecode should use a permissive license such as MIT or the BSD license. - The maintainer of Unidecode believes that providing access to source code - on redistribution is a fair and reasonable request when basing products on - voluntary work of many contributors. If the license is not suitable for - you, please consider using other libraries, such as `text-unidecode - `_. - -Unidecode produces completely wrong results (e.g. "u" with diaeresis transliterating as "A 1/4 ") - The strings you are passing to Unidecode have been wrongly decoded - somewhere in your program. For example, you might be decoding utf-8 encoded - strings as latin1. With a misconfigured terminal, locale and/or a text - editor this might not be immediately apparent. Inspect your strings with - ``repr()`` and consult the - `Unicode HOWTO `_. - -Why does Unidecode not replace \\u and \\U backslash escapes in my strings? - Unidecode knows nothing about escape sequences. Interpreting these sequences - and replacing them with actual Unicode characters in string literals is the - task of the Python interpreter. If you are asking this question you are - very likely misunderstanding the purpose of this library. Consult the - `Unicode HOWTO `_ and possibly - the ``unicode_escape`` encoding in the standard library. - -I've upgraded Unidecode and now some URLs on my website return 404 Not Found. - This is an issue with the software that is running your website, not - Unidecode. Occasionally, new versions of Unidecode library are released - which contain improvements to the transliteration tables. This means that - you cannot rely that ``unidecode()`` output will not change across - different versions of Unidecode library. 
If you use ``unidecode()`` to - generate URLs for your website, either generate the URL slug once and store - it in the database or lock your dependency of Unidecode to one specific - version. - -Some of the issues in this section are discussed in more detail in `this blog -post `_. - - -Performance notes ------------------ - -By default, ``unidecode()`` optimizes for the use case where most of the strings -passed to it are already ASCII-only and no transliteration is necessary (this -default might change in future versions). - -For performance critical applications, two additional functions are exposed: - -``unidecode_expect_ascii()`` is optimized for ASCII-only inputs (approximately -5 times faster than ``unidecode_expect_nonascii()`` on 10 character strings, -more on longer strings), but slightly slower for non-ASCII inputs. - -``unidecode_expect_nonascii()`` takes approximately the same amount of time on -ASCII and non-ASCII inputs, but is slightly faster for non-ASCII inputs than -``unidecode_expect_ascii()``. - -Apart from differences in run time, both functions produce identical results. -For most users of Unidecode, the difference in performance should be -negligible. - - -Source ------- - -You can get the latest development version of Unidecode with:: - - $ git clone https://www.tablix.org/~avian/git/unidecode.git - -There is also an official mirror of this repository on GitHub at -https://github.com/avian2/unidecode - - -Contact -------- - -Please make sure to read the `Frequently asked questions`_ section above before -contacting the maintainer. - -Bug reports, patches and suggestions for Unidecode can be sent to -tomaz.solc@tablix.org. - -Alternatively, you can also open a ticket or pull request at -https://github.com/avian2/unidecode - - -Copyright ---------- - -Original character transliteration tables: - -Copyright 2001, Sean M. Burke , all rights reserved. 
- -Python code and later additions: - -Copyright 2024, Tomaž Šolc - -This program is free software; you can redistribute it and/or modify it -under the terms of the GNU General Public License as published by the Free -Software Foundation; either version 2 of the License, or (at your option) -any later version. - -This program is distributed in the hope that it will be useful, but WITHOUT -ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for -more details. - -You should have received a copy of the GNU General Public License along -with this program; if not, write to the Free Software Foundation, Inc., 51 -Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. The programs and -documentation in this dist are distributed in the hope that they will be -useful, but without any warranty; without even the implied warranty of -merchantability or fitness for a particular purpose. - -.. - vim: set filetype=rst: diff --git a/libs/Unidecode-1.3.8.dist-info/RECORD b/libs/Unidecode-1.3.8.dist-info/RECORD deleted file mode 100644 index 92b5732cab..0000000000 --- a/libs/Unidecode-1.3.8.dist-info/RECORD +++ /dev/null @@ -1,203 +0,0 @@ -../../bin/unidecode,sha256=NbqWJOWfXecMdJfbAD2hnDahz-mDBHIWjTSidYHnxSA,236 -Unidecode-1.3.8.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -Unidecode-1.3.8.dist-info/LICENSE,sha256=gXf5dRMhNSbfLPYYTY_5hsZ1r7UU1OaKQEAQUhuIBkM,18092 -Unidecode-1.3.8.dist-info/METADATA,sha256=TjOEznFzIHnDfx8CRJjrHfMWiIOOa6drPp6zqa0Obc4,13615 -Unidecode-1.3.8.dist-info/RECORD,, -Unidecode-1.3.8.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -Unidecode-1.3.8.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPEjma1nZyJ0,92 -Unidecode-1.3.8.dist-info/entry_points.txt,sha256=zjje8BrCWj_5MDf7wASbnNdeWYxxdt5BuTQI9x8c_24,50 -Unidecode-1.3.8.dist-info/top_level.txt,sha256=4uYNG2l04s0dm0mEQmPLo2zrjLbhLPKUesLr2dOTdpo,10 
-unidecode/__init__.py,sha256=uUP370Iden1EsQtgglNd57DMKOG5mXh9UxIMm8yhDfQ,4230 -unidecode/__main__.py,sha256=VWYWCclyJsdhtNMQtryMFbgsCZtNUsWcEuS7ZOlH1Jc,40 -unidecode/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -unidecode/util.py,sha256=ZxssZFzbZlAf6oiDIu2HZjrAQckbOD2VPD9uy-wZgCI,1652 -unidecode/x000.py,sha256=DaoVzSCvFzhzHbFtzFOE8uS9CgWD7K3JuhmACpFbivY,3038 -unidecode/x001.py,sha256=ylHh3UVaPtibVuUEEWvdSeDFK0OXrWt4-LnxAgYD6qo,3891 -unidecode/x002.py,sha256=NWord8myi2jYV4YwlNZFbKu6HgbbolWLNCOlseR3WsY,3871 -unidecode/x003.py,sha256=5gZS5aXbQ4Z8aH08EehKx4SqAgUNBcTz_x-I3o5qvVg,3825 -unidecode/x004.py,sha256=KAeJjKgkdzMU1MK9J9JqmPeKBDgjhG5UcfyAa594Hk8,4054 -unidecode/x005.py,sha256=7ezPyF52iKiK5LPf6TA5zVUZ7RbIjz7EVLS42aXG9ug,3920 -unidecode/x006.py,sha256=Jye83eXYQqtpowxsQ01jQSDlhAjWbmGNFRdmbojvgyE,3912 -unidecode/x007.py,sha256=6lnnnArEmvi3XeZLFwrCZGStdDKDAHt7alIpdo8S7rk,3987 -unidecode/x009.py,sha256=xNz8qrO1PDseMjOwA0rjsiAhNZTO_uFgjpmbp7qcH_c,4013 -unidecode/x00a.py,sha256=2xksKrrMWF9xLbs8OPfTxT7g86ciwdK9QZ8AQeecmus,4019 -unidecode/x00b.py,sha256=Y7GlfYE2v-D3BkZd3ctfo6L21VG-aR2OFESRb8_WRH4,4019 -unidecode/x00c.py,sha256=jOGpNU7vxghp3jwUuUATiSrDwvgZuOe8nlkcjJYTHco,4007 -unidecode/x00d.py,sha256=lkFf8d_oXN8IZop6CFlYpKdWuJqWGnH0WQMwir4_WgI,4025 -unidecode/x00e.py,sha256=ARKK__sIXUXL4h2Egac2f9ng2Z_YCGD5kYP2oj-ptlI,3989 -unidecode/x00f.py,sha256=TdSmr755Jw1TRtxk5Z4UPZIp1CVhXii8S0zSAcQ2vWk,3998 -unidecode/x010.py,sha256=YhXX8s1dP7YJMzaaV9CMBCOraExb6QrQQWbkFT3d2Jo,4011 -unidecode/x011.py,sha256=bc5lAse0haio2pceaADqkjzTh8MdgNTwTh04W2FJO-Q,4120 -unidecode/x012.py,sha256=XoiRFvNtHV29Q76KcpPBSrC4sLd6faTz4tKZEMIQ45M,4293 -unidecode/x013.py,sha256=UkxSb2Q4xq7dydCZNg_f0Nu90slVSmAckq-btDZ7uAA,4190 -unidecode/x014.py,sha256=4R3w_Dgg9yCw-9KkpqHfWFzyQZZfdb444fMIh240l-Q,4298 -unidecode/x015.py,sha256=TB6O4l2qPxbmF2dejlxXLqX5tTfjl95cMYx1770GHs0,4329 -unidecode/x016.py,sha256=Tx3P-DjDqCLuKbmiG-0cMzw2xFVuojQg3o5yyt4506E,4114 
-unidecode/x017.py,sha256=Ks_t-4BgOrTqmqYC6BpqXePI-YyStE7p3P27lzBefSA,4038 -unidecode/x018.py,sha256=C1jpnsK3YO27xpiWJ2DXSAkV9dsPUwKqWtkgePtzp3g,3998 -unidecode/x01d.py,sha256=EwAYkMVHAFvbKRzsQ-e4cRcvS_eia3kYCM2GcaqkBWY,3701 -unidecode/x01e.py,sha256=rG1jtL0dpL-RNsvG-AxX1izkyWkbgwe0BWhATDJtmgg,3845 -unidecode/x01f.py,sha256=NUC2rlFE9YpODdDn4e5uzV7uIqEBNvKw486nOD7UQpQ,3877 -unidecode/x020.py,sha256=lXj8wkWMbD2Iuw3OCrEqZofJjJccnvY3ro5SpyotCq8,4080 -unidecode/x021.py,sha256=QQGWXFmQhQ9ei6rVCx2y-pbx_-7n8bv9DGJpdK_Q8jc,3987 -unidecode/x022.py,sha256=wX6BUR7yKGgSICIzY_B15mqgnjvRbSlepM6aqb2tnGY,4085 -unidecode/x023.py,sha256=weebXXqY3E8OhqS0ziAKHo58lCl3dkkyD0w2aKHqv7Q,4089 -unidecode/x024.py,sha256=JmCTFnYtmMHQvfYP-4f5uDiCxlwhNk7LZLyxLWWGjK8,4003 -unidecode/x025.py,sha256=DAMdCakIv0m21AWcRUNK9QWReCYXPSwVDmbFdriM4qc,3854 -unidecode/x026.py,sha256=TKU0cwRXL8vLAmZ26R8E2dpkmXmRKx4wTU0VEbuTAnM,3874 -unidecode/x027.py,sha256=qZacxfhS5nWgBhbrIT6-wm9yGP_OlAVRJ-GcmUhPl14,3718 -unidecode/x028.py,sha256=FZPCZ9w3N3WOI42h2gHEQgVOAlLBNTZjMu_KQQkIMdk,5069 -unidecode/x029.py,sha256=b8afmG-DjZmHHy0XdjcZlSXtlnwjScIcPBGbMv_YSUQ,4090 -unidecode/x02a.py,sha256=QHAyHnegV0OVOTQ5OnfJKzkaHQIFbWmmMjiFcHGUZi0,4093 -unidecode/x02c.py,sha256=ZkmMztaYT7d81E9qtUU9ayG9hBi5XqWY_ta-X5Hsaqc,4076 -unidecode/x02e.py,sha256=VCGlK7123S2wDzfkggEARyGZKi-0ElepSYECGGluf7E,4072 -unidecode/x02f.py,sha256=hcUTlkw_6Hjnxsk0e28RTd-HWpSK0IGq5hkrwA1fJFk,4091 -unidecode/x030.py,sha256=wdodiC_N7bMsh8vSmVF0STHGZnOAsZnVN-_RPiqupRA,4028 -unidecode/x031.py,sha256=jed0xoqQmUnnOqATVe7z9F2zigAZVAJX6BrWtXFPWbs,4044 -unidecode/x032.py,sha256=lj4IwokKA0IdIJiJJTfmBUGVYmWvLowFtPLwLzhfokU,4466 -unidecode/x033.py,sha256=ImTd4BRRPgCqWmrvJPoikoL0dJMKH8eQgd48vksi60A,4513 -unidecode/x04d.py,sha256=hcUTlkw_6Hjnxsk0e28RTd-HWpSK0IGq5hkrwA1fJFk,4091 -unidecode/x04e.py,sha256=X-Pzl5_QGkYexzNTY04C_tq3RvbyAUYemf0C4mIl5-U,4630 -unidecode/x04f.py,sha256=BM29-2OTb6aR7CN7NMN3nnC9BGxgediLEHGMcIB5ENU,4597 
-unidecode/x050.py,sha256=SPmkA-PD39V8eO4DByxVa8HyqanGcw54xW51kLnaieY,4676 -unidecode/x051.py,sha256=GGJT-fiYxTk_FAAW6eTobT3pOGI-Qq1M3eCxN7c7f5E,4681 -unidecode/x052.py,sha256=a09eo_5pL6jpU9TW-zG2w2iXTYp6awtQ4OxGnLdcwKg,4654 -unidecode/x053.py,sha256=4x8X4Hrf56DOAINYi8JxStXW4m7FGJNiH-51JzCxE64,4608 -unidecode/x054.py,sha256=N8hO8YrlNoepnrYLUZ_EcTVRqI1lekqq3h-i-UNlTJw,4577 -unidecode/x055.py,sha256=_PK65HLpk7puojAFGeOm5Cdk-PDevHHI6NR8sHuo0Ko,4595 -unidecode/x056.py,sha256=mlNNouWFIjpZdjuBWhxFGSB_UDh0OItlsShjHQRjhxc,4607 -unidecode/x057.py,sha256=uivN7P3d-kkonqBATLKOM0ni4jVvsSzA9SOEFhbOuP4,4627 -unidecode/x058.py,sha256=lPNpdrFLFfaBoQz8Cwm2Ess8m4m_45ylIHspOUpDrLk,4664 -unidecode/x059.py,sha256=BdA_NFQqr-aGpuyo9he6uxDwm9facV-ql5axiKqgByk,4640 -unidecode/x05a.py,sha256=9UFNWH8FpkHUArS2-Td3VYOo21VQkoqYW7A0Slk0YhQ,4632 -unidecode/x05b.py,sha256=yfWnRe6mtnqY3b3Ac2_IJBA5vBYb64PYF9XM4HSZygU,4666 -unidecode/x05c.py,sha256=6iZj6HHnJ4lF3k1i68-9Dgge2H3KAlyZtNxW0BIu66o,4602 -unidecode/x05d.py,sha256=Wudbb7xOtWry4Xu5xm9j80vFkigCedGq5uHcYAYl0o8,4660 -unidecode/x05e.py,sha256=wKqvr0lkEy1yfXbYj2OtXHBxw5FxVz_MzJULXWrGvA0,4662 -unidecode/x05f.py,sha256=NnSIJOl_9CC4IRwBIQ6CEhTfvvzZ2PXhZSLJuC6sgHY,4656 -unidecode/x060.py,sha256=-Ajr6Q7RP_fdetvZ2hWflxNiaOokB3q5oeRCt7CqcDc,4640 -unidecode/x061.py,sha256=aqOY7Jt--4JhdktU2RB1bf5J0fH27fRDLhV55aR3gO0,4656 -unidecode/x062.py,sha256=wxQkvAGrppx4Y5E-hAVCps0I9bz_fbG1YSqs1E8k9sU,4616 -unidecode/x063.py,sha256=wAcyLr9CJ35G4sNTfvYb7DtFjeRlyo585JC2_-aBuQM,4648 -unidecode/x064.py,sha256=8e775dKt12GedypWT9jPXeqWLkW5-AsVG106FlfiTvA,4651 -unidecode/x065.py,sha256=fPak6ADqEOBFPfP2u7pAIZ_ObbgtdGFa4enmjVBpsVE,4634 -unidecode/x066.py,sha256=K6g6XTVEFEAppiln64vxgA2V1FMWl0YdbhDJgihQsTA,4675 -unidecode/x067.py,sha256=5d8zLxoh2vS76uBWQckXGbeyjzEUJ5aJMAMvNA-YxLs,4627 -unidecode/x068.py,sha256=-UhVYRQGQtxQJbgwyHAox-JHizu_RvO7Lb5I1F9mpvY,4670 -unidecode/x069.py,sha256=cRQZP6ZGJQsx5l2qSfpe9XmiDfxlGh7rEh30_u9oTSo,4665 
-unidecode/x06a.py,sha256=iXZkuxRRsgUuNlVlNliR7gio4M4WUN0JNCPdINrzYlY,4662 -unidecode/x06b.py,sha256=5GRxv36m9zR163UNrGb_c64-uueKrpqyeeRWG9ZDme0,4600 -unidecode/x06c.py,sha256=RNKzdImtimBIuLtvbsUAzYSV7iZmVvPWyV8dj91KJlw,4637 -unidecode/x06d.py,sha256=jFvmxRU4VHSeoahyFtHIHqpvfqvJbNzvsrDn4Kd7WAQ,4647 -unidecode/x06e.py,sha256=1esJUSaQ4QotdjhxG6vtvC3CDWjY2rTr4EVLD4if8CU,4630 -unidecode/x06f.py,sha256=s7JTw6eW_6pqjCc1DEMDQ178vtllhHiejtvb360vDVc,4638 -unidecode/x070.py,sha256=oLeIanQmBbyz8OU_l5VQ-POF8mY5XbLL3rfEjr3XkUw,4677 -unidecode/x071.py,sha256=v1S9E-H06WC0rr10gP27Dqev2nxRlymECJ681BSs9Y4,4644 -unidecode/x072.py,sha256=veZOktQoJQ2wmKKLjq17UM5hAa3xo3nRLdFgSHjv8rI,4645 -unidecode/x073.py,sha256=NWkyVIbNgSu_U9katu1LRaLkL7iHx4bSuRtfsqRG4yk,4642 -unidecode/x074.py,sha256=AocniPNZMcBTeiDWA6OLzQilcWMc_ZHh7pCGXTzqMSg,4686 -unidecode/x075.py,sha256=P3SrhI5BQ5sJ66hyu_LWDONpuzLZJBKsl7f-A37sJXc,4675 -unidecode/x076.py,sha256=9rwfe41pej250BneHHO663PU9vVWyrnHRnP11VUqxEc,4635 -unidecode/x077.py,sha256=ugbmqiry2-tBstXW0Q9o7XEZQimpagZK1EttvBCK1sE,4673 -unidecode/x078.py,sha256=NxeTS_dXa6jmc7iDVUve6_SqO4AhjULng_Gei7pqbRE,4630 -unidecode/x079.py,sha256=ucPPGrgm-AnnWdVFd__unqiSMtdEpZQF6E8ta6IzdiQ,4590 -unidecode/x07a.py,sha256=fjyeO--0F5Kd80F0yOvFIIuiDW7lFKWaVIUqQRIwR9k,4659 -unidecode/x07b.py,sha256=3g39Yw2ZMs7_tcC3OT2e4nGxaWMY6V8iJ2Z6PsnhPS4,4667 -unidecode/x07c.py,sha256=Cbs98r7vdJD_YxpXgAAYoPdA7RDYR82MXN44TQJxoN8,4647 -unidecode/x07d.py,sha256=EKFrTQTNFLGnsm3qI76ALxrxGCcDuyEbapi9j9jy1B4,4678 -unidecode/x07e.py,sha256=r96YBkHoCO8GAvO0j3BdY45RdlNkqpiFWl-Q6mieVcc,4680 -unidecode/x07f.py,sha256=MNRU4aNOE2dKl4p0_WPy-oga_cx7wZ6w4Mlk-RN3PeU,4658 -unidecode/x080.py,sha256=9feIVoCdOFolKgZfRCpdL80l9kRvjbl0z9sV4FAk2Qg,4643 -unidecode/x081.py,sha256=ffvplClKTCDre83MhO7-X3tgdUWfjvkUMxQCPEnRj_U,4671 -unidecode/x082.py,sha256=XTFSjZO8LO3SFcYh9h-Oqby6a67hFDx4B-AQRyptlJU,4641 -unidecode/x083.py,sha256=wXP1lZZAravJZm1f1bCT1cumocFGRG0ZQmgFMVCOSDQ,4635 
-unidecode/x084.py,sha256=inA5ODar8zAherLeTyX9-KtCUOrTigxDwb3ei2Kl1CE,4630 -unidecode/x085.py,sha256=QDKK-wbb04nCFc91pSGhyHsxcS_MhdeQLsRqqXhV9h8,4628 -unidecode/x086.py,sha256=DcXhJemXKgrGwPBRFCbINxfxatqjpy7jFgM9jbN8eEk,4608 -unidecode/x087.py,sha256=nddqMqheth-n7kHCyjRNvVPO82UI_PdOic1kQer_iF0,4641 -unidecode/x088.py,sha256=0PVL160fpQ-Kkw29X-bLviyfs4TKIAwp_-SwEWsvemM,4639 -unidecode/x089.py,sha256=RrIGIX6dojryaYh6Da4ysaM_-yREbNZ-HasFX2O_SQc,4624 -unidecode/x08a.py,sha256=NjMp9ck824PXG2gcJXfi_9oQCFgXhhiallO3bYCtXCE,4647 -unidecode/x08b.py,sha256=vUwkG_IOBIhB8gQAaVbgD5EAIA1wY4BBPk5kXwAcPg0,4639 -unidecode/x08c.py,sha256=0sHcCXB9YzXE9oJcwzPtPUltCn6Oo-itdY5vk6MbtdA,4628 -unidecode/x08d.py,sha256=SWD7xSIR-1P30S5-yuNDHpVjWlpfxmUxuJr7f178WsA,4630 -unidecode/x08e.py,sha256=Ym0RQUdsgZJdVmOI56nzSmzfxuKjuS5MUbPSOeyv2Ws,4655 -unidecode/x08f.py,sha256=tNFpnEzNLIY4xHbcR0rZqaoNUKinj-XO2XfSnh6c4u4,4649 -unidecode/x090.py,sha256=XGomJNriNZsHQRUDy3vKwFc4W38uxeqWpn5SHM4G4j8,4627 -unidecode/x091.py,sha256=u8tRZhaVNa2mbsDSYIKqRZ3u4Npj-kiz55rC9izadnM,4653 -unidecode/x092.py,sha256=NvNce8y3YFlPI20pN1a4LY68sid5ApetXs9bo9cxb7w,4644 -unidecode/x093.py,sha256=O2e1p58RB1TS2Au-JSjft3FgPBx1YRAGxnviqSsfnYE,4646 -unidecode/x094.py,sha256=k8ZwNc9qCSzU2b8wMrWUeGSg39tPMiwiKHCiKw6zteM,4653 -unidecode/x095.py,sha256=H2O3xJDE3cAOecyYMUTl6fLs9shETPFwZshtIIK5V3E,4667 -unidecode/x096.py,sha256=sev3zRm46EBQgEtkR4T-Ah0cHYEM-9CM2pFCCc21BFI,4608 -unidecode/x097.py,sha256=S1nZBdt-MHySCAgV9MDdNSQTCSaD62iAhz8EjikfS5A,4633 -unidecode/x098.py,sha256=w0KMxUF7XyG9gdfTJylYsG_clvm3RD_LIM5XHR0xsdY,4643 -unidecode/x099.py,sha256=nlaWb2nRTSnFfDjseDRJ1a3Y0okOHbNA1-htKo9SkAM,4627 -unidecode/x09a.py,sha256=Z8pQsTc62CWgm0JPnj3kokKKf9_qfzRpo0u5iH61CaE,4623 -unidecode/x09b.py,sha256=njA75MlCgC-5UuS1Hvm-mdgsVwW9r801odfBTJg-BFE,4653 -unidecode/x09c.py,sha256=NveMhN85_Cm4H1cnfHDTcnSj675MOVBq9Lkjpw3YxA0,4659 -unidecode/x09d.py,sha256=_0fAaUhK3axhhWLA4QPNJf_J9YSs1MCKx2xR-vl5QYI,4630 
-unidecode/x09e.py,sha256=wreETFCeKf9bVvLc3E7MUAvlu2CN5xKeebf3ESuV13s,4613 -unidecode/x09f.py,sha256=pNAdX7-9yMEPXtozjCuXxzc74eCVft9meOTxCtU7vJw,4420 -unidecode/x0a0.py,sha256=EpopPuuocybgCcpX19Ii-udqsPXJjSces3360lqJ8vs,4428 -unidecode/x0a1.py,sha256=0hvF77d5E640SujjdHVqy5gMUH85gEdOv80eRvCEAGM,4469 -unidecode/x0a2.py,sha256=9Icpfk_ElebYd_xN09OMziFrpAGPXEUNVmawpnhbBaQ,4503 -unidecode/x0a3.py,sha256=G1lPrnCqYz0s4wsSa1qM0WgrZBWO_beRk3AgK0iVZLA,4521 -unidecode/x0a4.py,sha256=nWPXzCragW0rsDQPa6ooo9KOc-SOjVCP8KIOuGc7WpU,4373 -unidecode/x0ac.py,sha256=wj7hl88VlCdc_eGpOL4m4CBJILyQqd9atObC5Xvd0aA,4709 -unidecode/x0ad.py,sha256=Rz5rn0fM-CqRjaN4TvSq_1StAQdyAF2WX3cUvcQHaWU,4766 -unidecode/x0ae.py,sha256=jNIBVB-Pw2ZNihAeyWbDIEq9Yt9zlhdfGylfvAaxUks,4875 -unidecode/x0af.py,sha256=Am5YC8Zfrun5NUKxU6LrU2-d5GgkGSBs7fZt2rqSi74,5012 -unidecode/x0b0.py,sha256=1bgHerCDAqIcJHYeGddJjJfRWiHCKtU2B0J-XGvcbbc,4853 -unidecode/x0b1.py,sha256=Six-lzGdvgJx4YsIa0lTusnBEV1zbCKQCquq17TDJoQ,4746 -unidecode/x0b2.py,sha256=HQDbmglNi4QfiRSGucUclgq_4FGpRjbJkWU1JTLAFGc,4680 -unidecode/x0b3.py,sha256=1lqxghVZiiStOAx1IG_vc1zZTXrAa7Z__QY6ZWvo2aA,4741 -unidecode/x0b4.py,sha256=V6BNSTxpyP8VuqF7x5z7bpF3MQAkwZfKtEu6NFr_vSg,4762 -unidecode/x0b5.py,sha256=9NVd2hNLyRlLceVlznba1dreqBGeKU_0gzmkgAw0gyg,4919 -unidecode/x0b6.py,sha256=V_vRsB0GICu9hqhO4pnbPWreDSevJ3bbmLRJkuQUxnE,4996 -unidecode/x0b7.py,sha256=CwBaCBICyVagnFjUpkwabuDvBJw7gAeqkSRpfBAVv8s,4833 -unidecode/x0b8.py,sha256=xYp-xy2LIwq95OWyS9vYMc_Z5od9dud0W1dxeg4P_Jk,4714 -unidecode/x0b9.py,sha256=z3hKNzBq_MeK9V3AyQzaY58cgi0-VGOsLk3-UFmszLQ,4704 -unidecode/x0ba.py,sha256=4gubifoBeJUUrwXEI4litJygekufEycmWDLrJ-Qvs14,4765 -unidecode/x0bb.py,sha256=bsCTABUdC6yTn8_0vhYe5jRP1z_BoAdificB8Y1c1hA,4730 -unidecode/x0bc.py,sha256=AhQvAz7yHlbQ_4c2KOIisq07eZJ5JQn6cV8I31oT9kg,4707 -unidecode/x0bd.py,sha256=IGtyVxIUr1mU3hokn6iUDJhXZezQozVvfWOyf4Pa5dI,4752 -unidecode/x0be.py,sha256=1D-hXu3p3wvOnGVMjEqVsrltYe7UuSwit2yqN5eFizc,4849 
-unidecode/x0bf.py,sha256=NkEXqr2ER3BNFkTasDV9CHnkRBuX_Ao5OHGv_NgKAew,5010 -unidecode/x0c0.py,sha256=zDlHpyM0omza5TqGLb8Rhl7Wd-LlV1AjvH_xdnEnNFw,4856 -unidecode/x0c1.py,sha256=AC6xJyx9UblKAGNqGN7AH2Idb3_3vbc-I5U0Myig5fA,4765 -unidecode/x0c2.py,sha256=siRYLA8Cv9Z8XsRp3WQOBdRrPkjJOuEh8z1-3SMXOzQ,4710 -unidecode/x0c3.py,sha256=hlAFe6lsz0aLMixlpeFjV4I-WTIiA3B2BU58yGlTwRg,4975 -unidecode/x0c4.py,sha256=z3xZwSkf5ru1FCdBMHOr5fyglzVdyPhQVtWjq9xInsQ,5024 -unidecode/x0c5.py,sha256=F-DR0eVMRkemOnNXOtDjI5i6gW9136XLmWM_yMVvc84,4581 -unidecode/x0c6.py,sha256=7p_jMrHf3WUa_zANms-RGVN1bAeshgWLkC16_VcSawA,4490 -unidecode/x0c7.py,sha256=5eOAq4jFsPZ-zKO7lHzAGj_EvXdaMC4Kud7gvE-B7Tg,4564 -unidecode/x0c8.py,sha256=wltKvhBgn51jULzwUnEbmyDuK9JvQpQee0uTKK42-20,4733 -unidecode/x0c9.py,sha256=GoARON07wCoHN2wRHb5fvzqE9L3Yme2hKeciynUIAIk,4722 -unidecode/x0ca.py,sha256=BsBZTNj3npIkdo3L9pSEX7XvDT68KV7wFtOOwyEb2So,5007 -unidecode/x0cb.py,sha256=8T7vnJMRmYGyySYthMWz0bgN-MremktGImjejodFeMo,5012 -unidecode/x0cc.py,sha256=GKoHN-4vL4Y3EL42G0xbN74Tgspew1oMvxQtsIa3ess,4749 -unidecode/x0cd.py,sha256=7sZ05OjugbaombMRDYOVxgstZbXMcuX5kHFheKv4W2E,4738 -unidecode/x0ce.py,sha256=mOEHFrsAwIvcTnh7OKVK5qbuXUXHfJOR7D4FtXsQmao,4708 -unidecode/x0cf.py,sha256=H9PeYcbOG68F_yc7zsELUuN05ANfFNOUX-e3-gzx7Ow,4713 -unidecode/x0d0.py,sha256=eULqcGHPmaoEdl0EwRB5wWSu8M43bp4HoFo5gGljacg,4706 -unidecode/x0d1.py,sha256=BClLDAjPgsAX6MJCsuHfmfuhH9qfzUy_vb-d9zBs3Oc,4767 -unidecode/x0d2.py,sha256=e74nqGo4E4sF1sy8qBFu2ecWoRfJdoXI1xRFRPqYEz8,4724 -unidecode/x0d3.py,sha256=8-UmvJ3-ILXo9d3GA-ReOE4OfUenL3tVUJYldZ9gHu0,4705 -unidecode/x0d4.py,sha256=fwUmzksoddTKB8fH2rZMxRK3pJtLrxhcrYpHfBauAwE,4758 -unidecode/x0d5.py,sha256=rANSL5ndzLgSgYJQNEw57AfXpicRe7pvHRlKTPb4-QQ,4680 -unidecode/x0d6.py,sha256=fT8_cRzp7y60IIhn87kM9lLehKGAg5wYmfFOwgGp6e0,4765 -unidecode/x0d7.py,sha256=40-m7uKNvylWCcVBuTXrbiP6Lrj_4d4PWgLcX8670Kk,4468 -unidecode/x0f9.py,sha256=2PD0_fpDnaFO9ftICjYSOhnjAfBppjsj1TcLIuYjnCI,4567 
-unidecode/x0fa.py,sha256=XHxCfXOhHDqzjG0Nw6n1sT5Q_MKLCovPFe-22IQxVXU,4172 -unidecode/x0fb.py,sha256=n_5urRXj6Ecf0MKMnuwNY0UK6TJtUW2hKcNLQqa2Gf8,3787 -unidecode/x0fc.py,sha256=KcyQnyv7gxNeVcAnRwQrm4NlabZE3CrnmtLqXj_7te8,3595 -unidecode/x0fd.py,sha256=mVHMrX8AhRzwCkMNA4sJkhwirK3BqmNv6YZfyCpE9Is,3703 -unidecode/x0fe.py,sha256=CrdwUOf0sl8yUfOFnXOXFZ8U662dQThpGMwGBkY8cJ4,3769 -unidecode/x0ff.py,sha256=Ijfv5VVDCTWRzRqwMYSp0fSycy176gn7P8ut8x3bv-w,3957 -unidecode/x1d4.py,sha256=xzL0OicR95IWq6LiApIPEgPoST8dyVgYuIUGxkz1b28,3863 -unidecode/x1d5.py,sha256=bmTSTgWnsLP7yUDZq2Irtz84Zm7bmLzYzurY0eI0uIU,3863 -unidecode/x1d6.py,sha256=8H0RmEfbY82X1iQwr0vcsgQGCvGKv19_773K_T2NI2A,4052 -unidecode/x1d7.py,sha256=yyHV2dCo1p_m_QVgz1H9S6XqeiN9GpGxB-ZqAW7l5ts,4057 -unidecode/x1f1.py,sha256=URX9F6UPgUo4-tpr7bhPm4G5ruFDoScW5bZLwzR88Yg,4308 -unidecode/x1f6.py,sha256=Ji4t-EFmJmo3CDeZ0yD7pX58hj5fQQc99TOrD-yad9k,4103 diff --git a/libs/Unidecode-1.3.8.dist-info/WHEEL b/libs/Unidecode-1.3.8.dist-info/WHEEL deleted file mode 100644 index ba48cbcf92..0000000000 --- a/libs/Unidecode-1.3.8.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.41.3) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/libs/alembic-1.14.0.dist-info/LICENSE b/libs/alembic-1.14.0.dist-info/LICENSE deleted file mode 100644 index be8de0089e..0000000000 --- a/libs/alembic-1.14.0.dist-info/LICENSE +++ /dev/null @@ -1,19 +0,0 @@ -Copyright 2009-2024 Michael Bayer. 
- -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/libs/alembic-1.14.0.dist-info/METADATA b/libs/alembic-1.14.0.dist-info/METADATA deleted file mode 100644 index 2f6963fc66..0000000000 --- a/libs/alembic-1.14.0.dist-info/METADATA +++ /dev/null @@ -1,142 +0,0 @@ -Metadata-Version: 2.1 -Name: alembic -Version: 1.14.0 -Summary: A database migration tool for SQLAlchemy. 
-Home-page: https://alembic.sqlalchemy.org -Author: Mike Bayer -Author-email: mike_mp@zzzcomputing.com -License: MIT -Project-URL: Documentation, https://alembic.sqlalchemy.org/en/latest/ -Project-URL: Changelog, https://alembic.sqlalchemy.org/en/latest/changelog.html -Project-URL: Source, https://github.com/sqlalchemy/alembic/ -Project-URL: Issue Tracker, https://github.com/sqlalchemy/alembic/issues/ -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: Environment :: Console -Classifier: License :: OSI Approved :: MIT License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Topic :: Database :: Front-Ends -Requires-Python: >=3.8 -Description-Content-Type: text/x-rst -License-File: LICENSE -Requires-Dist: SQLAlchemy >=1.3.0 -Requires-Dist: Mako -Requires-Dist: typing-extensions >=4 -Requires-Dist: importlib-metadata ; python_version < "3.9" -Requires-Dist: importlib-resources ; python_version < "3.9" -Provides-Extra: tz -Requires-Dist: backports.zoneinfo ; (python_version < "3.9") and extra == 'tz' - -Alembic is a database migrations tool written by the author -of `SQLAlchemy `_. 
A migrations tool -offers the following functionality: - -* Can emit ALTER statements to a database in order to change - the structure of tables and other constructs -* Provides a system whereby "migration scripts" may be constructed; - each script indicates a particular series of steps that can "upgrade" a - target database to a new version, and optionally a series of steps that can - "downgrade" similarly, doing the same steps in reverse. -* Allows the scripts to execute in some sequential manner. - -The goals of Alembic are: - -* Very open ended and transparent configuration and operation. A new - Alembic environment is generated from a set of templates which is selected - among a set of options when setup first occurs. The templates then deposit a - series of scripts that define fully how database connectivity is established - and how migration scripts are invoked; the migration scripts themselves are - generated from a template within that series of scripts. The scripts can - then be further customized to define exactly how databases will be - interacted with and what structure new migration files should take. -* Full support for transactional DDL. The default scripts ensure that all - migrations occur within a transaction - for those databases which support - this (Postgresql, Microsoft SQL Server), migrations can be tested with no - need to manually undo changes upon failure. -* Minimalist script construction. Basic operations like renaming - tables/columns, adding/removing columns, changing column attributes can be - performed through one line commands like alter_column(), rename_table(), - add_constraint(). There is no need to recreate full SQLAlchemy Table - structures for simple operations like these - the functions themselves - generate minimalist schema structures behind the scenes to achieve the given - DDL sequence. -* "auto generation" of migrations. 
While real world migrations are far more - complex than what can be automatically determined, Alembic can still - eliminate the initial grunt work in generating new migration directives - from an altered schema. The ``--autogenerate`` feature will inspect the - current status of a database using SQLAlchemy's schema inspection - capabilities, compare it to the current state of the database model as - specified in Python, and generate a series of "candidate" migrations, - rendering them into a new migration script as Python directives. The - developer then edits the new file, adding additional directives and data - migrations as needed, to produce a finished migration. Table and column - level changes can be detected, with constraints and indexes to follow as - well. -* Full support for migrations generated as SQL scripts. Those of us who - work in corporate environments know that direct access to DDL commands on a - production database is a rare privilege, and DBAs want textual SQL scripts. - Alembic's usage model and commands are oriented towards being able to run a - series of migrations into a textual output file as easily as it runs them - directly to a database. Care must be taken in this mode to not invoke other - operations that rely upon in-memory SELECTs of rows - Alembic tries to - provide helper constructs like bulk_insert() to help with data-oriented - operations that are compatible with script-based DDL. -* Non-linear, dependency-graph versioning. Scripts are given UUID - identifiers similarly to a DVCS, and the linkage of one script to the next - is achieved via human-editable markers within the scripts themselves. - The structure of a set of migration files is considered as a - directed-acyclic graph, meaning any migration file can be dependent - on any other arbitrary set of migration files, or none at - all. Through this open-ended system, migration files can be organized - into branches, multiple roots, and mergepoints, without restriction. 
- Commands are provided to produce new branches, roots, and merges of - branches automatically. -* Provide a library of ALTER constructs that can be used by any SQLAlchemy - application. The DDL constructs build upon SQLAlchemy's own DDLElement base - and can be used standalone by any application or script. -* At long last, bring SQLite and its inability to ALTER things into the fold, - but in such a way that SQLite's very special workflow needs are accommodated - in an explicit way that makes the most of a bad situation, through the - concept of a "batch" migration, where multiple changes to a table can - be batched together to form a series of instructions for a single, subsequent - "move-and-copy" workflow. You can even use "move-and-copy" workflow for - other databases, if you want to recreate a table in the background - on a busy system. - -Documentation and status of Alembic is at https://alembic.sqlalchemy.org/ - -The SQLAlchemy Project -====================== - -Alembic is part of the `SQLAlchemy Project `_ and -adheres to the same standards and conventions as the core project. - -Development / Bug reporting / Pull requests -___________________________________________ - -Please refer to the -`SQLAlchemy Community Guide `_ for -guidelines on coding and participating in this project. - -Code of Conduct -_______________ - -Above all, SQLAlchemy places great emphasis on polite, thoughtful, and -constructive communication between users and developers. -Please see our current Code of Conduct at -`Code of Conduct `_. - -License -======= - -Alembic is distributed under the `MIT license -`_. 
diff --git a/libs/alembic-1.14.0.dist-info/RECORD b/libs/alembic-1.14.0.dist-info/RECORD deleted file mode 100644 index f7cc605d01..0000000000 --- a/libs/alembic-1.14.0.dist-info/RECORD +++ /dev/null @@ -1,86 +0,0 @@ -../../bin/alembic,sha256=xqPGhIsDow0IG3BUa3a_VtCtKJgqxLpVJuFe1PQcGoA,236 -alembic-1.14.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -alembic-1.14.0.dist-info/LICENSE,sha256=zhnnuit3ylhLgqZ5KFbhOOswsxHIlrB2wJpAXuRfvuk,1059 -alembic-1.14.0.dist-info/METADATA,sha256=5hNrxl9umF2WKbNL-MxyMUEZem8-OxRa49Qz9w7jqzo,7390 -alembic-1.14.0.dist-info/RECORD,, -alembic-1.14.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -alembic-1.14.0.dist-info/WHEEL,sha256=P9jw-gEje8ByB7_hXoICnHtVCrEwMQh-630tKvQWehc,91 -alembic-1.14.0.dist-info/entry_points.txt,sha256=aykM30soxwGN0pB7etLc1q0cHJbL9dy46RnK9VX4LLw,48 -alembic-1.14.0.dist-info/top_level.txt,sha256=FwKWd5VsPFC8iQjpu1u9Cn-JnK3-V1RhUCmWqz1cl-s,8 -alembic/__init__.py,sha256=qw_qYmTjOKiGcs--x0c6kjZo70tQTR5m8_lqF98Qr_0,63 -alembic/__main__.py,sha256=373m7-TBh72JqrSMYviGrxCHZo-cnweM8AGF8A22PmY,78 -alembic/autogenerate/__init__.py,sha256=ntmUTXhjLm4_zmqIwyVaECdpPDn6_u1yM9vYk6-553E,543 -alembic/autogenerate/api.py,sha256=Bh-37G0PSFeT9WSfEQ-3TZoainXGLL2nsl4okv_xYc0,22173 -alembic/autogenerate/compare.py,sha256=cdUBH6qsedaJsnToSOu4MfcJaI4bjUJ4VWqtBlqsSr8,44944 -alembic/autogenerate/render.py,sha256=YB3C90rq7XDhjTia9GAnK6yfnVVzCROziZrbArmG9SE,35481 -alembic/autogenerate/rewriter.py,sha256=uZWRkTYJoncoEJ5WY1QBRiozjyChqZDJPy4LtcRibjM,7846 -alembic/command.py,sha256=2tkKrIoEgPfXrGgvMRGrUXH4l-7z466DOxd7Q2XOfL8,22169 -alembic/config.py,sha256=BZ7mwFRk2gq8GFNxxy9qvMUFx43YbDbQTC99OnjqiKY,22216 -alembic/context.py,sha256=hK1AJOQXJ29Bhn276GYcosxeG7pC5aZRT5E8c4bMJ4Q,195 -alembic/context.pyi,sha256=hUHbSnbSeEEMVkk0gDSXOq4_9edSjYzsjmmf-mL9Iao,31737 -alembic/ddl/__init__.py,sha256=Df8fy4Vn_abP8B7q3x8gyFwEwnLw6hs2Ljt_bV3EZWE,152 
-alembic/ddl/_autogen.py,sha256=Blv2RrHNyF4cE6znCQXNXG5T9aO-YmiwD4Fz-qfoaWA,9275 -alembic/ddl/base.py,sha256=gazpvtk_6XURcsa0libwcaIquL5HwJDP1ZWKJ6P7x0I,9788 -alembic/ddl/impl.py,sha256=7-oxMb7KeycaK96x-kXw4mR6NSE1tmN0UEZIZrPcuhY,30195 -alembic/ddl/mssql.py,sha256=ydvgBSaftKYjaBaMyqius66Ta4CICQSj79Og3Ed2atY,14219 -alembic/ddl/mysql.py,sha256=kXOGYmpnL_9WL3ijXNsG4aAwy3m1HWJOoLZSePzmJF0,17316 -alembic/ddl/oracle.py,sha256=669YlkcZihlXFbnXhH2krdrvDry8q5pcUGfoqkg_R6Y,6243 -alembic/ddl/postgresql.py,sha256=GNCnx-N8UsCIstfW49J8ivYcKgRB8KFNPRgNtORC_AM,29883 -alembic/ddl/sqlite.py,sha256=wLXhb8bJWRspKQTb-iVfepR4LXYgOuEbUWKX5qwDhIQ,7570 -alembic/environment.py,sha256=MM5lPayGT04H3aeng1H7GQ8HEAs3VGX5yy6mDLCPLT4,43 -alembic/migration.py,sha256=MV6Fju6rZtn2fTREKzXrCZM6aIBGII4OMZFix0X-GLs,41 -alembic/op.py,sha256=flHtcsVqOD-ZgZKK2pv-CJ5Cwh-KJ7puMUNXzishxLw,167 -alembic/op.pyi,sha256=QZ1ERetxIrpZNTyg48Btn5OJhhpMId-_MLMP36RauOw,50168 -alembic/operations/__init__.py,sha256=e0KQSZAgLpTWvyvreB7DWg7RJV_MWSOPVDgCqsd2FzY,318 -alembic/operations/base.py,sha256=JRaOtPqyqfaPjzGHxuP9VMcO1KsJNmbbLOvwG82qxGA,74474 -alembic/operations/batch.py,sha256=YqtD4hJ3_RkFxvI7zbmBwxcLEyLHYyWQpsz4l5L85yI,26943 -alembic/operations/ops.py,sha256=guIpLQzlqgkdP2LGDW8vWg_DXeAouEldiVZDgRas7YI,94953 -alembic/operations/schemaobj.py,sha256=Wp-bBe4a8lXPTvIHJttBY0ejtpVR5Jvtb2kI-U2PztQ,9468 -alembic/operations/toimpl.py,sha256=Fx-UKcq6S8pVtsEwPFjTKtEcAVKjfptn-BfpE1k3_ck,7517 -alembic/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -alembic/runtime/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -alembic/runtime/environment.py,sha256=SkYB_am1h3FSG8IsExAQxGP_7WwzOVigqjlO747Aokc,41497 -alembic/runtime/migration.py,sha256=9GZ_bYZ6yMF7DUD1hgZdmB0YqvcdcNBBfxFaXKHeQoM,49857 -alembic/script/__init__.py,sha256=lSj06O391Iy5avWAiq8SPs6N8RBgxkSPjP8wpXcNDGg,100 -alembic/script/base.py,sha256=XLNpdsLnBBSz4ZKMFUArFUdtL1HcjtuUDHNbA-5VlZA,37809 
-alembic/script/revision.py,sha256=NTu-eu5Y78u4NoVXpT0alpD2oL40SGATA2sEMEf1el4,62306 -alembic/script/write_hooks.py,sha256=NGB6NGgfdf7HK6XNNpSKqUCfzxazj-NRUePgFx7MJSM,5036 -alembic/templates/async/README,sha256=ISVtAOvqvKk_5ThM5ioJE-lMkvf9IbknFUFVU_vPma4,58 -alembic/templates/async/alembic.ini.mako,sha256=lw_6ie1tMbYGpbvE7MnzJvx101RbSTh9uu4t9cvDpug,3638 -alembic/templates/async/env.py,sha256=zbOCf3Y7w2lg92hxSwmG1MM_7y56i_oRH4AKp0pQBYo,2389 -alembic/templates/async/script.py.mako,sha256=MEqL-2qATlST9TAOeYgscMn1uy6HUS9NFvDgl93dMj8,635 -alembic/templates/generic/README,sha256=MVlc9TYmr57RbhXET6QxgyCcwWP7w-vLkEsirENqiIQ,38 -alembic/templates/generic/alembic.ini.mako,sha256=YcwTOEoiZr663Gkt6twCjmaqZao0n6xjRl0B5prK79s,3746 -alembic/templates/generic/env.py,sha256=TLRWOVW3Xpt_Tpf8JFzlnoPn_qoUu8UV77Y4o9XD6yI,2103 -alembic/templates/generic/script.py.mako,sha256=MEqL-2qATlST9TAOeYgscMn1uy6HUS9NFvDgl93dMj8,635 -alembic/templates/multidb/README,sha256=dWLDhnBgphA4Nzb7sNlMfCS3_06YqVbHhz-9O5JNqyI,606 -alembic/templates/multidb/alembic.ini.mako,sha256=AW1OGb-QezxBY5mynSWW7b1lGKnh9sVPImfGgfXf2EM,3840 -alembic/templates/multidb/env.py,sha256=6zNjnW8mXGUk7erTsAvrfhvqoczJ-gagjVq1Ypg2YIQ,4230 -alembic/templates/multidb/script.py.mako,sha256=N06nMtNSwHkgl0EBXDyMt8njp9tlOesR583gfq21nbY,1090 -alembic/testing/__init__.py,sha256=kOxOh5nwmui9d-_CCq9WA4Udwy7ITjm453w74CTLZDo,1159 -alembic/testing/assertions.py,sha256=ScUb1sVopIl70BirfHUJDvwswC70Q93CiIWwkiZbhHg,5207 -alembic/testing/env.py,sha256=giHWVLhHkfNWrPEfrAqhpMOLL6FgWoBCVAzBVrVbSSA,10766 -alembic/testing/fixtures.py,sha256=nBntOynOmVCFc7IYiN3DIQ3TBNTfiGCvL_1-FyCry8o,9462 -alembic/testing/plugin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -alembic/testing/plugin/bootstrap.py,sha256=9C6wtjGrIVztZ928w27hsQE0KcjDLIUtUN3dvZKsMVk,50 -alembic/testing/requirements.py,sha256=dKeAO1l5TwBqXarJN-IPORlCqCJv-41Dj6oXoEikxHQ,5133 -alembic/testing/schemacompare.py,sha256=N5UqSNCOJetIKC4vKhpYzQEpj08XkdgIoqBmEPQ3tlc,4838 
-alembic/testing/suite/__init__.py,sha256=MvE7-hwbaVN1q3NM-ztGxORU9dnIelUCINKqNxewn7Y,288 -alembic/testing/suite/_autogen_fixtures.py,sha256=cDq1pmzHe15S6dZPGNC6sqFaCQ3hLT_oPV2IDigUGQ0,9880 -alembic/testing/suite/test_autogen_comments.py,sha256=aEGqKUDw4kHjnDk298aoGcQvXJWmZXcIX_2FxH4cJK8,6283 -alembic/testing/suite/test_autogen_computed.py,sha256=CXAeF-5Wr2cmW8PB7ztHG_4ZQsn1gSWrHWfxi72grNU,6147 -alembic/testing/suite/test_autogen_diffs.py,sha256=T4SR1n_kmcOKYhR4W1-dA0e5sddJ69DSVL2HW96kAkE,8394 -alembic/testing/suite/test_autogen_fks.py,sha256=AqFmb26Buex167HYa9dZWOk8x-JlB1OK3bwcvvjDFaU,32927 -alembic/testing/suite/test_autogen_identity.py,sha256=kcuqngG7qXAKPJDX4U8sRzPKHEJECHuZ0DtuaS6tVkk,5824 -alembic/testing/suite/test_environment.py,sha256=OwD-kpESdLoc4byBrGrXbZHvqtPbzhFCG4W9hJOJXPQ,11877 -alembic/testing/suite/test_op.py,sha256=2XQCdm_NmnPxHGuGj7hmxMzIhKxXNotUsKdACXzE1mM,1343 -alembic/testing/util.py,sha256=CQrcQDA8fs_7ME85z5ydb-Bt70soIIID-qNY1vbR2dg,3350 -alembic/testing/warnings.py,sha256=RxA7x_8GseANgw07Us8JN_1iGbANxaw6_VitX2ZGQH4,1078 -alembic/util/__init__.py,sha256=KSZ7UT2YzH6CietgUtljVoE3QnGjoFKOi7RL5sgUxrk,1688 -alembic/util/compat.py,sha256=RjHdQa1NomU3Zlvgfvza0OMiSRQSLRL3xVl3OdUy2UE,2594 -alembic/util/editor.py,sha256=JIz6_BdgV8_oKtnheR6DZoB7qnrHrlRgWjx09AsTsUw,2546 -alembic/util/exc.py,sha256=KQTru4zcgAmN4IxLMwLFS56XToUewaXB7oOLcPNjPwg,98 -alembic/util/langhelpers.py,sha256=LpOcovnhMnP45kTt8zNJ4BHpyQrlF40OL6yDXjqKtsE,10026 -alembic/util/messaging.py,sha256=BxAHiJsYHBPb2m8zv4yaueSRAlVuYXWkRCeN02JXhqw,3250 -alembic/util/pyfiles.py,sha256=zltVdcwEJJCPS2gHsQvkHkQakuF6wXiZ6zfwHbGNT0g,3489 -alembic/util/sqla_compat.py,sha256=XMfZaLdbVbJoniNUyI3RUUXu4gCWljjVBbJ7db6vCgc,19526 diff --git a/libs/alembic-1.14.0.dist-info/WHEEL b/libs/alembic-1.14.0.dist-info/WHEEL deleted file mode 100644 index 9b78c44519..0000000000 --- a/libs/alembic-1.14.0.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (75.3.0) -Root-Is-Purelib: true 
-Tag: py3-none-any - diff --git a/libs/Flask_Cors-5.0.0.dist-info/INSTALLER b/libs/alembic-1.18.4.dist-info/INSTALLER similarity index 100% rename from libs/Flask_Cors-5.0.0.dist-info/INSTALLER rename to libs/alembic-1.18.4.dist-info/INSTALLER diff --git a/libs/alembic-1.18.4.dist-info/METADATA b/libs/alembic-1.18.4.dist-info/METADATA new file mode 100644 index 0000000000..04d82dce2d --- /dev/null +++ b/libs/alembic-1.18.4.dist-info/METADATA @@ -0,0 +1,139 @@ +Metadata-Version: 2.4 +Name: alembic +Version: 1.18.4 +Summary: A database migration tool for SQLAlchemy. +Author-email: Mike Bayer +License-Expression: MIT +Project-URL: Homepage, https://alembic.sqlalchemy.org +Project-URL: Documentation, https://alembic.sqlalchemy.org/en/latest/ +Project-URL: Changelog, https://alembic.sqlalchemy.org/en/latest/changelog.html +Project-URL: Source, https://github.com/sqlalchemy/alembic/ +Project-URL: Issue Tracker, https://github.com/sqlalchemy/alembic/issues/ +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Environment :: Console +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Database :: Front-Ends +Requires-Python: >=3.10 +Description-Content-Type: text/x-rst +License-File: LICENSE +Requires-Dist: SQLAlchemy>=1.4.23 +Requires-Dist: Mako +Requires-Dist: typing-extensions>=4.12 +Requires-Dist: tomli; python_version < "3.11" +Provides-Extra: tz +Requires-Dist: tzdata; extra == "tz" +Dynamic: license-file + +Alembic is a database migrations tool written 
by the author +of `SQLAlchemy `_. A migrations tool +offers the following functionality: + +* Can emit ALTER statements to a database in order to change + the structure of tables and other constructs +* Provides a system whereby "migration scripts" may be constructed; + each script indicates a particular series of steps that can "upgrade" a + target database to a new version, and optionally a series of steps that can + "downgrade" similarly, doing the same steps in reverse. +* Allows the scripts to execute in some sequential manner. + +The goals of Alembic are: + +* Very open ended and transparent configuration and operation. A new + Alembic environment is generated from a set of templates which is selected + among a set of options when setup first occurs. The templates then deposit a + series of scripts that define fully how database connectivity is established + and how migration scripts are invoked; the migration scripts themselves are + generated from a template within that series of scripts. The scripts can + then be further customized to define exactly how databases will be + interacted with and what structure new migration files should take. +* Full support for transactional DDL. The default scripts ensure that all + migrations occur within a transaction - for those databases which support + this (Postgresql, Microsoft SQL Server), migrations can be tested with no + need to manually undo changes upon failure. +* Minimalist script construction. Basic operations like renaming + tables/columns, adding/removing columns, changing column attributes can be + performed through one line commands like alter_column(), rename_table(), + add_constraint(). There is no need to recreate full SQLAlchemy Table + structures for simple operations like these - the functions themselves + generate minimalist schema structures behind the scenes to achieve the given + DDL sequence. +* "auto generation" of migrations. 
While real world migrations are far more + complex than what can be automatically determined, Alembic can still + eliminate the initial grunt work in generating new migration directives + from an altered schema. The ``--autogenerate`` feature will inspect the + current status of a database using SQLAlchemy's schema inspection + capabilities, compare it to the current state of the database model as + specified in Python, and generate a series of "candidate" migrations, + rendering them into a new migration script as Python directives. The + developer then edits the new file, adding additional directives and data + migrations as needed, to produce a finished migration. Table and column + level changes can be detected, with constraints and indexes to follow as + well. +* Full support for migrations generated as SQL scripts. Those of us who + work in corporate environments know that direct access to DDL commands on a + production database is a rare privilege, and DBAs want textual SQL scripts. + Alembic's usage model and commands are oriented towards being able to run a + series of migrations into a textual output file as easily as it runs them + directly to a database. Care must be taken in this mode to not invoke other + operations that rely upon in-memory SELECTs of rows - Alembic tries to + provide helper constructs like bulk_insert() to help with data-oriented + operations that are compatible with script-based DDL. +* Non-linear, dependency-graph versioning. Scripts are given UUID + identifiers similarly to a DVCS, and the linkage of one script to the next + is achieved via human-editable markers within the scripts themselves. + The structure of a set of migration files is considered as a + directed-acyclic graph, meaning any migration file can be dependent + on any other arbitrary set of migration files, or none at + all. Through this open-ended system, migration files can be organized + into branches, multiple roots, and mergepoints, without restriction. 
+  Commands are provided to produce new branches, roots, and merges of
+  branches automatically.
+* Provide a library of ALTER constructs that can be used by any SQLAlchemy
+  application. The DDL constructs build upon SQLAlchemy's own DDLElement base
+  and can be used standalone by any application or script.
+* At long last, bring SQLite and its inability to ALTER things into the fold,
+  but in such a way that SQLite's very special workflow needs are accommodated
+  in an explicit way that makes the most of a bad situation, through the
+  concept of a "batch" migration, where multiple changes to a table can
+  be batched together to form a series of instructions for a single, subsequent
+  "move-and-copy" workflow. You can even use "move-and-copy" workflow for
+  other databases, if you want to recreate a table in the background
+  on a busy system.
+
+Documentation and status of Alembic is at https://alembic.sqlalchemy.org/
+
+The SQLAlchemy Project
+======================
+
+Alembic is part of the `SQLAlchemy Project <https://www.sqlalchemy.org>`_ and
+adheres to the same standards and conventions as the core project.
+
+Development / Bug reporting / Pull requests
+___________________________________________
+
+Please refer to the
+`SQLAlchemy Community Guide <https://www.sqlalchemy.org/develop.html>`_ for
+guidelines on coding and participating in this project.
+
+Code of Conduct
+_______________
+
+Above all, SQLAlchemy places great emphasis on polite, thoughtful, and
+constructive communication between users and developers.
+Please see our current Code of Conduct at
+`Code of Conduct <https://www.sqlalchemy.org/codeofconduct.html>`_.
+
+License
+=======
+
+Alembic is distributed under the `MIT license
+<https://www.opensource.org/licenses/mit-license.php>`_.
diff --git a/libs/alembic-1.18.4.dist-info/RECORD b/libs/alembic-1.18.4.dist-info/RECORD new file mode 100644 index 0000000000..e515af6fb8 --- /dev/null +++ b/libs/alembic-1.18.4.dist-info/RECORD @@ -0,0 +1,104 @@ +../../bin/alembic,sha256=L1WsuXkAaKzUoMmUw815BjlcETDzheJhPsz_pk-A2uM,189 +alembic-1.18.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +alembic-1.18.4.dist-info/METADATA,sha256=sPH3Zq5eEaNtbnI1os9Rvk7eBbFJSMPq13poNNaxvfs,7217 +alembic-1.18.4.dist-info/RECORD,, +alembic-1.18.4.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +alembic-1.18.4.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92 +alembic-1.18.4.dist-info/entry_points.txt,sha256=aykM30soxwGN0pB7etLc1q0cHJbL9dy46RnK9VX4LLw,48 +alembic-1.18.4.dist-info/licenses/LICENSE,sha256=bmjZSgOg4-Mn3fPobR6-3BTuzjkiAiYY_CRqNilv0Mw,1059 +alembic-1.18.4.dist-info/top_level.txt,sha256=FwKWd5VsPFC8iQjpu1u9Cn-JnK3-V1RhUCmWqz1cl-s,8 +alembic/__init__.py,sha256=6ppwNUS6dfdFIm5uwZaaZ9lDZ7pIwkTNyQcbjY47V3I,93 +alembic/__main__.py,sha256=373m7-TBh72JqrSMYviGrxCHZo-cnweM8AGF8A22PmY,78 +alembic/autogenerate/__init__.py,sha256=ntmUTXhjLm4_zmqIwyVaECdpPDn6_u1yM9vYk6-553E,543 +alembic/autogenerate/api.py,sha256=8tVNDSHlqsBgj1IVLdqvZr_jlvz9kp3O5EKIL9biaZg,22781 +alembic/autogenerate/compare/__init__.py,sha256=kCvA0ZK0rTahNv9wlgyIB5DH2lFEhTRO4PFmoqcL9JE,1809 +alembic/autogenerate/compare/comments.py,sha256=agSrWsZhJ47i-E-EqiP3id2CXTTbP0muOKk1-9in9lg,3234 +alembic/autogenerate/compare/constraints.py,sha256=7sLSvUK9M2CbMRRQy5pveIXbjDLRDnfPx0Dvi_KXOf8,27906 +alembic/autogenerate/compare/schema.py,sha256=plQ7JJ1zJGlnajweSV8lAD9tDYPks5G40sliocTuXJA,1695 +alembic/autogenerate/compare/server_defaults.py,sha256=D--5EvEfyX0fSVkK6iLtRoer5sYK6xeNC2TIdu7klUk,10792 +alembic/autogenerate/compare/tables.py,sha256=47pAgVhbmXGLrm3dMK6hrNABxOAe_cGSQmPtCBwORVc,10611 +alembic/autogenerate/compare/types.py,sha256=75bOduz-dOiyLI065XD5sEP_JF9GPLkDAQ_y5B8lXF0,4005 
+alembic/autogenerate/compare/util.py,sha256=K_GArJ2xQXZi6ftb8gkgZuIdVqvyep3E2ZXq8F3-jIU,9521 +alembic/autogenerate/render.py,sha256=ceQL8nk8m2kBtQq5gtxtDLR9iR0Sck8xG_61Oez-Sqs,37270 +alembic/autogenerate/rewriter.py,sha256=NIASSS-KaNKPmbm1k4pE45aawwjSh1Acf6eZrOwnUGM,7814 +alembic/command.py,sha256=7RzAwwXR31sOl0oVItyZl9B0j3TeR5dRyx9634lVsLM,25297 +alembic/config.py,sha256=VoCZV2cFZoF0Xa1OxHqsA-MKzuwBRaJSC7hxZ3-uWN4,34983 +alembic/context.py,sha256=hK1AJOQXJ29Bhn276GYcosxeG7pC5aZRT5E8c4bMJ4Q,195 +alembic/context.pyi,sha256=b_naI_W8dyiZRsL_n299a-LbqLZxKTAgDIXubRLVKlY,32531 +alembic/ddl/__init__.py,sha256=Df8fy4Vn_abP8B7q3x8gyFwEwnLw6hs2Ljt_bV3EZWE,152 +alembic/ddl/_autogen.py,sha256=Blv2RrHNyF4cE6znCQXNXG5T9aO-YmiwD4Fz-qfoaWA,9275 +alembic/ddl/base.py,sha256=dNhLIZnFMP7Cr8rE_e2Zb5skGgCMBOdca1PajXqZYhs,11977 +alembic/ddl/impl.py,sha256=IU3yHFVI3v0QHEwNL_LSN1PRpPF0n09NFFqRZkW86wE,31376 +alembic/ddl/mssql.py,sha256=dee0acwnxmTZXuYPqqlYaDiSbKS46zVH0WRULjX5Blg,17398 +alembic/ddl/mysql.py,sha256=2fvzGcdg4qqCJogGnzvQN636vUi9mF6IoQWLGevvF_A,18456 +alembic/ddl/oracle.py,sha256=669YlkcZihlXFbnXhH2krdrvDry8q5pcUGfoqkg_R6Y,6243 +alembic/ddl/postgresql.py,sha256=04M4OpZOCJJ3ipuHoVwlR1gI1sgRwOguRRVx_mFg8Uc,30417 +alembic/ddl/sqlite.py,sha256=TmzU3YaR3aw_0spSrA6kcUY8fyDfwsu4GkH5deYPEK8,8017 +alembic/environment.py,sha256=MM5lPayGT04H3aeng1H7GQ8HEAs3VGX5yy6mDLCPLT4,43 +alembic/migration.py,sha256=MV6Fju6rZtn2fTREKzXrCZM6aIBGII4OMZFix0X-GLs,41 +alembic/op.py,sha256=flHtcsVqOD-ZgZKK2pv-CJ5Cwh-KJ7puMUNXzishxLw,167 +alembic/op.pyi,sha256=ABBlNk4Eg7DR17knSKIjmvHQBNAmKh3aHQNHU8Oyw08,53347 +alembic/operations/__init__.py,sha256=e0KQSZAgLpTWvyvreB7DWg7RJV_MWSOPVDgCqsd2FzY,318 +alembic/operations/base.py,sha256=ubpv1HDol0g0nuLi0b8-uN7-HEVRZ6mq8arvK9EGo0g,78432 +alembic/operations/batch.py,sha256=hYOpzG2FK_8hk-rHNuLuFAA3-VXRSOnsTrpz2YlA61Q,26947 +alembic/operations/ops.py,sha256=ofbHkReZkZX2n9lXDaIPlrKe2U1mwgQpZNhEbuC4QrM,99325 
+alembic/operations/schemaobj.py,sha256=Wp-bBe4a8lXPTvIHJttBY0ejtpVR5Jvtb2kI-U2PztQ,9468 +alembic/operations/toimpl.py,sha256=f8rH3jdob9XvEJr6CoWEkX6X1zgNB5qxdcEQugyhBvU,8466 +alembic/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +alembic/runtime/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +alembic/runtime/environment.py,sha256=1cR1v18sIKvOPZMlc4fHGU4J8r6Dec9h4o3WXkMmFKQ,42400 +alembic/runtime/migration.py,sha256=mR2Ee1h9Yy6OMFeDL4LOYorLYby2l2f899WGK_boECw,48427 +alembic/runtime/plugins.py,sha256=pWCDhMX8MvR8scXhiGSRNYNW7-ckEbOW2qK58xRFy1Q,5707 +alembic/script/__init__.py,sha256=lSj06O391Iy5avWAiq8SPs6N8RBgxkSPjP8wpXcNDGg,100 +alembic/script/base.py,sha256=OInSjbfcnUSjVCc5vVYY33UJ1Uo5xE5Huicp8P9VM1I,36698 +alembic/script/revision.py,sha256=SEePZPTMIyfjF73QAD0VIax9jc1dALkiLQZwTzwiyPw,62312 +alembic/script/write_hooks.py,sha256=KWH12250h_JcdBkGsLVo9JKYKpNcJxBUjwZ9r_r88Bc,5369 +alembic/templates/async/README,sha256=ISVtAOvqvKk_5ThM5ioJE-lMkvf9IbknFUFVU_vPma4,58 +alembic/templates/async/alembic.ini.mako,sha256=esbuCnpkyjntJC7k9NnYcCAzhrRQ8NVC4pWineiRk_w,5010 +alembic/templates/async/env.py,sha256=zbOCf3Y7w2lg92hxSwmG1MM_7y56i_oRH4AKp0pQBYo,2389 +alembic/templates/async/script.py.mako,sha256=04kgeBtNMa4cCnG8CfQcKt6P6rnloIfj8wy0u_DBydM,704 +alembic/templates/generic/README,sha256=MVlc9TYmr57RbhXET6QxgyCcwWP7w-vLkEsirENqiIQ,38 +alembic/templates/generic/alembic.ini.mako,sha256=2i2vPsGQSmE9XMiLz8tSBF_UIA8PJl0-fAvbRVmiK_w,5010 +alembic/templates/generic/env.py,sha256=TLRWOVW3Xpt_Tpf8JFzlnoPn_qoUu8UV77Y4o9XD6yI,2103 +alembic/templates/generic/script.py.mako,sha256=04kgeBtNMa4cCnG8CfQcKt6P6rnloIfj8wy0u_DBydM,704 +alembic/templates/multidb/README,sha256=dWLDhnBgphA4Nzb7sNlMfCS3_06YqVbHhz-9O5JNqyI,606 +alembic/templates/multidb/alembic.ini.mako,sha256=asVt3aJVwjuuw9bopfMofVvonO31coXBbV5DeMRN6cM,5336 +alembic/templates/multidb/env.py,sha256=6zNjnW8mXGUk7erTsAvrfhvqoczJ-gagjVq1Ypg2YIQ,4230 
+alembic/templates/multidb/script.py.mako,sha256=ZbCXMkI5Wj2dwNKcxuVGkKZ7Iav93BNx_bM4zbGi3c8,1235 +alembic/templates/pyproject/README,sha256=dMhIiFoeM7EdeaOXBs3mVQ6zXACMyGXDb_UBB6sGRA0,60 +alembic/templates/pyproject/alembic.ini.mako,sha256=bQnEoydnLOUgg9vNbTOys4r5MaW8lmwYFXSrlfdEEkw,782 +alembic/templates/pyproject/env.py,sha256=TLRWOVW3Xpt_Tpf8JFzlnoPn_qoUu8UV77Y4o9XD6yI,2103 +alembic/templates/pyproject/pyproject.toml.mako,sha256=W6x_K-xLfEvyM8D4B3Fg0l20P1h6SPK33188pqRFroQ,3000 +alembic/templates/pyproject/script.py.mako,sha256=04kgeBtNMa4cCnG8CfQcKt6P6rnloIfj8wy0u_DBydM,704 +alembic/templates/pyproject_async/README,sha256=2Q5XcEouiqQ-TJssO9805LROkVUd0F6d74rTnuLrifA,45 +alembic/templates/pyproject_async/alembic.ini.mako,sha256=bQnEoydnLOUgg9vNbTOys4r5MaW8lmwYFXSrlfdEEkw,782 +alembic/templates/pyproject_async/env.py,sha256=zbOCf3Y7w2lg92hxSwmG1MM_7y56i_oRH4AKp0pQBYo,2389 +alembic/templates/pyproject_async/pyproject.toml.mako,sha256=W6x_K-xLfEvyM8D4B3Fg0l20P1h6SPK33188pqRFroQ,3000 +alembic/templates/pyproject_async/script.py.mako,sha256=04kgeBtNMa4cCnG8CfQcKt6P6rnloIfj8wy0u_DBydM,704 +alembic/testing/__init__.py,sha256=PTMhi_2PZ1T_3atQS2CIr0V4YRZzx_doKI-DxKdQS44,1297 +alembic/testing/assertions.py,sha256=VKXMEVWjuPAsYnNxP3WnUpXaFN3ytNFf9LI72OEJ074,5344 +alembic/testing/env.py,sha256=oQN56xXHtHfK8RD-8pH8yZ-uWcjpuNL1Mt5HNrzZyc0,12151 +alembic/testing/fixtures.py,sha256=meqm10rd1ynppW6tw1wcpDJJLyQezZ7FwKyqcrwIOok,11931 +alembic/testing/plugin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +alembic/testing/plugin/bootstrap.py,sha256=9C6wtjGrIVztZ928w27hsQE0KcjDLIUtUN3dvZKsMVk,50 +alembic/testing/requirements.py,sha256=OZSHd8I3zOb7288cZxUTebqxx8j0T6I8MekH15TyPvY,4566 +alembic/testing/schemacompare.py,sha256=N5UqSNCOJetIKC4vKhpYzQEpj08XkdgIoqBmEPQ3tlc,4838 +alembic/testing/suite/__init__.py,sha256=MvE7-hwbaVN1q3NM-ztGxORU9dnIelUCINKqNxewn7Y,288 +alembic/testing/suite/_autogen_fixtures.py,sha256=3nNTd8iDeVeSgpPIj8KAraNbU-PkJtxDb4X_TVsZ528,14200 
+alembic/testing/suite/test_autogen_comments.py,sha256=aEGqKUDw4kHjnDk298aoGcQvXJWmZXcIX_2FxH4cJK8,6283 +alembic/testing/suite/test_autogen_computed.py,sha256=puJ0hBtLzNz8LiPGqDPS8vse6dUS9VCBpUdw-cOksZo,4554 +alembic/testing/suite/test_autogen_diffs.py,sha256=T4SR1n_kmcOKYhR4W1-dA0e5sddJ69DSVL2HW96kAkE,8394 +alembic/testing/suite/test_autogen_fks.py,sha256=wHKjD4Egf7IZlH0HYw-c8uti0jhJpOm5K42QMXf5tIw,32930 +alembic/testing/suite/test_autogen_identity.py,sha256=kcuqngG7qXAKPJDX4U8sRzPKHEJECHuZ0DtuaS6tVkk,5824 +alembic/testing/suite/test_environment.py,sha256=OwD-kpESdLoc4byBrGrXbZHvqtPbzhFCG4W9hJOJXPQ,11877 +alembic/testing/suite/test_op.py,sha256=2XQCdm_NmnPxHGuGj7hmxMzIhKxXNotUsKdACXzE1mM,1343 +alembic/testing/util.py,sha256=CQrcQDA8fs_7ME85z5ydb-Bt70soIIID-qNY1vbR2dg,3350 +alembic/testing/warnings.py,sha256=cDDWzvxNZE6x9dME2ACTXSv01G81JcIbE1GIE_s1kvg,831 +alembic/util/__init__.py,sha256=xNpZtajyTF4eVEbLj0Pcm2FbNkIZD_pCvKGKSPucTEs,1777 +alembic/util/compat.py,sha256=NytmcsMtK8WEEVwWc-ZWYlSOi55BtRlmJXjxnF3nsh8,3810 +alembic/util/editor.py,sha256=JIz6_BdgV8_oKtnheR6DZoB7qnrHrlRgWjx09AsTsUw,2546 +alembic/util/exc.py,sha256=SublpLmAeAW8JeEml-1YyhIjkSORTkZbvHVVJeoPymg,993 +alembic/util/langhelpers.py,sha256=GBbR01xNi1kmz8W37h0NzXl3hBC1SY7k7Bj-h5jVgps,13164 +alembic/util/messaging.py,sha256=3bEBoDy4EAXETXAvArlYjeMITXDTgPTu6ZoE3ytnzSw,3294 +alembic/util/pyfiles.py,sha256=QUZYc5kE3Z7nV64PblcRffzA7VfVaiFB2x3vtcG0_AE,4707 +alembic/util/sqla_compat.py,sha256=llgJVtOsO1c3euS9_peORZkM9QeSvQWa-1LNHqrzEM4,15246 diff --git a/libs/Flask_Cors-5.0.0.dist-info/REQUESTED b/libs/alembic-1.18.4.dist-info/REQUESTED similarity index 100% rename from libs/Flask_Cors-5.0.0.dist-info/REQUESTED rename to libs/alembic-1.18.4.dist-info/REQUESTED diff --git a/libs/alembic-1.18.4.dist-info/WHEEL b/libs/alembic-1.18.4.dist-info/WHEEL new file mode 100644 index 0000000000..0885d05555 --- /dev/null +++ b/libs/alembic-1.18.4.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools 
(80.10.2) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/libs/alembic-1.14.0.dist-info/entry_points.txt b/libs/alembic-1.18.4.dist-info/entry_points.txt similarity index 100% rename from libs/alembic-1.14.0.dist-info/entry_points.txt rename to libs/alembic-1.18.4.dist-info/entry_points.txt diff --git a/libs/alembic-1.18.4.dist-info/licenses/LICENSE b/libs/alembic-1.18.4.dist-info/licenses/LICENSE new file mode 100644 index 0000000000..b03e235f9a --- /dev/null +++ b/libs/alembic-1.18.4.dist-info/licenses/LICENSE @@ -0,0 +1,19 @@ +Copyright 2009-2026 Michael Bayer. + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/libs/alembic-1.14.0.dist-info/top_level.txt b/libs/alembic-1.18.4.dist-info/top_level.txt similarity index 100% rename from libs/alembic-1.14.0.dist-info/top_level.txt rename to libs/alembic-1.18.4.dist-info/top_level.txt diff --git a/libs/alembic/__init__.py b/libs/alembic/__init__.py index 637b2d4e14..059b6b1588 100644 --- a/libs/alembic/__init__.py +++ b/libs/alembic/__init__.py @@ -1,4 +1,6 @@ from . import context from . import op +from .runtime import plugins -__version__ = "1.14.0" + +__version__ = "1.18.4" diff --git a/libs/alembic/autogenerate/api.py b/libs/alembic/autogenerate/api.py index 4c03916288..b2e3faef78 100644 --- a/libs/alembic/autogenerate/api.py +++ b/libs/alembic/autogenerate/api.py @@ -1,6 +1,7 @@ from __future__ import annotations import contextlib +import logging from typing import Any from typing import Dict from typing import Iterator @@ -17,11 +18,9 @@ from . import render from .. import util from ..operations import ops +from ..runtime.plugins import Plugin from ..util import sqla_compat -"""Provide the 'autogenerate' feature which can produce migration operations -automatically.""" - if TYPE_CHECKING: from sqlalchemy.engine import Connection from sqlalchemy.engine import Dialect @@ -42,6 +41,10 @@ from ..script.base import Script from ..script.base import ScriptDirectory from ..script.revision import _GetRevArg + from ..util import PriorityDispatcher + + +log = logging.getLogger(__name__) def compare_metadata(context: MigrationContext, metadata: MetaData) -> Any: @@ -277,7 +280,7 @@ class AutogenContext: """Maintains configuration and state that's specific to an autogenerate operation.""" - metadata: Optional[MetaData] = None + metadata: Union[MetaData, Sequence[MetaData], None] = None """The :class:`~sqlalchemy.schema.MetaData` object representing the destination. 
@@ -304,7 +307,7 @@ class AutogenContext: """ - dialect: Optional[Dialect] = None + dialect: Dialect """The :class:`~sqlalchemy.engine.Dialect` object currently in use. This is normally obtained from the @@ -326,13 +329,15 @@ class AutogenContext: """ - migration_context: MigrationContext = None # type: ignore[assignment] + migration_context: MigrationContext """The :class:`.MigrationContext` established by the ``env.py`` script.""" + comparators: PriorityDispatcher + def __init__( self, migration_context: MigrationContext, - metadata: Optional[MetaData] = None, + metadata: Union[MetaData, Sequence[MetaData], None] = None, opts: Optional[Dict[str, Any]] = None, autogenerate: bool = True, ) -> None: @@ -346,6 +351,19 @@ def __init__( "the database for schema information" ) + # branch off from the "global" comparators. This collection + # is empty in Alembic except that it is populated by third party + # extensions that don't use the plugin system. so we will build + # off of whatever is in there. 
+ if autogenerate: + self.comparators = compare.comparators.branch() + Plugin.populate_autogenerate_priority_dispatch( + self.comparators, + include_plugins=migration_context.opts.get( + "autogenerate_plugins", ["alembic.autogenerate.*"] + ), + ) + if opts is None: opts = migration_context.opts @@ -380,9 +398,8 @@ def __init__( self._name_filters = name_filters self.migration_context = migration_context - if self.migration_context is not None: - self.connection = self.migration_context.bind - self.dialect = self.migration_context.dialect + self.connection = self.migration_context.bind + self.dialect = self.migration_context.dialect self.imports = set() self.opts: Dict[str, Any] = opts diff --git a/libs/alembic/autogenerate/compare.py b/libs/alembic/autogenerate/compare.py deleted file mode 100644 index 0d98519643..0000000000 --- a/libs/alembic/autogenerate/compare.py +++ /dev/null @@ -1,1329 +0,0 @@ -# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls -# mypy: no-warn-return-any, allow-any-generics - -from __future__ import annotations - -import contextlib -import logging -import re -from typing import Any -from typing import cast -from typing import Dict -from typing import Iterator -from typing import Mapping -from typing import Optional -from typing import Set -from typing import Tuple -from typing import TYPE_CHECKING -from typing import TypeVar -from typing import Union - -from sqlalchemy import event -from sqlalchemy import inspect -from sqlalchemy import schema as sa_schema -from sqlalchemy import text -from sqlalchemy import types as sqltypes -from sqlalchemy.sql import expression -from sqlalchemy.sql.schema import ForeignKeyConstraint -from sqlalchemy.sql.schema import Index -from sqlalchemy.sql.schema import UniqueConstraint -from sqlalchemy.util import OrderedSet - -from .. 
import util -from ..ddl._autogen import is_index_sig -from ..ddl._autogen import is_uq_sig -from ..operations import ops -from ..util import sqla_compat - -if TYPE_CHECKING: - from typing import Literal - - from sqlalchemy.engine.reflection import Inspector - from sqlalchemy.sql.elements import quoted_name - from sqlalchemy.sql.elements import TextClause - from sqlalchemy.sql.schema import Column - from sqlalchemy.sql.schema import Table - - from alembic.autogenerate.api import AutogenContext - from alembic.ddl.impl import DefaultImpl - from alembic.operations.ops import AlterColumnOp - from alembic.operations.ops import MigrationScript - from alembic.operations.ops import ModifyTableOps - from alembic.operations.ops import UpgradeOps - from ..ddl._autogen import _constraint_sig - - -log = logging.getLogger(__name__) - - -def _populate_migration_script( - autogen_context: AutogenContext, migration_script: MigrationScript -) -> None: - upgrade_ops = migration_script.upgrade_ops_list[-1] - downgrade_ops = migration_script.downgrade_ops_list[-1] - - _produce_net_changes(autogen_context, upgrade_ops) - upgrade_ops.reverse_into(downgrade_ops) - - -comparators = util.Dispatcher(uselist=True) - - -def _produce_net_changes( - autogen_context: AutogenContext, upgrade_ops: UpgradeOps -) -> None: - connection = autogen_context.connection - assert connection is not None - include_schemas = autogen_context.opts.get("include_schemas", False) - - inspector: Inspector = inspect(connection) - - default_schema = connection.dialect.default_schema_name - schemas: Set[Optional[str]] - if include_schemas: - schemas = set(inspector.get_schema_names()) - # replace default schema name with None - schemas.discard("information_schema") - # replace the "default" schema with None - schemas.discard(default_schema) - schemas.add(None) - else: - schemas = {None} - - schemas = { - s for s in schemas if autogen_context.run_name_filters(s, "schema", {}) - } - - assert autogen_context.dialect is not 
None - comparators.dispatch("schema", autogen_context.dialect.name)( - autogen_context, upgrade_ops, schemas - ) - - -@comparators.dispatch_for("schema") -def _autogen_for_tables( - autogen_context: AutogenContext, - upgrade_ops: UpgradeOps, - schemas: Union[Set[None], Set[Optional[str]]], -) -> None: - inspector = autogen_context.inspector - - conn_table_names: Set[Tuple[Optional[str], str]] = set() - - version_table_schema = ( - autogen_context.migration_context.version_table_schema - ) - version_table = autogen_context.migration_context.version_table - - for schema_name in schemas: - tables = set(inspector.get_table_names(schema=schema_name)) - if schema_name == version_table_schema: - tables = tables.difference( - [autogen_context.migration_context.version_table] - ) - - conn_table_names.update( - (schema_name, tname) - for tname in tables - if autogen_context.run_name_filters( - tname, "table", {"schema_name": schema_name} - ) - ) - - metadata_table_names = OrderedSet( - [(table.schema, table.name) for table in autogen_context.sorted_tables] - ).difference([(version_table_schema, version_table)]) - - _compare_tables( - conn_table_names, - metadata_table_names, - inspector, - upgrade_ops, - autogen_context, - ) - - -def _compare_tables( - conn_table_names: set, - metadata_table_names: set, - inspector: Inspector, - upgrade_ops: UpgradeOps, - autogen_context: AutogenContext, -) -> None: - default_schema = inspector.bind.dialect.default_schema_name - - # tables coming from the connection will not have "schema" - # set if it matches default_schema_name; so we need a list - # of table names from local metadata that also have "None" if schema - # == default_schema_name. 
Most setups will be like this anyway but - # some are not (see #170) - metadata_table_names_no_dflt_schema = OrderedSet( - [ - (schema if schema != default_schema else None, tname) - for schema, tname in metadata_table_names - ] - ) - - # to adjust for the MetaData collection storing the tables either - # as "schemaname.tablename" or just "tablename", create a new lookup - # which will match the "non-default-schema" keys to the Table object. - tname_to_table = { - no_dflt_schema: autogen_context.table_key_to_table[ - sa_schema._get_table_key(tname, schema) - ] - for no_dflt_schema, (schema, tname) in zip( - metadata_table_names_no_dflt_schema, metadata_table_names - ) - } - metadata_table_names = metadata_table_names_no_dflt_schema - - for s, tname in metadata_table_names.difference(conn_table_names): - name = "%s.%s" % (s, tname) if s else tname - metadata_table = tname_to_table[(s, tname)] - if autogen_context.run_object_filters( - metadata_table, tname, "table", False, None - ): - upgrade_ops.ops.append( - ops.CreateTableOp.from_table(metadata_table) - ) - log.info("Detected added table %r", name) - modify_table_ops = ops.ModifyTableOps(tname, [], schema=s) - - comparators.dispatch("table")( - autogen_context, - modify_table_ops, - s, - tname, - None, - metadata_table, - ) - if not modify_table_ops.is_empty(): - upgrade_ops.ops.append(modify_table_ops) - - removal_metadata = sa_schema.MetaData() - for s, tname in conn_table_names.difference(metadata_table_names): - name = sa_schema._get_table_key(tname, s) - exists = name in removal_metadata.tables - t = sa_schema.Table(tname, removal_metadata, schema=s) - - if not exists: - event.listen( - t, - "column_reflect", - # fmt: off - autogen_context.migration_context.impl. 
- _compat_autogen_column_reflect - (inspector), - # fmt: on - ) - sqla_compat._reflect_table(inspector, t) - if autogen_context.run_object_filters(t, tname, "table", True, None): - modify_table_ops = ops.ModifyTableOps(tname, [], schema=s) - - comparators.dispatch("table")( - autogen_context, modify_table_ops, s, tname, t, None - ) - if not modify_table_ops.is_empty(): - upgrade_ops.ops.append(modify_table_ops) - - upgrade_ops.ops.append(ops.DropTableOp.from_table(t)) - log.info("Detected removed table %r", name) - - existing_tables = conn_table_names.intersection(metadata_table_names) - - existing_metadata = sa_schema.MetaData() - conn_column_info = {} - for s, tname in existing_tables: - name = sa_schema._get_table_key(tname, s) - exists = name in existing_metadata.tables - t = sa_schema.Table(tname, existing_metadata, schema=s) - if not exists: - event.listen( - t, - "column_reflect", - # fmt: off - autogen_context.migration_context.impl. - _compat_autogen_column_reflect(inspector), - # fmt: on - ) - sqla_compat._reflect_table(inspector, t) - conn_column_info[(s, tname)] = t - - for s, tname in sorted(existing_tables, key=lambda x: (x[0] or "", x[1])): - s = s or None - name = "%s.%s" % (s, tname) if s else tname - metadata_table = tname_to_table[(s, tname)] - conn_table = existing_metadata.tables[name] - - if autogen_context.run_object_filters( - metadata_table, tname, "table", False, conn_table - ): - modify_table_ops = ops.ModifyTableOps(tname, [], schema=s) - with _compare_columns( - s, - tname, - conn_table, - metadata_table, - modify_table_ops, - autogen_context, - inspector, - ): - comparators.dispatch("table")( - autogen_context, - modify_table_ops, - s, - tname, - conn_table, - metadata_table, - ) - - if not modify_table_ops.is_empty(): - upgrade_ops.ops.append(modify_table_ops) - - -_IndexColumnSortingOps: Mapping[str, Any] = util.immutabledict( - { - "asc": expression.asc, - "desc": expression.desc, - "nulls_first": expression.nullsfirst, - 
"nulls_last": expression.nullslast, - "nullsfirst": expression.nullsfirst, # 1_3 name - "nullslast": expression.nullslast, # 1_3 name - } -) - - -def _make_index( - impl: DefaultImpl, params: Dict[str, Any], conn_table: Table -) -> Optional[Index]: - exprs: list[Union[Column[Any], TextClause]] = [] - sorting = params.get("column_sorting") - - for num, col_name in enumerate(params["column_names"]): - item: Union[Column[Any], TextClause] - if col_name is None: - assert "expressions" in params - name = params["expressions"][num] - item = text(name) - else: - name = col_name - item = conn_table.c[col_name] - if sorting and name in sorting: - for operator in sorting[name]: - if operator in _IndexColumnSortingOps: - item = _IndexColumnSortingOps[operator](item) - exprs.append(item) - ix = sa_schema.Index( - params["name"], - *exprs, - unique=params["unique"], - _table=conn_table, - **impl.adjust_reflected_dialect_options(params, "index"), - ) - if "duplicates_constraint" in params: - ix.info["duplicates_constraint"] = params["duplicates_constraint"] - return ix - - -def _make_unique_constraint( - impl: DefaultImpl, params: Dict[str, Any], conn_table: Table -) -> UniqueConstraint: - uq = sa_schema.UniqueConstraint( - *[conn_table.c[cname] for cname in params["column_names"]], - name=params["name"], - **impl.adjust_reflected_dialect_options(params, "unique_constraint"), - ) - if "duplicates_index" in params: - uq.info["duplicates_index"] = params["duplicates_index"] - - return uq - - -def _make_foreign_key( - params: Dict[str, Any], conn_table: Table -) -> ForeignKeyConstraint: - tname = params["referred_table"] - if params["referred_schema"]: - tname = "%s.%s" % (params["referred_schema"], tname) - - options = params.get("options", {}) - - const = sa_schema.ForeignKeyConstraint( - [conn_table.c[cname] for cname in params["constrained_columns"]], - ["%s.%s" % (tname, n) for n in params["referred_columns"]], - onupdate=options.get("onupdate"), - 
ondelete=options.get("ondelete"), - deferrable=options.get("deferrable"), - initially=options.get("initially"), - name=params["name"], - ) - # needed by 0.7 - conn_table.append_constraint(const) - return const - - -@contextlib.contextmanager -def _compare_columns( - schema: Optional[str], - tname: Union[quoted_name, str], - conn_table: Table, - metadata_table: Table, - modify_table_ops: ModifyTableOps, - autogen_context: AutogenContext, - inspector: Inspector, -) -> Iterator[None]: - name = "%s.%s" % (schema, tname) if schema else tname - metadata_col_names = OrderedSet( - c.name for c in metadata_table.c if not c.system - ) - metadata_cols_by_name = { - c.name: c for c in metadata_table.c if not c.system - } - - conn_col_names = { - c.name: c - for c in conn_table.c - if autogen_context.run_name_filters( - c.name, "column", {"table_name": tname, "schema_name": schema} - ) - } - - for cname in metadata_col_names.difference(conn_col_names): - if autogen_context.run_object_filters( - metadata_cols_by_name[cname], cname, "column", False, None - ): - modify_table_ops.ops.append( - ops.AddColumnOp.from_column_and_tablename( - schema, tname, metadata_cols_by_name[cname] - ) - ) - log.info("Detected added column '%s.%s'", name, cname) - - for colname in metadata_col_names.intersection(conn_col_names): - metadata_col = metadata_cols_by_name[colname] - conn_col = conn_table.c[colname] - if not autogen_context.run_object_filters( - metadata_col, colname, "column", False, conn_col - ): - continue - alter_column_op = ops.AlterColumnOp(tname, colname, schema=schema) - - comparators.dispatch("column")( - autogen_context, - alter_column_op, - schema, - tname, - colname, - conn_col, - metadata_col, - ) - - if alter_column_op.has_changes(): - modify_table_ops.ops.append(alter_column_op) - - yield - - for cname in set(conn_col_names).difference(metadata_col_names): - if autogen_context.run_object_filters( - conn_table.c[cname], cname, "column", True, None - ): - 
modify_table_ops.ops.append( - ops.DropColumnOp.from_column_and_tablename( - schema, tname, conn_table.c[cname] - ) - ) - log.info("Detected removed column '%s.%s'", name, cname) - - -_C = TypeVar("_C", bound=Union[UniqueConstraint, ForeignKeyConstraint, Index]) - - -@comparators.dispatch_for("table") -def _compare_indexes_and_uniques( - autogen_context: AutogenContext, - modify_ops: ModifyTableOps, - schema: Optional[str], - tname: Union[quoted_name, str], - conn_table: Optional[Table], - metadata_table: Optional[Table], -) -> None: - inspector = autogen_context.inspector - is_create_table = conn_table is None - is_drop_table = metadata_table is None - impl = autogen_context.migration_context.impl - - # 1a. get raw indexes and unique constraints from metadata ... - if metadata_table is not None: - metadata_unique_constraints = { - uq - for uq in metadata_table.constraints - if isinstance(uq, sa_schema.UniqueConstraint) - } - metadata_indexes = set(metadata_table.indexes) - else: - metadata_unique_constraints = set() - metadata_indexes = set() - - conn_uniques = conn_indexes = frozenset() # type:ignore[var-annotated] - - supports_unique_constraints = False - - unique_constraints_duplicate_unique_indexes = False - - if conn_table is not None: - # 1b. ... 
and from connection, if the table exists - try: - conn_uniques = inspector.get_unique_constraints( # type:ignore[assignment] # noqa - tname, schema=schema - ) - supports_unique_constraints = True - except NotImplementedError: - pass - except TypeError: - # number of arguments is off for the base - # method in SQLAlchemy due to the cache decorator - # not being present - pass - else: - conn_uniques = [ # type:ignore[assignment] - uq - for uq in conn_uniques - if autogen_context.run_name_filters( - uq["name"], - "unique_constraint", - {"table_name": tname, "schema_name": schema}, - ) - ] - for uq in conn_uniques: - if uq.get("duplicates_index"): - unique_constraints_duplicate_unique_indexes = True - try: - conn_indexes = inspector.get_indexes( # type:ignore[assignment] - tname, schema=schema - ) - except NotImplementedError: - pass - else: - conn_indexes = [ # type:ignore[assignment] - ix - for ix in conn_indexes - if autogen_context.run_name_filters( - ix["name"], - "index", - {"table_name": tname, "schema_name": schema}, - ) - ] - - # 2. convert conn-level objects from raw inspector records - # into schema objects - if is_drop_table: - # for DROP TABLE uniques are inline, don't need them - conn_uniques = set() # type:ignore[assignment] - else: - conn_uniques = { # type:ignore[assignment] - _make_unique_constraint(impl, uq_def, conn_table) - for uq_def in conn_uniques - } - - conn_indexes = { # type:ignore[assignment] - index - for index in ( - _make_index(impl, ix, conn_table) for ix in conn_indexes - ) - if index is not None - } - - # 2a. if the dialect dupes unique indexes as unique constraints - # (mysql and oracle), correct for that - - if unique_constraints_duplicate_unique_indexes: - _correct_for_uq_duplicates_uix( - conn_uniques, - conn_indexes, - metadata_unique_constraints, - metadata_indexes, - autogen_context.dialect, - impl, - ) - - # 3. 
give the dialect a chance to omit indexes and constraints that - # we know are either added implicitly by the DB or that the DB - # can't accurately report on - impl.correct_for_autogen_constraints( - conn_uniques, # type: ignore[arg-type] - conn_indexes, # type: ignore[arg-type] - metadata_unique_constraints, - metadata_indexes, - ) - - # 4. organize the constraints into "signature" collections, the - # _constraint_sig() objects provide a consistent facade over both - # Index and UniqueConstraint so we can easily work with them - # interchangeably - metadata_unique_constraints_sig = { - impl._create_metadata_constraint_sig(uq) - for uq in metadata_unique_constraints - } - - metadata_indexes_sig = { - impl._create_metadata_constraint_sig(ix) for ix in metadata_indexes - } - - conn_unique_constraints = { - impl._create_reflected_constraint_sig(uq) for uq in conn_uniques - } - - conn_indexes_sig = { - impl._create_reflected_constraint_sig(ix) for ix in conn_indexes - } - - # 5. index things by name, for those objects that have names - metadata_names = { - cast(str, c.md_name_to_sql_name(autogen_context)): c - for c in metadata_unique_constraints_sig.union(metadata_indexes_sig) - if c.is_named - } - - conn_uniques_by_name: Dict[sqla_compat._ConstraintName, _constraint_sig] - conn_indexes_by_name: Dict[sqla_compat._ConstraintName, _constraint_sig] - - conn_uniques_by_name = {c.name: c for c in conn_unique_constraints} - conn_indexes_by_name = {c.name: c for c in conn_indexes_sig} - conn_names = { - c.name: c - for c in conn_unique_constraints.union(conn_indexes_sig) - if sqla_compat.constraint_name_string(c.name) - } - - doubled_constraints = { - name: (conn_uniques_by_name[name], conn_indexes_by_name[name]) - for name in set(conn_uniques_by_name).intersection( - conn_indexes_by_name - ) - } - - # 6. index things by "column signature", to help with unnamed unique - # constraints. 
- conn_uniques_by_sig = {uq.unnamed: uq for uq in conn_unique_constraints} - metadata_uniques_by_sig = { - uq.unnamed: uq for uq in metadata_unique_constraints_sig - } - unnamed_metadata_uniques = { - uq.unnamed: uq - for uq in metadata_unique_constraints_sig - if not sqla_compat._constraint_is_named( - uq.const, autogen_context.dialect - ) - } - - # assumptions: - # 1. a unique constraint or an index from the connection *always* - # has a name. - # 2. an index on the metadata side *always* has a name. - # 3. a unique constraint on the metadata side *might* have a name. - # 4. The backend may double up indexes as unique constraints and - # vice versa (e.g. MySQL, Postgresql) - - def obj_added(obj: _constraint_sig): - if is_index_sig(obj): - if autogen_context.run_object_filters( - obj.const, obj.name, "index", False, None - ): - modify_ops.ops.append(ops.CreateIndexOp.from_index(obj.const)) - log.info( - "Detected added index '%r' on '%s'", - obj.name, - obj.column_names, - ) - elif is_uq_sig(obj): - if not supports_unique_constraints: - # can't report unique indexes as added if we don't - # detect them - return - if is_create_table or is_drop_table: - # unique constraints are created inline with table defs - return - if autogen_context.run_object_filters( - obj.const, obj.name, "unique_constraint", False, None - ): - modify_ops.ops.append( - ops.AddConstraintOp.from_constraint(obj.const) - ) - log.info( - "Detected added unique constraint %r on '%s'", - obj.name, - obj.column_names, - ) - else: - assert False - - def obj_removed(obj: _constraint_sig): - if is_index_sig(obj): - if obj.is_unique and not supports_unique_constraints: - # many databases double up unique constraints - # as unique indexes. 
without that list we can't - # be sure what we're doing here - return - - if autogen_context.run_object_filters( - obj.const, obj.name, "index", True, None - ): - modify_ops.ops.append(ops.DropIndexOp.from_index(obj.const)) - log.info("Detected removed index %r on %r", obj.name, tname) - elif is_uq_sig(obj): - if is_create_table or is_drop_table: - # if the whole table is being dropped, we don't need to - # consider unique constraint separately - return - if autogen_context.run_object_filters( - obj.const, obj.name, "unique_constraint", True, None - ): - modify_ops.ops.append( - ops.DropConstraintOp.from_constraint(obj.const) - ) - log.info( - "Detected removed unique constraint %r on %r", - obj.name, - tname, - ) - else: - assert False - - def obj_changed( - old: _constraint_sig, - new: _constraint_sig, - msg: str, - ): - if is_index_sig(old): - assert is_index_sig(new) - - if autogen_context.run_object_filters( - new.const, new.name, "index", False, old.const - ): - log.info( - "Detected changed index %r on %r: %s", old.name, tname, msg - ) - modify_ops.ops.append(ops.DropIndexOp.from_index(old.const)) - modify_ops.ops.append(ops.CreateIndexOp.from_index(new.const)) - elif is_uq_sig(old): - assert is_uq_sig(new) - - if autogen_context.run_object_filters( - new.const, new.name, "unique_constraint", False, old.const - ): - log.info( - "Detected changed unique constraint %r on %r: %s", - old.name, - tname, - msg, - ) - modify_ops.ops.append( - ops.DropConstraintOp.from_constraint(old.const) - ) - modify_ops.ops.append( - ops.AddConstraintOp.from_constraint(new.const) - ) - else: - assert False - - for removed_name in sorted(set(conn_names).difference(metadata_names)): - conn_obj = conn_names[removed_name] - if ( - is_uq_sig(conn_obj) - and conn_obj.unnamed in unnamed_metadata_uniques - ): - continue - elif removed_name in doubled_constraints: - conn_uq, conn_idx = doubled_constraints[removed_name] - if ( - all( - conn_idx.unnamed != meta_idx.unnamed - for meta_idx 
in metadata_indexes_sig - ) - and conn_uq.unnamed not in metadata_uniques_by_sig - ): - obj_removed(conn_uq) - obj_removed(conn_idx) - else: - obj_removed(conn_obj) - - for existing_name in sorted(set(metadata_names).intersection(conn_names)): - metadata_obj = metadata_names[existing_name] - - if existing_name in doubled_constraints: - conn_uq, conn_idx = doubled_constraints[existing_name] - if is_index_sig(metadata_obj): - conn_obj = conn_idx - else: - conn_obj = conn_uq - else: - conn_obj = conn_names[existing_name] - - if type(conn_obj) != type(metadata_obj): - obj_removed(conn_obj) - obj_added(metadata_obj) - else: - comparison = metadata_obj.compare_to_reflected(conn_obj) - - if comparison.is_different: - # constraint are different - obj_changed(conn_obj, metadata_obj, comparison.message) - elif comparison.is_skip: - # constraint cannot be compared, skip them - thing = ( - "index" if is_index_sig(conn_obj) else "unique constraint" - ) - log.info( - "Cannot compare %s %r, assuming equal and skipping. %s", - thing, - conn_obj.name, - comparison.message, - ) - else: - # constraint are equal - assert comparison.is_equal - - for added_name in sorted(set(metadata_names).difference(conn_names)): - obj = metadata_names[added_name] - obj_added(obj) - - for uq_sig in unnamed_metadata_uniques: - if uq_sig not in conn_uniques_by_sig: - obj_added(unnamed_metadata_uniques[uq_sig]) - - -def _correct_for_uq_duplicates_uix( - conn_unique_constraints, - conn_indexes, - metadata_unique_constraints, - metadata_indexes, - dialect, - impl, -): - # dedupe unique indexes vs. constraints, since MySQL / Oracle - # doesn't really have unique constraints as a separate construct. - # but look in the metadata and try to maintain constructs - # that already seem to be defined one way or the other - # on that side. This logic was formerly local to MySQL dialect, - # generalized to Oracle and others. 
See #276 - - # resolve final rendered name for unique constraints defined in the - # metadata. this includes truncation of long names. naming convention - # names currently should already be set as cons.name, however leave this - # to the sqla_compat to decide. - metadata_cons_names = [ - (sqla_compat._get_constraint_final_name(cons, dialect), cons) - for cons in metadata_unique_constraints - ] - - metadata_uq_names = { - name for name, cons in metadata_cons_names if name is not None - } - - unnamed_metadata_uqs = { - impl._create_metadata_constraint_sig(cons).unnamed - for name, cons in metadata_cons_names - if name is None - } - - metadata_ix_names = { - sqla_compat._get_constraint_final_name(cons, dialect) - for cons in metadata_indexes - if cons.unique - } - - # for reflection side, names are in their final database form - # already since they're from the database - conn_ix_names = {cons.name: cons for cons in conn_indexes if cons.unique} - - uqs_dupe_indexes = { - cons.name: cons - for cons in conn_unique_constraints - if cons.info["duplicates_index"] - } - - for overlap in uqs_dupe_indexes: - if overlap not in metadata_uq_names: - if ( - impl._create_reflected_constraint_sig( - uqs_dupe_indexes[overlap] - ).unnamed - not in unnamed_metadata_uqs - ): - conn_unique_constraints.discard(uqs_dupe_indexes[overlap]) - elif overlap not in metadata_ix_names: - conn_indexes.discard(conn_ix_names[overlap]) - - -@comparators.dispatch_for("column") -def _compare_nullable( - autogen_context: AutogenContext, - alter_column_op: AlterColumnOp, - schema: Optional[str], - tname: Union[quoted_name, str], - cname: Union[quoted_name, str], - conn_col: Column[Any], - metadata_col: Column[Any], -) -> None: - metadata_col_nullable = metadata_col.nullable - conn_col_nullable = conn_col.nullable - alter_column_op.existing_nullable = conn_col_nullable - - if conn_col_nullable is not metadata_col_nullable: - if ( - sqla_compat._server_default_is_computed( - metadata_col.server_default, 
conn_col.server_default - ) - and sqla_compat._nullability_might_be_unset(metadata_col) - or ( - sqla_compat._server_default_is_identity( - metadata_col.server_default, conn_col.server_default - ) - ) - ): - log.info( - "Ignoring nullable change on identity column '%s.%s'", - tname, - cname, - ) - else: - alter_column_op.modify_nullable = metadata_col_nullable - log.info( - "Detected %s on column '%s.%s'", - "NULL" if metadata_col_nullable else "NOT NULL", - tname, - cname, - ) - - -@comparators.dispatch_for("column") -def _setup_autoincrement( - autogen_context: AutogenContext, - alter_column_op: AlterColumnOp, - schema: Optional[str], - tname: Union[quoted_name, str], - cname: quoted_name, - conn_col: Column[Any], - metadata_col: Column[Any], -) -> None: - if metadata_col.table._autoincrement_column is metadata_col: - alter_column_op.kw["autoincrement"] = True - elif metadata_col.autoincrement is True: - alter_column_op.kw["autoincrement"] = True - elif metadata_col.autoincrement is False: - alter_column_op.kw["autoincrement"] = False - - -@comparators.dispatch_for("column") -def _compare_type( - autogen_context: AutogenContext, - alter_column_op: AlterColumnOp, - schema: Optional[str], - tname: Union[quoted_name, str], - cname: Union[quoted_name, str], - conn_col: Column[Any], - metadata_col: Column[Any], -) -> None: - conn_type = conn_col.type - alter_column_op.existing_type = conn_type - metadata_type = metadata_col.type - if conn_type._type_affinity is sqltypes.NullType: - log.info( - "Couldn't determine database type " "for column '%s.%s'", - tname, - cname, - ) - return - if metadata_type._type_affinity is sqltypes.NullType: - log.info( - "Column '%s.%s' has no type within " "the model; can't compare", - tname, - cname, - ) - return - - isdiff = autogen_context.migration_context._compare_type( - conn_col, metadata_col - ) - - if isdiff: - alter_column_op.modify_type = metadata_type - log.info( - "Detected type change from %r to %r on '%s.%s'", - conn_type, 
- metadata_type, - tname, - cname, - ) - - -def _render_server_default_for_compare( - metadata_default: Optional[Any], autogen_context: AutogenContext -) -> Optional[str]: - if isinstance(metadata_default, sa_schema.DefaultClause): - if isinstance(metadata_default.arg, str): - metadata_default = metadata_default.arg - else: - metadata_default = str( - metadata_default.arg.compile( - dialect=autogen_context.dialect, - compile_kwargs={"literal_binds": True}, - ) - ) - if isinstance(metadata_default, str): - return metadata_default - else: - return None - - -def _normalize_computed_default(sqltext: str) -> str: - """we want to warn if a computed sql expression has changed. however - we don't want false positives and the warning is not that critical. - so filter out most forms of variability from the SQL text. - - """ - - return re.sub(r"[ \(\)'\"`\[\]\t\r\n]", "", sqltext).lower() - - -def _compare_computed_default( - autogen_context: AutogenContext, - alter_column_op: AlterColumnOp, - schema: Optional[str], - tname: str, - cname: str, - conn_col: Column[Any], - metadata_col: Column[Any], -) -> None: - rendered_metadata_default = str( - cast(sa_schema.Computed, metadata_col.server_default).sqltext.compile( - dialect=autogen_context.dialect, - compile_kwargs={"literal_binds": True}, - ) - ) - - # since we cannot change computed columns, we do only a crude comparison - # here where we try to eliminate syntactical differences in order to - # get a minimal comparison just to emit a warning. 
- - rendered_metadata_default = _normalize_computed_default( - rendered_metadata_default - ) - - if isinstance(conn_col.server_default, sa_schema.Computed): - rendered_conn_default = str( - conn_col.server_default.sqltext.compile( - dialect=autogen_context.dialect, - compile_kwargs={"literal_binds": True}, - ) - ) - if rendered_conn_default is None: - rendered_conn_default = "" - else: - rendered_conn_default = _normalize_computed_default( - rendered_conn_default - ) - else: - rendered_conn_default = "" - - if rendered_metadata_default != rendered_conn_default: - _warn_computed_not_supported(tname, cname) - - -def _warn_computed_not_supported(tname: str, cname: str) -> None: - util.warn("Computed default on %s.%s cannot be modified" % (tname, cname)) - - -def _compare_identity_default( - autogen_context, - alter_column_op, - schema, - tname, - cname, - conn_col, - metadata_col, -): - impl = autogen_context.migration_context.impl - diff, ignored_attr, is_alter = impl._compare_identity_default( - metadata_col.server_default, conn_col.server_default - ) - - return diff, is_alter - - -@comparators.dispatch_for("column") -def _compare_server_default( - autogen_context: AutogenContext, - alter_column_op: AlterColumnOp, - schema: Optional[str], - tname: Union[quoted_name, str], - cname: Union[quoted_name, str], - conn_col: Column[Any], - metadata_col: Column[Any], -) -> Optional[bool]: - metadata_default = metadata_col.server_default - conn_col_default = conn_col.server_default - if conn_col_default is None and metadata_default is None: - return False - - if sqla_compat._server_default_is_computed(metadata_default): - # return False in case of a computed column as the server - # default. Note that DDL for adding or removing "GENERATED AS" from - # an existing column is not currently known for any backend. - # Once SQLAlchemy can reflect "GENERATED" as the "computed" element, - # we would also want to ignore and/or warn for changes vs. 
the - # metadata (or support backend specific DDL if applicable). - if not sqla_compat.has_computed_reflection: - return False - - else: - return ( - _compare_computed_default( # type:ignore[func-returns-value] - autogen_context, - alter_column_op, - schema, - tname, - cname, - conn_col, - metadata_col, - ) - ) - if sqla_compat._server_default_is_computed(conn_col_default): - _warn_computed_not_supported(tname, cname) - return False - - if sqla_compat._server_default_is_identity( - metadata_default, conn_col_default - ): - alter_column_op.existing_server_default = conn_col_default - diff, is_alter = _compare_identity_default( - autogen_context, - alter_column_op, - schema, - tname, - cname, - conn_col, - metadata_col, - ) - if is_alter: - alter_column_op.modify_server_default = metadata_default - if diff: - log.info( - "Detected server default on column '%s.%s': " - "identity options attributes %s", - tname, - cname, - sorted(diff), - ) - else: - rendered_metadata_default = _render_server_default_for_compare( - metadata_default, autogen_context - ) - - rendered_conn_default = ( - cast(Any, conn_col_default).arg.text if conn_col_default else None - ) - - alter_column_op.existing_server_default = conn_col_default - - is_diff = autogen_context.migration_context._compare_server_default( - conn_col, - metadata_col, - rendered_metadata_default, - rendered_conn_default, - ) - if is_diff: - alter_column_op.modify_server_default = metadata_default - log.info("Detected server default on column '%s.%s'", tname, cname) - - return None - - -@comparators.dispatch_for("column") -def _compare_column_comment( - autogen_context: AutogenContext, - alter_column_op: AlterColumnOp, - schema: Optional[str], - tname: Union[quoted_name, str], - cname: quoted_name, - conn_col: Column[Any], - metadata_col: Column[Any], -) -> Optional[Literal[False]]: - assert autogen_context.dialect is not None - if not autogen_context.dialect.supports_comments: - return None - - metadata_comment = 
metadata_col.comment - conn_col_comment = conn_col.comment - if conn_col_comment is None and metadata_comment is None: - return False - - alter_column_op.existing_comment = conn_col_comment - - if conn_col_comment != metadata_comment: - alter_column_op.modify_comment = metadata_comment - log.info("Detected column comment '%s.%s'", tname, cname) - - return None - - -@comparators.dispatch_for("table") -def _compare_foreign_keys( - autogen_context: AutogenContext, - modify_table_ops: ModifyTableOps, - schema: Optional[str], - tname: Union[quoted_name, str], - conn_table: Table, - metadata_table: Table, -) -> None: - # if we're doing CREATE TABLE, all FKs are created - # inline within the table def - if conn_table is None or metadata_table is None: - return - - inspector = autogen_context.inspector - metadata_fks = { - fk - for fk in metadata_table.constraints - if isinstance(fk, sa_schema.ForeignKeyConstraint) - } - - conn_fks_list = [ - fk - for fk in inspector.get_foreign_keys(tname, schema=schema) - if autogen_context.run_name_filters( - fk["name"], - "foreign_key_constraint", - {"table_name": tname, "schema_name": schema}, - ) - ] - - conn_fks = { - _make_foreign_key(const, conn_table) # type: ignore[arg-type] - for const in conn_fks_list - } - - impl = autogen_context.migration_context.impl - - # give the dialect a chance to correct the FKs to match more - # closely - autogen_context.migration_context.impl.correct_for_autogen_foreignkeys( - conn_fks, metadata_fks - ) - - metadata_fks_sig = { - impl._create_metadata_constraint_sig(fk) for fk in metadata_fks - } - - conn_fks_sig = { - impl._create_reflected_constraint_sig(fk) for fk in conn_fks - } - - # check if reflected FKs include options, indicating the backend - # can reflect FK options - if conn_fks_list and "options" in conn_fks_list[0]: - conn_fks_by_sig = {c.unnamed: c for c in conn_fks_sig} - metadata_fks_by_sig = {c.unnamed: c for c in metadata_fks_sig} - else: - # otherwise compare by sig without 
options added - conn_fks_by_sig = {c.unnamed_no_options: c for c in conn_fks_sig} - metadata_fks_by_sig = { - c.unnamed_no_options: c for c in metadata_fks_sig - } - - metadata_fks_by_name = { - c.name: c for c in metadata_fks_sig if c.name is not None - } - conn_fks_by_name = {c.name: c for c in conn_fks_sig if c.name is not None} - - def _add_fk(obj, compare_to): - if autogen_context.run_object_filters( - obj.const, obj.name, "foreign_key_constraint", False, compare_to - ): - modify_table_ops.ops.append( - ops.CreateForeignKeyOp.from_constraint(const.const) # type: ignore[has-type] # noqa: E501 - ) - - log.info( - "Detected added foreign key (%s)(%s) on table %s%s", - ", ".join(obj.source_columns), - ", ".join(obj.target_columns), - "%s." % obj.source_schema if obj.source_schema else "", - obj.source_table, - ) - - def _remove_fk(obj, compare_to): - if autogen_context.run_object_filters( - obj.const, obj.name, "foreign_key_constraint", True, compare_to - ): - modify_table_ops.ops.append( - ops.DropConstraintOp.from_constraint(obj.const) - ) - log.info( - "Detected removed foreign key (%s)(%s) on table %s%s", - ", ".join(obj.source_columns), - ", ".join(obj.target_columns), - "%s." % obj.source_schema if obj.source_schema else "", - obj.source_table, - ) - - # so far it appears we don't need to do this by name at all. 
- # SQLite doesn't preserve constraint names anyway - - for removed_sig in set(conn_fks_by_sig).difference(metadata_fks_by_sig): - const = conn_fks_by_sig[removed_sig] - if removed_sig not in metadata_fks_by_sig: - compare_to = ( - metadata_fks_by_name[const.name].const - if const.name in metadata_fks_by_name - else None - ) - _remove_fk(const, compare_to) - - for added_sig in set(metadata_fks_by_sig).difference(conn_fks_by_sig): - const = metadata_fks_by_sig[added_sig] - if added_sig not in conn_fks_by_sig: - compare_to = ( - conn_fks_by_name[const.name].const - if const.name in conn_fks_by_name - else None - ) - _add_fk(const, compare_to) - - -@comparators.dispatch_for("table") -def _compare_table_comment( - autogen_context: AutogenContext, - modify_table_ops: ModifyTableOps, - schema: Optional[str], - tname: Union[quoted_name, str], - conn_table: Optional[Table], - metadata_table: Optional[Table], -) -> None: - assert autogen_context.dialect is not None - if not autogen_context.dialect.supports_comments: - return - - # if we're doing CREATE TABLE, comments will be created inline - # with the create_table op. 
- if conn_table is None or metadata_table is None: - return - - if conn_table.comment is None and metadata_table.comment is None: - return - - if metadata_table.comment is None and conn_table.comment is not None: - modify_table_ops.ops.append( - ops.DropTableCommentOp( - tname, existing_comment=conn_table.comment, schema=schema - ) - ) - elif metadata_table.comment != conn_table.comment: - modify_table_ops.ops.append( - ops.CreateTableCommentOp( - tname, - metadata_table.comment, - existing_comment=conn_table.comment, - schema=schema, - ) - ) diff --git a/libs/alembic/autogenerate/compare/__init__.py b/libs/alembic/autogenerate/compare/__init__.py new file mode 100644 index 0000000000..a49640cf86 --- /dev/null +++ b/libs/alembic/autogenerate/compare/__init__.py @@ -0,0 +1,62 @@ +from __future__ import annotations + +import logging +from typing import TYPE_CHECKING + +from . import comments +from . import constraints +from . import schema +from . import server_defaults +from . import tables +from . import types +from ... import util +from ...runtime.plugins import Plugin + +if TYPE_CHECKING: + from ..api import AutogenContext + from ...operations.ops import MigrationScript + from ...operations.ops import UpgradeOps + + +log = logging.getLogger(__name__) + +comparators = util.PriorityDispatcher() +"""global registry which alembic keeps empty, but copies when creating +a new AutogenContext. + +This is to support a variety of third party plugins that hook their autogen +functionality onto this collection. 
+ +""" + + +def _populate_migration_script( + autogen_context: AutogenContext, migration_script: MigrationScript +) -> None: + upgrade_ops = migration_script.upgrade_ops_list[-1] + downgrade_ops = migration_script.downgrade_ops_list[-1] + + _produce_net_changes(autogen_context, upgrade_ops) + upgrade_ops.reverse_into(downgrade_ops) + + +def _produce_net_changes( + autogen_context: AutogenContext, upgrade_ops: UpgradeOps +) -> None: + assert autogen_context.dialect is not None + + autogen_context.comparators.dispatch( + "autogenerate", qualifier=autogen_context.dialect.name + )(autogen_context, upgrade_ops) + + +Plugin.setup_plugin_from_module(schema, "alembic.autogenerate.schemas") +Plugin.setup_plugin_from_module(tables, "alembic.autogenerate.tables") +Plugin.setup_plugin_from_module(types, "alembic.autogenerate.types") +Plugin.setup_plugin_from_module( + constraints, "alembic.autogenerate.constraints" +) +Plugin.setup_plugin_from_module( + server_defaults, "alembic.autogenerate.defaults" +) +Plugin.setup_plugin_from_module(comments, "alembic.autogenerate.comments") diff --git a/libs/alembic/autogenerate/compare/comments.py b/libs/alembic/autogenerate/compare/comments.py new file mode 100644 index 0000000000..70de74e2d7 --- /dev/null +++ b/libs/alembic/autogenerate/compare/comments.py @@ -0,0 +1,106 @@ +from __future__ import annotations + +import logging +from typing import Any +from typing import Optional +from typing import TYPE_CHECKING +from typing import Union + +from ...operations import ops +from ...util import PriorityDispatchResult + +if TYPE_CHECKING: + + from sqlalchemy.sql.elements import quoted_name + from sqlalchemy.sql.schema import Column + from sqlalchemy.sql.schema import Table + + from ..api import AutogenContext + from ...operations.ops import AlterColumnOp + from ...operations.ops import ModifyTableOps + from ...runtime.plugins import Plugin + +log = logging.getLogger(__name__) + + +def _compare_column_comment( + autogen_context: 
AutogenContext, + alter_column_op: AlterColumnOp, + schema: Optional[str], + tname: Union[quoted_name, str], + cname: quoted_name, + conn_col: Column[Any], + metadata_col: Column[Any], +) -> PriorityDispatchResult: + assert autogen_context.dialect is not None + if not autogen_context.dialect.supports_comments: + return PriorityDispatchResult.CONTINUE + + metadata_comment = metadata_col.comment + conn_col_comment = conn_col.comment + if conn_col_comment is None and metadata_comment is None: + return PriorityDispatchResult.CONTINUE + + alter_column_op.existing_comment = conn_col_comment + + if conn_col_comment != metadata_comment: + alter_column_op.modify_comment = metadata_comment + log.info("Detected column comment '%s.%s'", tname, cname) + + return PriorityDispatchResult.STOP + else: + return PriorityDispatchResult.CONTINUE + + +def _compare_table_comment( + autogen_context: AutogenContext, + modify_table_ops: ModifyTableOps, + schema: Optional[str], + tname: Union[quoted_name, str], + conn_table: Optional[Table], + metadata_table: Optional[Table], +) -> PriorityDispatchResult: + assert autogen_context.dialect is not None + if not autogen_context.dialect.supports_comments: + return PriorityDispatchResult.CONTINUE + + # if we're doing CREATE TABLE, comments will be created inline + # with the create_table op. 
+ if conn_table is None or metadata_table is None: + return PriorityDispatchResult.CONTINUE + + if conn_table.comment is None and metadata_table.comment is None: + return PriorityDispatchResult.CONTINUE + + if metadata_table.comment is None and conn_table.comment is not None: + modify_table_ops.ops.append( + ops.DropTableCommentOp( + tname, existing_comment=conn_table.comment, schema=schema + ) + ) + return PriorityDispatchResult.STOP + elif metadata_table.comment != conn_table.comment: + modify_table_ops.ops.append( + ops.CreateTableCommentOp( + tname, + metadata_table.comment, + existing_comment=conn_table.comment, + schema=schema, + ) + ) + return PriorityDispatchResult.STOP + + return PriorityDispatchResult.CONTINUE + + +def setup(plugin: Plugin) -> None: + plugin.add_autogenerate_comparator( + _compare_column_comment, + "column", + "comments", + ) + plugin.add_autogenerate_comparator( + _compare_table_comment, + "table", + "comments", + ) diff --git a/libs/alembic/autogenerate/compare/constraints.py b/libs/alembic/autogenerate/compare/constraints.py new file mode 100644 index 0000000000..ae1f20e4b1 --- /dev/null +++ b/libs/alembic/autogenerate/compare/constraints.py @@ -0,0 +1,812 @@ +# mypy: allow-untyped-defs, allow-untyped-calls, allow-incomplete-defs + +from __future__ import annotations + +import logging +from typing import Any +from typing import cast +from typing import Collection +from typing import Dict +from typing import Mapping +from typing import Optional +from typing import TYPE_CHECKING +from typing import TypeVar +from typing import Union + +from sqlalchemy import schema as sa_schema +from sqlalchemy import text +from sqlalchemy.sql import expression +from sqlalchemy.sql.schema import ForeignKeyConstraint +from sqlalchemy.sql.schema import Index +from sqlalchemy.sql.schema import UniqueConstraint + +from .util import _InspectorConv +from ... 
import util +from ...ddl._autogen import is_index_sig +from ...ddl._autogen import is_uq_sig +from ...operations import ops +from ...util import PriorityDispatchResult +from ...util import sqla_compat + +if TYPE_CHECKING: + from sqlalchemy.engine.interfaces import ReflectedForeignKeyConstraint + from sqlalchemy.engine.interfaces import ReflectedIndex + from sqlalchemy.engine.interfaces import ReflectedUniqueConstraint + from sqlalchemy.sql.elements import quoted_name + from sqlalchemy.sql.elements import TextClause + from sqlalchemy.sql.schema import Column + from sqlalchemy.sql.schema import Table + + from ...autogenerate.api import AutogenContext + from ...ddl._autogen import _constraint_sig + from ...ddl.impl import DefaultImpl + from ...operations.ops import AlterColumnOp + from ...operations.ops import ModifyTableOps + from ...runtime.plugins import Plugin + +_C = TypeVar("_C", bound=Union[UniqueConstraint, ForeignKeyConstraint, Index]) + + +log = logging.getLogger(__name__) + + +def _compare_indexes_and_uniques( + autogen_context: AutogenContext, + modify_ops: ModifyTableOps, + schema: Optional[str], + tname: Union[quoted_name, str], + conn_table: Optional[Table], + metadata_table: Optional[Table], +) -> PriorityDispatchResult: + inspector = autogen_context.inspector + is_create_table = conn_table is None + is_drop_table = metadata_table is None + impl = autogen_context.migration_context.impl + + # 1a. get raw indexes and unique constraints from metadata ... 
+ if metadata_table is not None: + metadata_unique_constraints = { + uq + for uq in metadata_table.constraints + if isinstance(uq, sa_schema.UniqueConstraint) + } + metadata_indexes = set(metadata_table.indexes) + else: + metadata_unique_constraints = set() + metadata_indexes = set() + + conn_uniques: Collection[UniqueConstraint] = frozenset() + conn_indexes: Collection[Index] = frozenset() + + supports_unique_constraints = False + + unique_constraints_duplicate_unique_indexes = False + + if conn_table is not None: + conn_uniques_reflected: Collection[ReflectedUniqueConstraint] = ( + frozenset() + ) + conn_indexes_reflected: Collection[ReflectedIndex] = frozenset() + + # 1b. ... and from connection, if the table exists + try: + conn_uniques_reflected = _InspectorConv( + inspector + ).get_unique_constraints(tname, schema=schema) + + supports_unique_constraints = True + except NotImplementedError: + pass + except TypeError: + # number of arguments is off for the base + # method in SQLAlchemy due to the cache decorator + # not being present + pass + else: + conn_uniques_reflected = [ + uq + for uq in conn_uniques_reflected + if autogen_context.run_name_filters( + uq["name"], + "unique_constraint", + {"table_name": tname, "schema_name": schema}, + ) + ] + for uq in conn_uniques_reflected: + if uq.get("duplicates_index"): + unique_constraints_duplicate_unique_indexes = True + try: + conn_indexes_reflected = _InspectorConv(inspector).get_indexes( + tname, schema=schema + ) + except NotImplementedError: + pass + else: + conn_indexes_reflected = [ + ix + for ix in conn_indexes_reflected + if autogen_context.run_name_filters( + ix["name"], + "index", + {"table_name": tname, "schema_name": schema}, + ) + ] + + # 2. 
convert conn-level objects from raw inspector records + # into schema objects + if is_drop_table: + # for DROP TABLE uniques are inline, don't need them + conn_uniques = set() + else: + conn_uniques = { + _make_unique_constraint(impl, uq_def, conn_table) + for uq_def in conn_uniques_reflected + } + + conn_indexes = { + index + for index in ( + _make_index(impl, ix, conn_table) + for ix in conn_indexes_reflected + ) + if index is not None + } + + # 2a. if the dialect dupes unique indexes as unique constraints + # (mysql and oracle), correct for that + + if unique_constraints_duplicate_unique_indexes: + _correct_for_uq_duplicates_uix( + conn_uniques, + conn_indexes, + metadata_unique_constraints, + metadata_indexes, + autogen_context.dialect, + impl, + ) + + # 3. give the dialect a chance to omit indexes and constraints that + # we know are either added implicitly by the DB or that the DB + # can't accurately report on + impl.correct_for_autogen_constraints( + conn_uniques, # type: ignore[arg-type] + conn_indexes, # type: ignore[arg-type] + metadata_unique_constraints, + metadata_indexes, + ) + + # 4. organize the constraints into "signature" collections, the + # _constraint_sig() objects provide a consistent facade over both + # Index and UniqueConstraint so we can easily work with them + # interchangeably + metadata_unique_constraints_sig = { + impl._create_metadata_constraint_sig(uq) + for uq in metadata_unique_constraints + } + + metadata_indexes_sig = { + impl._create_metadata_constraint_sig(ix) for ix in metadata_indexes + } + + conn_unique_constraints = { + impl._create_reflected_constraint_sig(uq) for uq in conn_uniques + } + + conn_indexes_sig = { + impl._create_reflected_constraint_sig(ix) for ix in conn_indexes + } + + # 5. 
index things by name, for those objects that have names + metadata_names = { + cast(str, c.md_name_to_sql_name(autogen_context)): c + for c in metadata_unique_constraints_sig.union(metadata_indexes_sig) + if c.is_named + } + + conn_uniques_by_name: Dict[ + sqla_compat._ConstraintName, + _constraint_sig[sa_schema.UniqueConstraint], + ] + conn_indexes_by_name: Dict[ + sqla_compat._ConstraintName, _constraint_sig[sa_schema.Index] + ] + + conn_uniques_by_name = {c.name: c for c in conn_unique_constraints} + conn_indexes_by_name = {c.name: c for c in conn_indexes_sig} + conn_names = { + c.name: c + for c in conn_unique_constraints.union(conn_indexes_sig) + if sqla_compat.constraint_name_string(c.name) + } + + doubled_constraints = { + name: (conn_uniques_by_name[name], conn_indexes_by_name[name]) + for name in set(conn_uniques_by_name).intersection( + conn_indexes_by_name + ) + } + + # 6. index things by "column signature", to help with unnamed unique + # constraints. + conn_uniques_by_sig = {uq.unnamed: uq for uq in conn_unique_constraints} + metadata_uniques_by_sig = { + uq.unnamed: uq for uq in metadata_unique_constraints_sig + } + unnamed_metadata_uniques = { + uq.unnamed: uq + for uq in metadata_unique_constraints_sig + if not sqla_compat._constraint_is_named( + uq.const, autogen_context.dialect + ) + } + + # assumptions: + # 1. a unique constraint or an index from the connection *always* + # has a name. + # 2. an index on the metadata side *always* has a name. + # 3. a unique constraint on the metadata side *might* have a name. + # 4. The backend may double up indexes as unique constraints and + # vice versa (e.g. 
MySQL, Postgresql) + + def obj_added( + obj: ( + _constraint_sig[sa_schema.UniqueConstraint] + | _constraint_sig[sa_schema.Index] + ), + ): + if is_index_sig(obj): + if autogen_context.run_object_filters( + obj.const, obj.name, "index", False, None + ): + modify_ops.ops.append(ops.CreateIndexOp.from_index(obj.const)) + log.info( + "Detected added index %r on '%s'", + obj.name, + obj.column_names, + ) + elif is_uq_sig(obj): + if not supports_unique_constraints: + # can't report unique indexes as added if we don't + # detect them + return + if is_create_table or is_drop_table: + # unique constraints are created inline with table defs + return + if autogen_context.run_object_filters( + obj.const, obj.name, "unique_constraint", False, None + ): + modify_ops.ops.append( + ops.AddConstraintOp.from_constraint(obj.const) + ) + log.info( + "Detected added unique constraint %r on '%s'", + obj.name, + obj.column_names, + ) + else: + assert False + + def obj_removed( + obj: ( + _constraint_sig[sa_schema.UniqueConstraint] + | _constraint_sig[sa_schema.Index] + ), + ): + if is_index_sig(obj): + if obj.is_unique and not supports_unique_constraints: + # many databases double up unique constraints + # as unique indexes. 
without that list we can't + # be sure what we're doing here + return + + if autogen_context.run_object_filters( + obj.const, obj.name, "index", True, None + ): + modify_ops.ops.append(ops.DropIndexOp.from_index(obj.const)) + log.info("Detected removed index %r on %r", obj.name, tname) + elif is_uq_sig(obj): + if is_create_table or is_drop_table: + # if the whole table is being dropped, we don't need to + # consider unique constraint separately + return + if autogen_context.run_object_filters( + obj.const, obj.name, "unique_constraint", True, None + ): + modify_ops.ops.append( + ops.DropConstraintOp.from_constraint(obj.const) + ) + log.info( + "Detected removed unique constraint %r on %r", + obj.name, + tname, + ) + else: + assert False + + def obj_changed( + old: ( + _constraint_sig[sa_schema.UniqueConstraint] + | _constraint_sig[sa_schema.Index] + ), + new: ( + _constraint_sig[sa_schema.UniqueConstraint] + | _constraint_sig[sa_schema.Index] + ), + msg: str, + ): + if is_index_sig(old): + assert is_index_sig(new) + + if autogen_context.run_object_filters( + new.const, new.name, "index", False, old.const + ): + log.info( + "Detected changed index %r on %r: %s", old.name, tname, msg + ) + modify_ops.ops.append(ops.DropIndexOp.from_index(old.const)) + modify_ops.ops.append(ops.CreateIndexOp.from_index(new.const)) + elif is_uq_sig(old): + assert is_uq_sig(new) + + if autogen_context.run_object_filters( + new.const, new.name, "unique_constraint", False, old.const + ): + log.info( + "Detected changed unique constraint %r on %r: %s", + old.name, + tname, + msg, + ) + modify_ops.ops.append( + ops.DropConstraintOp.from_constraint(old.const) + ) + modify_ops.ops.append( + ops.AddConstraintOp.from_constraint(new.const) + ) + else: + assert False + + for removed_name in sorted(set(conn_names).difference(metadata_names)): + conn_obj = conn_names[removed_name] + if ( + is_uq_sig(conn_obj) + and conn_obj.unnamed in unnamed_metadata_uniques + ): + continue + elif removed_name in 
doubled_constraints: + conn_uq, conn_idx = doubled_constraints[removed_name] + if ( + all( + conn_idx.unnamed != meta_idx.unnamed + for meta_idx in metadata_indexes_sig + ) + and conn_uq.unnamed not in metadata_uniques_by_sig + ): + obj_removed(conn_uq) + obj_removed(conn_idx) + else: + obj_removed(conn_obj) + + for existing_name in sorted(set(metadata_names).intersection(conn_names)): + metadata_obj = metadata_names[existing_name] + + if existing_name in doubled_constraints: + conn_uq, conn_idx = doubled_constraints[existing_name] + if is_index_sig(metadata_obj): + conn_obj = conn_idx + else: + conn_obj = conn_uq + else: + conn_obj = conn_names[existing_name] + + if type(conn_obj) != type(metadata_obj): + obj_removed(conn_obj) + obj_added(metadata_obj) + else: + # TODO: for plugins, let's do is_index_sig / is_uq_sig + # here so we know index or unique, then + # do a sub-dispatch, + # autogen_context.comparators.dispatch("index") + # or + # autogen_context.comparators.dispatch("unique_constraint") + # + comparison = metadata_obj.compare_to_reflected(conn_obj) + + if comparison.is_different: + # constraint are different + obj_changed(conn_obj, metadata_obj, comparison.message) + elif comparison.is_skip: + # constraint cannot be compared, skip them + thing = ( + "index" if is_index_sig(conn_obj) else "unique constraint" + ) + log.info( + "Cannot compare %s %r, assuming equal and skipping. 
%s", + thing, + conn_obj.name, + comparison.message, + ) + else: + # constraint are equal + assert comparison.is_equal + + for added_name in sorted(set(metadata_names).difference(conn_names)): + obj = metadata_names[added_name] + obj_added(obj) + + for uq_sig in unnamed_metadata_uniques: + if uq_sig not in conn_uniques_by_sig: + obj_added(unnamed_metadata_uniques[uq_sig]) + + return PriorityDispatchResult.CONTINUE + + +def _correct_for_uq_duplicates_uix( + conn_unique_constraints, + conn_indexes, + metadata_unique_constraints, + metadata_indexes, + dialect, + impl, +): + # dedupe unique indexes vs. constraints, since MySQL / Oracle + # doesn't really have unique constraints as a separate construct. + # but look in the metadata and try to maintain constructs + # that already seem to be defined one way or the other + # on that side. This logic was formerly local to MySQL dialect, + # generalized to Oracle and others. See #276 + + # resolve final rendered name for unique constraints defined in the + # metadata. this includes truncation of long names. naming convention + # names currently should already be set as cons.name, however leave this + # to the sqla_compat to decide. 
+ metadata_cons_names = [ + (sqla_compat._get_constraint_final_name(cons, dialect), cons) + for cons in metadata_unique_constraints + ] + + metadata_uq_names = { + name for name, cons in metadata_cons_names if name is not None + } + + unnamed_metadata_uqs = { + impl._create_metadata_constraint_sig(cons).unnamed + for name, cons in metadata_cons_names + if name is None + } + + metadata_ix_names = { + sqla_compat._get_constraint_final_name(cons, dialect) + for cons in metadata_indexes + if cons.unique + } + + # for reflection side, names are in their final database form + # already since they're from the database + conn_ix_names = {cons.name: cons for cons in conn_indexes if cons.unique} + + uqs_dupe_indexes = { + cons.name: cons + for cons in conn_unique_constraints + if cons.info["duplicates_index"] + } + + for overlap in uqs_dupe_indexes: + if overlap not in metadata_uq_names: + if ( + impl._create_reflected_constraint_sig( + uqs_dupe_indexes[overlap] + ).unnamed + not in unnamed_metadata_uqs + ): + conn_unique_constraints.discard(uqs_dupe_indexes[overlap]) + elif overlap not in metadata_ix_names: + conn_indexes.discard(conn_ix_names[overlap]) + + +_IndexColumnSortingOps: Mapping[str, Any] = util.immutabledict( + { + "asc": expression.asc, + "desc": expression.desc, + "nulls_first": expression.nullsfirst, + "nulls_last": expression.nullslast, + "nullsfirst": expression.nullsfirst, # 1_3 name + "nullslast": expression.nullslast, # 1_3 name + } +) + + +def _make_index( + impl: DefaultImpl, params: ReflectedIndex, conn_table: Table +) -> Optional[Index]: + exprs: list[Union[Column[Any], TextClause]] = [] + sorting = params.get("column_sorting") + + for num, col_name in enumerate(params["column_names"]): + item: Union[Column[Any], TextClause] + if col_name is None: + assert "expressions" in params + name = params["expressions"][num] + item = text(name) + else: + name = col_name + item = conn_table.c[col_name] + if sorting and name in sorting: + for operator in 
sorting[name]: + if operator in _IndexColumnSortingOps: + item = _IndexColumnSortingOps[operator](item) + exprs.append(item) + ix = sa_schema.Index( + params["name"], + *exprs, + unique=params["unique"], + _table=conn_table, + **impl.adjust_reflected_dialect_options(params, "index"), + ) + if "duplicates_constraint" in params: + ix.info["duplicates_constraint"] = params["duplicates_constraint"] + return ix + + +def _make_unique_constraint( + impl: DefaultImpl, params: ReflectedUniqueConstraint, conn_table: Table +) -> UniqueConstraint: + uq = sa_schema.UniqueConstraint( + *[conn_table.c[cname] for cname in params["column_names"]], + name=params["name"], + **impl.adjust_reflected_dialect_options(params, "unique_constraint"), + ) + if "duplicates_index" in params: + uq.info["duplicates_index"] = params["duplicates_index"] + + return uq + + +def _make_foreign_key( + params: ReflectedForeignKeyConstraint, conn_table: Table +) -> ForeignKeyConstraint: + tname = params["referred_table"] + if params["referred_schema"]: + tname = "%s.%s" % (params["referred_schema"], tname) + + options = params.get("options", {}) + + const = sa_schema.ForeignKeyConstraint( + [conn_table.c[cname] for cname in params["constrained_columns"]], + ["%s.%s" % (tname, n) for n in params["referred_columns"]], + onupdate=options.get("onupdate"), + ondelete=options.get("ondelete"), + deferrable=options.get("deferrable"), + initially=options.get("initially"), + name=params["name"], + ) + + referred_schema = params["referred_schema"] + referred_table = params["referred_table"] + + remote_table_key = sqla_compat._get_table_key( + referred_table, referred_schema + ) + if remote_table_key not in conn_table.metadata: + # create a placeholder table + sa_schema.Table( + referred_table, + conn_table.metadata, + schema=( + referred_schema + if referred_schema is not None + else sa_schema.BLANK_SCHEMA + ), + *[ + sa_schema.Column(remote, conn_table.c[local].type) + for local, remote in zip( + 
params["constrained_columns"], params["referred_columns"] + ) + ], + info={"alembic_placeholder": True}, + ) + elif conn_table.metadata.tables[remote_table_key].info.get( + "alembic_placeholder" + ): + # table exists and is a placeholder; ensure needed columns are present + placeholder_table = conn_table.metadata.tables[remote_table_key] + for local, remote in zip( + params["constrained_columns"], params["referred_columns"] + ): + if remote not in placeholder_table.c: + placeholder_table.append_column( + sa_schema.Column(remote, conn_table.c[local].type) + ) + + # needed by 0.7 + conn_table.append_constraint(const) + return const + + +def _compare_foreign_keys( + autogen_context: AutogenContext, + modify_table_ops: ModifyTableOps, + schema: Optional[str], + tname: Union[quoted_name, str], + conn_table: Table, + metadata_table: Table, +) -> PriorityDispatchResult: + # if we're doing CREATE TABLE, all FKs are created + # inline within the table def + + if conn_table is None or metadata_table is None: + return PriorityDispatchResult.CONTINUE + + inspector = autogen_context.inspector + metadata_fks = { + fk + for fk in metadata_table.constraints + if isinstance(fk, sa_schema.ForeignKeyConstraint) + } + + conn_fks_list = [ + fk + for fk in _InspectorConv(inspector).get_foreign_keys( + tname, schema=schema + ) + if autogen_context.run_name_filters( + fk["name"], + "foreign_key_constraint", + {"table_name": tname, "schema_name": schema}, + ) + ] + + conn_fks = { + _make_foreign_key(const, conn_table) for const in conn_fks_list + } + + impl = autogen_context.migration_context.impl + + # give the dialect a chance to correct the FKs to match more + # closely + autogen_context.migration_context.impl.correct_for_autogen_foreignkeys( + conn_fks, metadata_fks + ) + + metadata_fks_sig = { + impl._create_metadata_constraint_sig(fk) for fk in metadata_fks + } + + conn_fks_sig = { + impl._create_reflected_constraint_sig(fk) for fk in conn_fks + } + + # check if reflected FKs include 
options, indicating the backend + # can reflect FK options + if conn_fks_list and "options" in conn_fks_list[0]: + conn_fks_by_sig = {c.unnamed: c for c in conn_fks_sig} + metadata_fks_by_sig = {c.unnamed: c for c in metadata_fks_sig} + else: + # otherwise compare by sig without options added + conn_fks_by_sig = {c.unnamed_no_options: c for c in conn_fks_sig} + metadata_fks_by_sig = { + c.unnamed_no_options: c for c in metadata_fks_sig + } + + metadata_fks_by_name = { + c.name: c for c in metadata_fks_sig if c.name is not None + } + conn_fks_by_name = {c.name: c for c in conn_fks_sig if c.name is not None} + + def _add_fk(obj, compare_to): + if autogen_context.run_object_filters( + obj.const, obj.name, "foreign_key_constraint", False, compare_to + ): + modify_table_ops.ops.append( + ops.CreateForeignKeyOp.from_constraint(const.const) + ) + + log.info( + "Detected added foreign key (%s)(%s) on table %s%s", + ", ".join(obj.source_columns), + ", ".join(obj.target_columns), + "%s." % obj.source_schema if obj.source_schema else "", + obj.source_table, + ) + + def _remove_fk(obj, compare_to): + if autogen_context.run_object_filters( + obj.const, obj.name, "foreign_key_constraint", True, compare_to + ): + modify_table_ops.ops.append( + ops.DropConstraintOp.from_constraint(obj.const) + ) + log.info( + "Detected removed foreign key (%s)(%s) on table %s%s", + ", ".join(obj.source_columns), + ", ".join(obj.target_columns), + "%s." % obj.source_schema if obj.source_schema else "", + obj.source_table, + ) + + # so far it appears we don't need to do this by name at all. 
+ # SQLite doesn't preserve constraint names anyway + + for removed_sig in set(conn_fks_by_sig).difference(metadata_fks_by_sig): + const = conn_fks_by_sig[removed_sig] + if removed_sig not in metadata_fks_by_sig: + compare_to = ( + metadata_fks_by_name[const.name].const + if const.name and const.name in metadata_fks_by_name + else None + ) + _remove_fk(const, compare_to) + + for added_sig in set(metadata_fks_by_sig).difference(conn_fks_by_sig): + const = metadata_fks_by_sig[added_sig] + if added_sig not in conn_fks_by_sig: + compare_to = ( + conn_fks_by_name[const.name].const + if const.name and const.name in conn_fks_by_name + else None + ) + _add_fk(const, compare_to) + + return PriorityDispatchResult.CONTINUE + + +def _compare_nullable( + autogen_context: AutogenContext, + alter_column_op: AlterColumnOp, + schema: Optional[str], + tname: Union[quoted_name, str], + cname: Union[quoted_name, str], + conn_col: Column[Any], + metadata_col: Column[Any], +) -> PriorityDispatchResult: + metadata_col_nullable = metadata_col.nullable + conn_col_nullable = conn_col.nullable + alter_column_op.existing_nullable = conn_col_nullable + + if conn_col_nullable is not metadata_col_nullable: + if ( + sqla_compat._server_default_is_computed( + metadata_col.server_default, conn_col.server_default + ) + and sqla_compat._nullability_might_be_unset(metadata_col) + or ( + sqla_compat._server_default_is_identity( + metadata_col.server_default, conn_col.server_default + ) + ) + ): + log.info( + "Ignoring nullable change on identity column '%s.%s'", + tname, + cname, + ) + else: + alter_column_op.modify_nullable = metadata_col_nullable + log.info( + "Detected %s on column '%s.%s'", + "NULL" if metadata_col_nullable else "NOT NULL", + tname, + cname, + ) + # column nullablity changed, no further nullable checks needed + return PriorityDispatchResult.STOP + + return PriorityDispatchResult.CONTINUE + + +def setup(plugin: Plugin) -> None: + plugin.add_autogenerate_comparator( + 
_compare_indexes_and_uniques, + "table", + "indexes", + ) + plugin.add_autogenerate_comparator( + _compare_foreign_keys, + "table", + "foreignkeys", + ) + plugin.add_autogenerate_comparator( + _compare_nullable, + "column", + "nullable", + ) diff --git a/libs/alembic/autogenerate/compare/schema.py b/libs/alembic/autogenerate/compare/schema.py new file mode 100644 index 0000000000..1f46aff429 --- /dev/null +++ b/libs/alembic/autogenerate/compare/schema.py @@ -0,0 +1,62 @@ +# mypy: allow-untyped-calls + +from __future__ import annotations + +import logging +from typing import Optional +from typing import Set +from typing import TYPE_CHECKING + +from sqlalchemy import inspect + +from ...util import PriorityDispatchResult + +if TYPE_CHECKING: + from sqlalchemy.engine.reflection import Inspector + + from ...autogenerate.api import AutogenContext + from ...operations.ops import UpgradeOps + from ...runtime.plugins import Plugin + + +log = logging.getLogger(__name__) + + +def _produce_net_changes( + autogen_context: AutogenContext, upgrade_ops: UpgradeOps +) -> PriorityDispatchResult: + connection = autogen_context.connection + assert connection is not None + include_schemas = autogen_context.opts.get("include_schemas", False) + + inspector: Inspector = inspect(connection) + + default_schema = connection.dialect.default_schema_name + schemas: Set[Optional[str]] + if include_schemas: + schemas = set(inspector.get_schema_names()) + # replace default schema name with None + schemas.discard("information_schema") + # replace the "default" schema with None + schemas.discard(default_schema) + schemas.add(None) + else: + schemas = {None} + + schemas = { + s for s in schemas if autogen_context.run_name_filters(s, "schema", {}) + } + + assert autogen_context.dialect is not None + autogen_context.comparators.dispatch( + "schema", qualifier=autogen_context.dialect.name + )(autogen_context, upgrade_ops, schemas) + + return PriorityDispatchResult.CONTINUE + + +def setup(plugin: Plugin) 
-> None: + plugin.add_autogenerate_comparator( + _produce_net_changes, + "autogenerate", + ) diff --git a/libs/alembic/autogenerate/compare/server_defaults.py b/libs/alembic/autogenerate/compare/server_defaults.py new file mode 100644 index 0000000000..1e09e8e21a --- /dev/null +++ b/libs/alembic/autogenerate/compare/server_defaults.py @@ -0,0 +1,344 @@ +from __future__ import annotations + +import logging +import re +from types import NoneType +from typing import Any +from typing import cast +from typing import Optional +from typing import Sequence +from typing import TYPE_CHECKING +from typing import Union + +from sqlalchemy import schema as sa_schema +from sqlalchemy.sql.schema import DefaultClause + +from ... import util +from ...util import DispatchPriority +from ...util import PriorityDispatchResult +from ...util import sqla_compat + +if TYPE_CHECKING: + from sqlalchemy.sql.elements import quoted_name + from sqlalchemy.sql.schema import Column + + from ...autogenerate.api import AutogenContext + from ...operations.ops import AlterColumnOp + from ...runtime.plugins import Plugin + +log = logging.getLogger(__name__) + + +def _render_server_default_for_compare( + metadata_default: Optional[Any], autogen_context: AutogenContext +) -> Optional[str]: + if isinstance(metadata_default, sa_schema.DefaultClause): + if isinstance(metadata_default.arg, str): + metadata_default = metadata_default.arg + else: + metadata_default = str( + metadata_default.arg.compile( + dialect=autogen_context.dialect, + compile_kwargs={"literal_binds": True}, + ) + ) + if isinstance(metadata_default, str): + return metadata_default + else: + return None + + +def _normalize_computed_default(sqltext: str) -> str: + """we want to warn if a computed sql expression has changed. however + we don't want false positives and the warning is not that critical. + so filter out most forms of variability from the SQL text. 
+ + """ + + return re.sub(r"[ \(\)'\"`\[\]\t\r\n]", "", sqltext).lower() + + +def _compare_computed_default( + autogen_context: AutogenContext, + alter_column_op: AlterColumnOp, + schema: Optional[str], + tname: str, + cname: str, + conn_col: Column[Any], + metadata_col: Column[Any], +) -> PriorityDispatchResult: + + metadata_default = metadata_col.server_default + conn_col_default = conn_col.server_default + if conn_col_default is None and metadata_default is None: + return PriorityDispatchResult.CONTINUE + + if sqla_compat._server_default_is_computed( + conn_col_default + ) and not sqla_compat._server_default_is_computed(metadata_default): + _warn_computed_not_supported(tname, cname) + return PriorityDispatchResult.STOP + + if not sqla_compat._server_default_is_computed(metadata_default): + return PriorityDispatchResult.CONTINUE + + rendered_metadata_default = str( + cast(sa_schema.Computed, metadata_col.server_default).sqltext.compile( + dialect=autogen_context.dialect, + compile_kwargs={"literal_binds": True}, + ) + ) + + # since we cannot change computed columns, we do only a crude comparison + # here where we try to eliminate syntactical differences in order to + # get a minimal comparison just to emit a warning. 
+ + rendered_metadata_default = _normalize_computed_default( + rendered_metadata_default + ) + + if isinstance(conn_col.server_default, sa_schema.Computed): + rendered_conn_default = str( + conn_col.server_default.sqltext.compile( + dialect=autogen_context.dialect, + compile_kwargs={"literal_binds": True}, + ) + ) + rendered_conn_default = _normalize_computed_default( + rendered_conn_default + ) + else: + rendered_conn_default = "" + + if rendered_metadata_default != rendered_conn_default: + _warn_computed_not_supported(tname, cname) + + return PriorityDispatchResult.STOP + + +def _warn_computed_not_supported(tname: str, cname: str) -> None: + util.warn("Computed default on %s.%s cannot be modified" % (tname, cname)) + + +def _compare_identity_default( + autogen_context: AutogenContext, + alter_column_op: AlterColumnOp, + schema: Optional[str], + tname: Union[quoted_name, str], + cname: Union[quoted_name, str], + conn_col: Column[Any], + metadata_col: Column[Any], + skip: Sequence[str] = ( + "order", + "on_null", + "oracle_order", + "oracle_on_null", + ), +) -> PriorityDispatchResult: + + metadata_default = metadata_col.server_default + conn_col_default = conn_col.server_default + if ( + conn_col_default is None + and metadata_default is None + or not sqla_compat._server_default_is_identity( + metadata_default, conn_col_default + ) + ): + return PriorityDispatchResult.CONTINUE + + assert isinstance( + metadata_col.server_default, + (sa_schema.Identity, sa_schema.Sequence, NoneType), + ) + assert isinstance( + conn_col.server_default, + (sa_schema.Identity, sa_schema.Sequence, NoneType), + ) + + impl = autogen_context.migration_context.impl + diff, _, is_alter = impl._compare_identity_default( # type: ignore[no-untyped-call] # noqa: E501 + metadata_col.server_default, conn_col.server_default + ) + + if is_alter: + alter_column_op.modify_server_default = metadata_default + if diff: + log.info( + "Detected server default on column '%s.%s': " + "identity options 
attributes %s", + tname, + cname, + sorted(diff), + ) + + return PriorityDispatchResult.STOP + + return PriorityDispatchResult.CONTINUE + + +def _user_compare_server_default( + autogen_context: AutogenContext, + alter_column_op: AlterColumnOp, + schema: Optional[str], + tname: Union[quoted_name, str], + cname: Union[quoted_name, str], + conn_col: Column[Any], + metadata_col: Column[Any], +) -> PriorityDispatchResult: + + metadata_default = metadata_col.server_default + conn_col_default = conn_col.server_default + if conn_col_default is None and metadata_default is None: + return PriorityDispatchResult.CONTINUE + + alter_column_op.existing_server_default = conn_col_default + + migration_context = autogen_context.migration_context + + if migration_context._user_compare_server_default is False: + return PriorityDispatchResult.STOP + + if not callable(migration_context._user_compare_server_default): + return PriorityDispatchResult.CONTINUE + + rendered_metadata_default = _render_server_default_for_compare( + metadata_default, autogen_context + ) + rendered_conn_default = ( + cast(Any, conn_col_default).arg.text if conn_col_default else None + ) + + is_diff = migration_context._user_compare_server_default( + migration_context, + conn_col, + metadata_col, + rendered_conn_default, + metadata_col.server_default, + rendered_metadata_default, + ) + if is_diff: + alter_column_op.modify_server_default = metadata_default + log.info( + "User defined function %s detected " + "server default on column '%s.%s'", + migration_context._user_compare_server_default, + tname, + cname, + ) + return PriorityDispatchResult.STOP + elif is_diff is False: + # if user compare server_default returns False and not None, + # it means "dont do any more server_default comparison" + return PriorityDispatchResult.STOP + + return PriorityDispatchResult.CONTINUE + + +def _dialect_impl_compare_server_default( + autogen_context: AutogenContext, + alter_column_op: AlterColumnOp, + schema: Optional[str], + 
tname: Union[quoted_name, str], + cname: Union[quoted_name, str], + conn_col: Column[Any], + metadata_col: Column[Any], +) -> PriorityDispatchResult: + """use dialect.impl.compare_server_default. + + This would in theory not be needed. however we dont know if any + third party libraries haven't made their own alembic dialect and + implemented this method. + + """ + metadata_default = metadata_col.server_default + conn_col_default = conn_col.server_default + if conn_col_default is None and metadata_default is None: + return PriorityDispatchResult.CONTINUE + + # this is already done by _user_compare_server_default, + # but doing it here also for unit tests that want to call + # _dialect_impl_compare_server_default directly + alter_column_op.existing_server_default = conn_col_default + + if not isinstance( + metadata_default, (DefaultClause, NoneType) + ) or not isinstance(conn_col_default, (DefaultClause, NoneType)): + return PriorityDispatchResult.CONTINUE + + migration_context = autogen_context.migration_context + + rendered_metadata_default = _render_server_default_for_compare( + metadata_default, autogen_context + ) + rendered_conn_default = ( + cast(Any, conn_col_default).arg.text if conn_col_default else None + ) + + is_diff = migration_context.impl.compare_server_default( # type: ignore[no-untyped-call] # noqa: E501 + conn_col, + metadata_col, + rendered_metadata_default, + rendered_conn_default, + ) + if is_diff: + alter_column_op.modify_server_default = metadata_default + log.info( + "Dialect impl %s detected server default on column '%s.%s'", + migration_context.impl, + tname, + cname, + ) + return PriorityDispatchResult.STOP + return PriorityDispatchResult.CONTINUE + + +def _setup_autoincrement( + autogen_context: AutogenContext, + alter_column_op: AlterColumnOp, + schema: Optional[str], + tname: Union[quoted_name, str], + cname: quoted_name, + conn_col: Column[Any], + metadata_col: Column[Any], +) -> PriorityDispatchResult: + if 
metadata_col.table._autoincrement_column is metadata_col: + alter_column_op.kw["autoincrement"] = True + elif metadata_col.autoincrement is True: + alter_column_op.kw["autoincrement"] = True + elif metadata_col.autoincrement is False: + alter_column_op.kw["autoincrement"] = False + + return PriorityDispatchResult.CONTINUE + + +def setup(plugin: Plugin) -> None: + plugin.add_autogenerate_comparator( + _user_compare_server_default, + "column", + "server_default", + priority=DispatchPriority.FIRST, + ) + plugin.add_autogenerate_comparator( + _compare_computed_default, + "column", + "server_default", + ) + + plugin.add_autogenerate_comparator( + _compare_identity_default, + "column", + "server_default", + ) + + plugin.add_autogenerate_comparator( + _setup_autoincrement, + "column", + "server_default", + ) + plugin.add_autogenerate_comparator( + _dialect_impl_compare_server_default, + "column", + "server_default", + priority=DispatchPriority.LAST, + ) diff --git a/libs/alembic/autogenerate/compare/tables.py b/libs/alembic/autogenerate/compare/tables.py new file mode 100644 index 0000000000..31eddc6b59 --- /dev/null +++ b/libs/alembic/autogenerate/compare/tables.py @@ -0,0 +1,316 @@ +# mypy: allow-untyped-calls + +from __future__ import annotations + +import contextlib +import logging +from typing import Iterator +from typing import Optional +from typing import Set +from typing import Tuple +from typing import TYPE_CHECKING +from typing import Union + +from sqlalchemy import event +from sqlalchemy import schema as sa_schema +from sqlalchemy.util import OrderedSet + +from .util import _InspectorConv +from ...operations import ops +from ...util import PriorityDispatchResult + +if TYPE_CHECKING: + from sqlalchemy.engine.reflection import Inspector + from sqlalchemy.sql.elements import quoted_name + from sqlalchemy.sql.schema import Table + + from ...autogenerate.api import AutogenContext + from ...operations.ops import ModifyTableOps + from ...operations.ops import 
UpgradeOps + from ...runtime.plugins import Plugin + + +log = logging.getLogger(__name__) + + +def _autogen_for_tables( + autogen_context: AutogenContext, + upgrade_ops: UpgradeOps, + schemas: Set[Optional[str]], +) -> PriorityDispatchResult: + inspector = autogen_context.inspector + + conn_table_names: Set[Tuple[Optional[str], str]] = set() + + version_table_schema = ( + autogen_context.migration_context.version_table_schema + ) + version_table = autogen_context.migration_context.version_table + + for schema_name in schemas: + tables = available = set(inspector.get_table_names(schema=schema_name)) + if schema_name == version_table_schema: + tables = tables.difference( + [autogen_context.migration_context.version_table] + ) + + tablenames = [ + tname + for tname in tables + if autogen_context.run_name_filters( + tname, "table", {"schema_name": schema_name} + ) + ] + + conn_table_names.update((schema_name, tname) for tname in tablenames) + + inspector = autogen_context.inspector + insp = _InspectorConv(inspector) + insp.pre_cache_tables(schema_name, tablenames, available) + + metadata_table_names = OrderedSet( + [(table.schema, table.name) for table in autogen_context.sorted_tables] + ).difference([(version_table_schema, version_table)]) + + _compare_tables( + conn_table_names, + metadata_table_names, + inspector, + upgrade_ops, + autogen_context, + ) + + return PriorityDispatchResult.CONTINUE + + +def _compare_tables( + conn_table_names: set[tuple[str | None, str]], + metadata_table_names: set[tuple[str | None, str]], + inspector: Inspector, + upgrade_ops: UpgradeOps, + autogen_context: AutogenContext, +) -> None: + default_schema = inspector.bind.dialect.default_schema_name + + # tables coming from the connection will not have "schema" + # set if it matches default_schema_name; so we need a list + # of table names from local metadata that also have "None" if schema + # == default_schema_name. 
Most setups will be like this anyway but + # some are not (see #170) + metadata_table_names_no_dflt_schema = OrderedSet( + [ + (schema if schema != default_schema else None, tname) + for schema, tname in metadata_table_names + ] + ) + + # to adjust for the MetaData collection storing the tables either + # as "schemaname.tablename" or just "tablename", create a new lookup + # which will match the "non-default-schema" keys to the Table object. + tname_to_table = { + no_dflt_schema: autogen_context.table_key_to_table[ + sa_schema._get_table_key(tname, schema) + ] + for no_dflt_schema, (schema, tname) in zip( + metadata_table_names_no_dflt_schema, metadata_table_names + ) + } + metadata_table_names = metadata_table_names_no_dflt_schema + + for s, tname in metadata_table_names.difference(conn_table_names): + name = "%s.%s" % (s, tname) if s else tname + metadata_table = tname_to_table[(s, tname)] + if autogen_context.run_object_filters( + metadata_table, tname, "table", False, None + ): + upgrade_ops.ops.append( + ops.CreateTableOp.from_table(metadata_table) + ) + log.info("Detected added table %r", name) + modify_table_ops = ops.ModifyTableOps(tname, [], schema=s) + + autogen_context.comparators.dispatch( + "table", qualifier=autogen_context.dialect.name + )( + autogen_context, + modify_table_ops, + s, + tname, + None, + metadata_table, + ) + if not modify_table_ops.is_empty(): + upgrade_ops.ops.append(modify_table_ops) + + removal_metadata = sa_schema.MetaData() + for s, tname in conn_table_names.difference(metadata_table_names): + name = sa_schema._get_table_key(tname, s) + + # a name might be present already if a previous reflection pulled + # this table in via foreign key constraint + exists = name in removal_metadata.tables + t = sa_schema.Table(tname, removal_metadata, schema=s) + + if not exists: + event.listen( + t, + "column_reflect", + # fmt: off + autogen_context.migration_context.impl. 
+ _compat_autogen_column_reflect + (inspector), + # fmt: on + ) + _InspectorConv(inspector).reflect_table(t) + if autogen_context.run_object_filters(t, tname, "table", True, None): + modify_table_ops = ops.ModifyTableOps(tname, [], schema=s) + + autogen_context.comparators.dispatch( + "table", qualifier=autogen_context.dialect.name + )(autogen_context, modify_table_ops, s, tname, t, None) + if not modify_table_ops.is_empty(): + upgrade_ops.ops.append(modify_table_ops) + + upgrade_ops.ops.append(ops.DropTableOp.from_table(t)) + log.info("Detected removed table %r", name) + + existing_tables = conn_table_names.intersection(metadata_table_names) + + existing_metadata = sa_schema.MetaData() + conn_column_info = {} + for s, tname in existing_tables: + name = sa_schema._get_table_key(tname, s) + exists = name in existing_metadata.tables + + # a name might be present already if a previous reflection pulled + # this table in via foreign key constraint + t = sa_schema.Table(tname, existing_metadata, schema=s) + if not exists: + event.listen( + t, + "column_reflect", + # fmt: off + autogen_context.migration_context.impl. 
+ _compat_autogen_column_reflect(inspector), + # fmt: on + ) + _InspectorConv(inspector).reflect_table(t) + + conn_column_info[(s, tname)] = t + + for s, tname in sorted(existing_tables, key=lambda x: (x[0] or "", x[1])): + s = s or None + name = "%s.%s" % (s, tname) if s else tname + metadata_table = tname_to_table[(s, tname)] + conn_table = existing_metadata.tables[name] + + if autogen_context.run_object_filters( + metadata_table, tname, "table", False, conn_table + ): + modify_table_ops = ops.ModifyTableOps(tname, [], schema=s) + with _compare_columns( + s, + tname, + conn_table, + metadata_table, + modify_table_ops, + autogen_context, + inspector, + ): + autogen_context.comparators.dispatch( + "table", qualifier=autogen_context.dialect.name + )( + autogen_context, + modify_table_ops, + s, + tname, + conn_table, + metadata_table, + ) + + if not modify_table_ops.is_empty(): + upgrade_ops.ops.append(modify_table_ops) + + +@contextlib.contextmanager +def _compare_columns( + schema: Optional[str], + tname: Union[quoted_name, str], + conn_table: Table, + metadata_table: Table, + modify_table_ops: ModifyTableOps, + autogen_context: AutogenContext, + inspector: Inspector, +) -> Iterator[None]: + name = "%s.%s" % (schema, tname) if schema else tname + metadata_col_names = OrderedSet( + c.name for c in metadata_table.c if not c.system + ) + metadata_cols_by_name = { + c.name: c for c in metadata_table.c if not c.system + } + + conn_col_names = { + c.name: c + for c in conn_table.c + if autogen_context.run_name_filters( + c.name, "column", {"table_name": tname, "schema_name": schema} + ) + } + + for cname in metadata_col_names.difference(conn_col_names): + if autogen_context.run_object_filters( + metadata_cols_by_name[cname], cname, "column", False, None + ): + modify_table_ops.ops.append( + ops.AddColumnOp.from_column_and_tablename( + schema, tname, metadata_cols_by_name[cname] + ) + ) + log.info("Detected added column '%s.%s'", name, cname) + + for colname in 
metadata_col_names.intersection(conn_col_names): + metadata_col = metadata_cols_by_name[colname] + conn_col = conn_table.c[colname] + if not autogen_context.run_object_filters( + metadata_col, colname, "column", False, conn_col + ): + continue + alter_column_op = ops.AlterColumnOp(tname, colname, schema=schema) + + autogen_context.comparators.dispatch( + "column", qualifier=autogen_context.dialect.name + )( + autogen_context, + alter_column_op, + schema, + tname, + colname, + conn_col, + metadata_col, + ) + + if alter_column_op.has_changes(): + modify_table_ops.ops.append(alter_column_op) + + yield + + for cname in set(conn_col_names).difference(metadata_col_names): + if autogen_context.run_object_filters( + conn_table.c[cname], cname, "column", True, None + ): + modify_table_ops.ops.append( + ops.DropColumnOp.from_column_and_tablename( + schema, tname, conn_table.c[cname] + ) + ) + log.info("Detected removed column '%s.%s'", name, cname) + + +def setup(plugin: Plugin) -> None: + + plugin.add_autogenerate_comparator( + _autogen_for_tables, + "schema", + "tables", + ) diff --git a/libs/alembic/autogenerate/compare/types.py b/libs/alembic/autogenerate/compare/types.py new file mode 100644 index 0000000000..1d5d160a35 --- /dev/null +++ b/libs/alembic/autogenerate/compare/types.py @@ -0,0 +1,147 @@ +from __future__ import annotations + +import logging +from typing import Any +from typing import Optional +from typing import TYPE_CHECKING +from typing import Union + +from sqlalchemy import types as sqltypes + +from ...util import DispatchPriority +from ...util import PriorityDispatchResult + +if TYPE_CHECKING: + from sqlalchemy.sql.elements import quoted_name + from sqlalchemy.sql.schema import Column + + from ...autogenerate.api import AutogenContext + from ...operations.ops import AlterColumnOp + from ...runtime.plugins import Plugin + + +log = logging.getLogger(__name__) + + +def _compare_type_setup( + alter_column_op: AlterColumnOp, + tname: Union[quoted_name, str], 
+ cname: Union[quoted_name, str], + conn_col: Column[Any], + metadata_col: Column[Any], +) -> bool: + + conn_type = conn_col.type + alter_column_op.existing_type = conn_type + metadata_type = metadata_col.type + if conn_type._type_affinity is sqltypes.NullType: + log.info( + "Couldn't determine database type for column '%s.%s'", + tname, + cname, + ) + return False + if metadata_type._type_affinity is sqltypes.NullType: + log.info( + "Column '%s.%s' has no type within the model; can't compare", + tname, + cname, + ) + return False + + return True + + +def _user_compare_type( + autogen_context: AutogenContext, + alter_column_op: AlterColumnOp, + schema: Optional[str], + tname: Union[quoted_name, str], + cname: Union[quoted_name, str], + conn_col: Column[Any], + metadata_col: Column[Any], +) -> PriorityDispatchResult: + + migration_context = autogen_context.migration_context + + if migration_context._user_compare_type is False: + return PriorityDispatchResult.STOP + + if not _compare_type_setup( + alter_column_op, tname, cname, conn_col, metadata_col + ): + return PriorityDispatchResult.CONTINUE + + if not callable(migration_context._user_compare_type): + return PriorityDispatchResult.CONTINUE + + is_diff = migration_context._user_compare_type( + migration_context, + conn_col, + metadata_col, + conn_col.type, + metadata_col.type, + ) + if is_diff: + alter_column_op.modify_type = metadata_col.type + log.info( + "Detected type change from %r to %r on '%s.%s'", + conn_col.type, + metadata_col.type, + tname, + cname, + ) + return PriorityDispatchResult.STOP + elif is_diff is False: + # if user compare type returns False and not None, + # it means "dont do any more type comparison" + return PriorityDispatchResult.STOP + + return PriorityDispatchResult.CONTINUE + + +def _dialect_impl_compare_type( + autogen_context: AutogenContext, + alter_column_op: AlterColumnOp, + schema: Optional[str], + tname: Union[quoted_name, str], + cname: Union[quoted_name, str], + conn_col: 
Column[Any], + metadata_col: Column[Any], +) -> PriorityDispatchResult: + + if not _compare_type_setup( + alter_column_op, tname, cname, conn_col, metadata_col + ): + return PriorityDispatchResult.CONTINUE + + migration_context = autogen_context.migration_context + is_diff = migration_context.impl.compare_type(conn_col, metadata_col) + + if is_diff: + alter_column_op.modify_type = metadata_col.type + log.info( + "Detected type change from %r to %r on '%s.%s'", + conn_col.type, + metadata_col.type, + tname, + cname, + ) + return PriorityDispatchResult.STOP + + return PriorityDispatchResult.CONTINUE + + +def setup(plugin: Plugin) -> None: + plugin.add_autogenerate_comparator( + _user_compare_type, + "column", + "types", + priority=DispatchPriority.FIRST, + ) + plugin.add_autogenerate_comparator( + _dialect_impl_compare_type, + "column", + "types", + priority=DispatchPriority.LAST, + ) diff --git a/libs/alembic/autogenerate/compare/util.py b/libs/alembic/autogenerate/compare/util.py new file mode 100644 index 0000000000..41829c0e0b --- /dev/null +++ b/libs/alembic/autogenerate/compare/util.py @@ -0,0 +1,314 @@ +# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls +# mypy: no-warn-return-any, allow-any-generics +from __future__ import annotations + +from typing import Any +from typing import cast +from typing import Collection +from typing import TYPE_CHECKING + +from sqlalchemy.sql.elements import conv +from typing_extensions import Self + +from ...util import sqla_compat + +if TYPE_CHECKING: + from sqlalchemy import Table + from sqlalchemy.engine import Inspector + from sqlalchemy.engine.interfaces import ReflectedForeignKeyConstraint + from sqlalchemy.engine.interfaces import ReflectedIndex + from sqlalchemy.engine.interfaces import ReflectedUniqueConstraint + from sqlalchemy.engine.reflection import _ReflectionInfo + +_INSP_KEYS = ( + "columns", + "pk_constraint", + "foreign_keys", + "indexes", + "unique_constraints", + "table_comment", + 
"check_constraints", + "table_options", +) +_CONSTRAINT_INSP_KEYS = ( + "pk_constraint", + "foreign_keys", + "indexes", + "unique_constraints", + "check_constraints", +) + + +class _InspectorConv: + __slots__ = ("inspector",) + + def __new__(cls, inspector: Inspector) -> Self: + obj: Any + if sqla_compat.sqla_2: + obj = object.__new__(_SQLA2InspectorConv) + _SQLA2InspectorConv.__init__(obj, inspector) + else: + obj = object.__new__(_LegacyInspectorConv) + _LegacyInspectorConv.__init__(obj, inspector) + return cast(Self, obj) + + def __init__(self, inspector: Inspector): + self.inspector = inspector + + def pre_cache_tables( + self, + schema: str | None, + tablenames: list[str], + all_available_tablenames: Collection[str], + ) -> None: + pass + + def get_unique_constraints( + self, tname: str, schema: str | None + ) -> list[ReflectedUniqueConstraint]: + raise NotImplementedError() + + def get_indexes( + self, tname: str, schema: str | None + ) -> list[ReflectedIndex]: + raise NotImplementedError() + + def get_foreign_keys( + self, tname: str, schema: str | None + ) -> list[ReflectedForeignKeyConstraint]: + raise NotImplementedError() + + def reflect_table(self, table: Table) -> None: + raise NotImplementedError() + + +class _LegacyInspectorConv(_InspectorConv): + + def _apply_reflectinfo_conv(self, consts): + if not consts: + return consts + for const in consts: + if const["name"] is not None and not isinstance( + const["name"], conv + ): + const["name"] = conv(const["name"]) + return consts + + def _apply_constraint_conv(self, consts): + if not consts: + return consts + for const in consts: + if const.name is not None and not isinstance(const.name, conv): + const.name = conv(const.name) + return consts + + def get_indexes( + self, tname: str, schema: str | None + ) -> list[ReflectedIndex]: + return self._apply_reflectinfo_conv( + self.inspector.get_indexes(tname, schema=schema) + ) + + def get_unique_constraints( + self, tname: str, schema: str | None + ) -> 
list[ReflectedUniqueConstraint]: + return self._apply_reflectinfo_conv( + self.inspector.get_unique_constraints(tname, schema=schema) + ) + + def get_foreign_keys( + self, tname: str, schema: str | None + ) -> list[ReflectedForeignKeyConstraint]: + return self._apply_reflectinfo_conv( + self.inspector.get_foreign_keys(tname, schema=schema) + ) + + def reflect_table(self, table: Table) -> None: + self.inspector.reflect_table(table, include_columns=None) + + self._apply_constraint_conv(table.constraints) + self._apply_constraint_conv(table.indexes) + + +class _SQLA2InspectorConv(_InspectorConv): + + def _pre_cache( + self, + schema: str | None, + tablenames: list[str], + all_available_tablenames: Collection[str], + info_key: str, + inspector_method: Any, + ) -> None: + + if info_key in self.inspector.info_cache: + return + + # heuristic vendored from SQLAlchemy 2.0 + # if more than 50% of the tables in the db are in filter_names load all + # the tables, since it's most likely faster to avoid a filter on that + # many tables. 
also if a dialect doesnt have a "multi" method then + # return the filter names + if tablenames and all_available_tablenames and len(tablenames) > 100: + fraction = len(tablenames) / len(all_available_tablenames) + else: + fraction = None + + if ( + fraction is None + or fraction <= 0.5 + or not self.inspector.dialect._overrides_default( + inspector_method.__name__ + ) + ): + optimized_filter_names = tablenames + else: + optimized_filter_names = None + + try: + elements = inspector_method( + schema=schema, filter_names=optimized_filter_names + ) + except NotImplementedError: + self.inspector.info_cache[info_key] = NotImplementedError + else: + self.inspector.info_cache[info_key] = elements + + def _return_from_cache( + self, + tname: str, + schema: str | None, + info_key: str, + inspector_method: Any, + apply_constraint_conv: bool = False, + optional=True, + ) -> Any: + not_in_cache = object() + + if info_key in self.inspector.info_cache: + cache = self.inspector.info_cache[info_key] + if cache is NotImplementedError: + if optional: + return {} + else: + # maintain NotImplementedError as alembic compare + # uses these to determine classes of construct that it + # should not compare to DB elements + raise NotImplementedError() + + individual = cache.get((schema, tname), not_in_cache) + + if individual is not not_in_cache: + if apply_constraint_conv and individual is not None: + return self._apply_reflectinfo_conv(individual) + else: + return individual + + try: + data = inspector_method(tname, schema=schema) + except NotImplementedError: + if optional: + return {} + else: + raise + + if apply_constraint_conv: + return self._apply_reflectinfo_conv(data) + else: + return data + + def get_unique_constraints( + self, tname: str, schema: str | None + ) -> list[ReflectedUniqueConstraint]: + return self._return_from_cache( + tname, + schema, + "alembic_unique_constraints", + self.inspector.get_unique_constraints, + apply_constraint_conv=True, + optional=False, + ) + + def 
get_indexes( + self, tname: str, schema: str | None + ) -> list[ReflectedIndex]: + return self._return_from_cache( + tname, + schema, + "alembic_indexes", + self.inspector.get_indexes, + apply_constraint_conv=True, + optional=False, + ) + + def get_foreign_keys( + self, tname: str, schema: str | None + ) -> list[ReflectedForeignKeyConstraint]: + return self._return_from_cache( + tname, + schema, + "alembic_foreign_keys", + self.inspector.get_foreign_keys, + apply_constraint_conv=True, + ) + + def _apply_reflectinfo_conv(self, consts): + if not consts: + return consts + for const in consts if not isinstance(consts, dict) else [consts]: + if const["name"] is not None and not isinstance( + const["name"], conv + ): + const["name"] = conv(const["name"]) + return consts + + def pre_cache_tables( + self, + schema: str | None, + tablenames: list[str], + all_available_tablenames: Collection[str], + ) -> None: + for key in _INSP_KEYS: + keyname = f"alembic_{key}" + meth = getattr(self.inspector, f"get_multi_{key}") + + self._pre_cache( + schema, + tablenames, + all_available_tablenames, + keyname, + meth, + ) + + def _make_reflection_info( + self, tname: str, schema: str | None + ) -> _ReflectionInfo: + from sqlalchemy.engine.reflection import _ReflectionInfo + + table_key = (schema, tname) + + return _ReflectionInfo( + unreflectable={}, + **{ + key: { + table_key: self._return_from_cache( + tname, + schema, + f"alembic_{key}", + getattr(self.inspector, f"get_{key}"), + apply_constraint_conv=(key in _CONSTRAINT_INSP_KEYS), + ) + } + for key in _INSP_KEYS + }, + ) + + def reflect_table(self, table: Table) -> None: + ri = self._make_reflection_info(table.name, table.schema) + + self.inspector.reflect_table( + table, + include_columns=None, + resolve_fks=False, + _reflect_info=ri, + ) diff --git a/libs/alembic/autogenerate/render.py b/libs/alembic/autogenerate/render.py index 38bdbfca26..7f32838df7 100644 --- a/libs/alembic/autogenerate/render.py +++ 
b/libs/alembic/autogenerate/render.py @@ -18,7 +18,9 @@ from sqlalchemy import schema as sa_schema from sqlalchemy import sql from sqlalchemy import types as sqltypes +from sqlalchemy.sql.base import _DialectArgView from sqlalchemy.sql.elements import conv +from sqlalchemy.sql.elements import Label from sqlalchemy.sql.elements import quoted_name from .. import util @@ -28,7 +30,8 @@ if TYPE_CHECKING: from typing import Literal - from sqlalchemy.sql.base import DialectKWArgs + from sqlalchemy import Computed + from sqlalchemy import Identity from sqlalchemy.sql.elements import ColumnElement from sqlalchemy.sql.elements import TextClause from sqlalchemy.sql.schema import CheckConstraint @@ -48,8 +51,6 @@ from alembic.config import Config from alembic.operations.ops import MigrationScript from alembic.operations.ops import ModifyTableOps - from alembic.util.sqla_compat import Computed - from alembic.util.sqla_compat import Identity MAX_PYTHON_ARGS = 255 @@ -303,11 +304,11 @@ def _drop_table(autogen_context: AutogenContext, op: ops.DropTableOp) -> str: def _render_dialect_kwargs_items( - autogen_context: AutogenContext, item: DialectKWArgs + autogen_context: AutogenContext, dialect_kwargs: _DialectArgView ) -> list[str]: return [ f"{key}={_render_potential_expr(val, autogen_context)}" - for key, val in item.dialect_kwargs.items() + for key, val in dialect_kwargs.items() ] @@ -330,7 +331,7 @@ def _add_index(autogen_context: AutogenContext, op: ops.CreateIndexOp) -> str: assert index.table is not None - opts = _render_dialect_kwargs_items(autogen_context, index) + opts = _render_dialect_kwargs_items(autogen_context, index.dialect_kwargs) if op.if_not_exists is not None: opts.append("if_not_exists=%r" % bool(op.if_not_exists)) text = tmpl % { @@ -364,7 +365,7 @@ def _drop_index(autogen_context: AutogenContext, op: ops.DropIndexOp) -> str: "%(prefix)sdrop_index(%(name)r, " "table_name=%(table_name)r%(schema)s%(kwargs)s)" ) - opts = 
_render_dialect_kwargs_items(autogen_context, index) + opts = _render_dialect_kwargs_items(autogen_context, index.dialect_kwargs) if op.if_exists is not None: opts.append("if_exists=%r" % bool(op.if_exists)) text = tmpl % { @@ -388,6 +389,7 @@ def _add_unique_constraint( def _add_fk_constraint( autogen_context: AutogenContext, op: ops.CreateForeignKeyOp ) -> str: + constraint = op.to_constraint() args = [repr(_render_gen_name(autogen_context, op.constraint_name))] if not autogen_context._has_batch: args.append(repr(_ident(op.source_table))) @@ -417,9 +419,16 @@ def _add_fk_constraint( if value is not None: args.append("%s=%r" % (k, value)) - return "%(prefix)screate_foreign_key(%(args)s)" % { + dialect_kwargs = _render_dialect_kwargs_items( + autogen_context, constraint.dialect_kwargs + ) + + return "%(prefix)screate_foreign_key(%(args)s%(dialect_kwargs)s)" % { "prefix": _alembic_autogenerate_prefix(autogen_context), "args": ", ".join(args), + "dialect_kwargs": ( + ", " + ", ".join(dialect_kwargs) if dialect_kwargs else "" + ), } @@ -441,7 +450,7 @@ def _drop_constraint( name = _render_gen_name(autogen_context, op.constraint_name) schema = _ident(op.schema) if op.schema else None type_ = _ident(op.constraint_type) if op.constraint_type else None - + if_exists = op.if_exists params_strs = [] params_strs.append(repr(name)) if not autogen_context._has_batch: @@ -450,32 +459,47 @@ def _drop_constraint( params_strs.append(f"schema={schema!r}") if type_ is not None: params_strs.append(f"type_={type_!r}") + if if_exists is not None: + params_strs.append(f"if_exists={if_exists}") return f"{prefix}drop_constraint({', '.join(params_strs)})" @renderers.dispatch_for(ops.AddColumnOp) def _add_column(autogen_context: AutogenContext, op: ops.AddColumnOp) -> str: - schema, tname, column = op.schema, op.table_name, op.column + schema, tname, column, if_not_exists = ( + op.schema, + op.table_name, + op.column, + op.if_not_exists, + ) if autogen_context._has_batch: template = 
"%(prefix)sadd_column(%(column)s)" else: template = "%(prefix)sadd_column(%(tname)r, %(column)s" if schema: template += ", schema=%(schema)r" + if if_not_exists is not None: + template += ", if_not_exists=%(if_not_exists)r" template += ")" text = template % { "prefix": _alembic_autogenerate_prefix(autogen_context), "tname": tname, "column": _render_column(column, autogen_context), "schema": schema, + "if_not_exists": if_not_exists, } return text @renderers.dispatch_for(ops.DropColumnOp) def _drop_column(autogen_context: AutogenContext, op: ops.DropColumnOp) -> str: - schema, tname, column_name = op.schema, op.table_name, op.column_name + schema, tname, column_name, if_exists = ( + op.schema, + op.table_name, + op.column_name, + op.if_exists, + ) if autogen_context._has_batch: template = "%(prefix)sdrop_column(%(cname)r)" @@ -483,6 +507,8 @@ def _drop_column(autogen_context: AutogenContext, op: ops.DropColumnOp) -> str: template = "%(prefix)sdrop_column(%(tname)r, %(cname)r" if schema: template += ", schema=%(schema)r" + if if_exists is not None: + template += ", if_exists=%(if_exists)r" template += ")" text = template % { @@ -490,6 +516,7 @@ def _drop_column(autogen_context: AutogenContext, op: ops.DropColumnOp) -> str: "tname": _ident(tname), "cname": _ident(column_name), "schema": _ident(schema), + "if_exists": if_exists, } return text @@ -504,6 +531,7 @@ def _alter_column( type_ = op.modify_type nullable = op.modify_nullable comment = op.modify_comment + newname = op.modify_name autoincrement = op.kw.get("autoincrement", None) existing_type = op.existing_type existing_nullable = op.existing_nullable @@ -532,6 +560,8 @@ def _alter_column( rendered = _render_server_default(server_default, autogen_context) text += ",\n%sserver_default=%s" % (indent, rendered) + if newname is not None: + text += ",\n%snew_column_name=%r" % (indent, newname) if type_ is not None: text += ",\n%stype_=%s" % (indent, _repr_type(type_, autogen_context)) if nullable is not None: @@ 
-584,23 +614,28 @@ def _render_potential_expr( value: Any, autogen_context: AutogenContext, *, - wrap_in_text: bool = True, + wrap_in_element: bool = True, is_server_default: bool = False, is_index: bool = False, ) -> str: if isinstance(value, sql.ClauseElement): - if wrap_in_text: - template = "%(prefix)stext(%(sql)r)" + sql_text = autogen_context.migration_context.impl.render_ddl_sql_expr( + value, is_server_default=is_server_default, is_index=is_index + ) + if wrap_in_element: + prefix = _sqlalchemy_autogenerate_prefix(autogen_context) + element = "literal_column" if is_index else "text" + value_str = f"{prefix}{element}({sql_text!r})" + if ( + is_index + and isinstance(value, Label) + and type(value.name) is str + ): + return value_str + f".label({value.name!r})" + else: + return value_str else: - template = "%(sql)r" - - return template % { - "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), - "sql": autogen_context.migration_context.impl.render_ddl_sql_expr( - value, is_server_default=is_server_default, is_index=is_index - ), - } - + return repr(sql_text) else: return repr(value) @@ -628,16 +663,18 @@ def _uq_constraint( has_batch = autogen_context._has_batch if constraint.deferrable: - opts.append(("deferrable", str(constraint.deferrable))) + opts.append(("deferrable", constraint.deferrable)) if constraint.initially: - opts.append(("initially", str(constraint.initially))) + opts.append(("initially", constraint.initially)) if not has_batch and alter and constraint.table.schema: opts.append(("schema", _ident(constraint.table.schema))) if not alter and constraint.name: opts.append( ("name", _render_gen_name(autogen_context, constraint.name)) ) - dialect_options = _render_dialect_kwargs_items(autogen_context, constraint) + dialect_options = _render_dialect_kwargs_items( + autogen_context, constraint.dialect_kwargs + ) if alter: args = [repr(_render_gen_name(autogen_context, constraint.name))] @@ -741,7 +778,7 @@ def _render_column( + [ "%s=%s" % (key, 
_render_potential_expr(val, autogen_context)) - for key, val in sqla_compat._column_kwargs(column).items() + for key, val in column.kwargs.items() ] ) ), @@ -776,6 +813,8 @@ def _render_server_default( return _render_potential_expr( default.arg, autogen_context, is_server_default=True ) + elif isinstance(default, sa_schema.FetchedValue): + return _render_fetched_value(autogen_context) if isinstance(default, str) and repr_: default = repr(re.sub(r"^'|'$", "", default)) @@ -787,7 +826,7 @@ def _render_computed( computed: Computed, autogen_context: AutogenContext ) -> str: text = _render_potential_expr( - computed.sqltext, autogen_context, wrap_in_text=False + computed.sqltext, autogen_context, wrap_in_element=False ) kwargs = {} @@ -813,6 +852,12 @@ def _render_identity( } +def _render_fetched_value(autogen_context: AutogenContext) -> str: + return "%(prefix)sFetchedValue()" % { + "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), + } + + def _repr_type( type_: TypeEngine, autogen_context: AutogenContext, @@ -831,7 +876,10 @@ def _repr_type( mod = type(type_).__module__ imports = autogen_context.imports - if mod.startswith("sqlalchemy.dialects"): + + if not _skip_variants and sqla_compat._type_has_variants(type_): + return _render_Variant_type(type_, autogen_context) + elif mod.startswith("sqlalchemy.dialects"): match = re.match(r"sqlalchemy\.dialects\.(\w+)", mod) assert match is not None dname = match.group(1) @@ -843,8 +891,6 @@ def _repr_type( return "%s.%r" % (dname, type_) elif impl_rt: return impl_rt - elif not _skip_variants and sqla_compat._type_has_variants(type_): - return _render_Variant_type(type_, autogen_context) elif mod.startswith("sqlalchemy."): if "_render_%s_type" % type_.__visit_name__ in globals(): fn = globals()["_render_%s_type" % type_.__visit_name__] @@ -962,7 +1008,7 @@ def _render_primary_key( def _fk_colspec( fk: ForeignKey, metadata_schema: Optional[str], - namespace_metadata: MetaData, + namespace_metadata: Optional[MetaData], 
) -> str: """Implement a 'safe' version of ForeignKey._get_colspec() that won't fail if the remote table can't be resolved. @@ -986,7 +1032,10 @@ def _fk_colspec( # the FK constraint needs to be rendered in terms of the column # name. - if table_fullname in namespace_metadata.tables: + if ( + namespace_metadata is not None + and table_fullname in namespace_metadata.tables + ): col = namespace_metadata.tables[table_fullname].c.get(colname) if col is not None: colname = _ident(col.name) # type: ignore[assignment] @@ -1017,7 +1066,7 @@ def _populate_render_fk_opts( def _render_foreign_key( constraint: ForeignKeyConstraint, autogen_context: AutogenContext, - namespace_metadata: MetaData, + namespace_metadata: Optional[MetaData], ) -> Optional[str]: rendered = _user_defined_render("foreign_key", constraint, autogen_context) if rendered is not False: @@ -1031,7 +1080,9 @@ def _render_foreign_key( _populate_render_fk_opts(constraint, opts) - apply_metadata_schema = namespace_metadata.schema + apply_metadata_schema = ( + namespace_metadata.schema if namespace_metadata is not None else None + ) return ( "%(prefix)sForeignKeyConstraint([%(cols)s], " "[%(refcols)s], %(args)s)" @@ -1100,7 +1151,7 @@ def _render_check_constraint( else "" ), "sqltext": _render_potential_expr( - constraint.sqltext, autogen_context, wrap_in_text=False + constraint.sqltext, autogen_context, wrap_in_element=False ), } @@ -1112,7 +1163,10 @@ def _execute_sql(autogen_context: AutogenContext, op: ops.ExecuteSQLOp) -> str: "Autogenerate rendering of SQL Expression language constructs " "not supported here; please use a plain SQL string" ) - return "op.execute(%r)" % op.sqltext + return "{prefix}execute({sqltext!r})".format( + prefix=_alembic_autogenerate_prefix(autogen_context), + sqltext=op.sqltext, + ) renderers = default_renderers.branch() diff --git a/libs/alembic/autogenerate/rewriter.py b/libs/alembic/autogenerate/rewriter.py index 8994dcf823..1d44b5c340 100644 --- 
a/libs/alembic/autogenerate/rewriter.py +++ b/libs/alembic/autogenerate/rewriter.py @@ -177,7 +177,7 @@ def _traverse_script( ) upgrade_ops_list.append(ret[0]) - directive.upgrade_ops = upgrade_ops_list # type: ignore + directive.upgrade_ops = upgrade_ops_list downgrade_ops_list: List[DowngradeOps] = [] for downgrade_ops in directive.downgrade_ops_list: @@ -187,7 +187,7 @@ def _traverse_script( "Can only return single object for DowngradeOps traverse" ) downgrade_ops_list.append(ret[0]) - directive.downgrade_ops = downgrade_ops_list # type: ignore + directive.downgrade_ops = downgrade_ops_list @_traverse.dispatch_for(ops.OpContainer) def _traverse_op_container( diff --git a/libs/alembic/command.py b/libs/alembic/command.py index 89c12354a6..4897c0d9c2 100644 --- a/libs/alembic/command.py +++ b/libs/alembic/command.py @@ -3,6 +3,7 @@ from __future__ import annotations import os +import pathlib from typing import List from typing import Optional from typing import TYPE_CHECKING @@ -12,6 +13,7 @@ from . 
import util from .runtime.environment import EnvironmentContext from .script import ScriptDirectory +from .util import compat if TYPE_CHECKING: from alembic.config import Config @@ -28,12 +30,10 @@ def list_templates(config: Config) -> None: """ config.print_stdout("Available templates:\n") - for tempname in os.listdir(config.get_template_directory()): - with open( - os.path.join(config.get_template_directory(), tempname, "README") - ) as readme: + for tempname in config._get_template_path().iterdir(): + with (tempname / "README").open() as readme: synopsis = next(readme).rstrip() - config.print_stdout("%s - %s", tempname, synopsis) + config.print_stdout("%s - %s", tempname.name, synopsis) config.print_stdout("\nTemplates are used via the 'init' command, e.g.:") config.print_stdout("\n alembic init --template generic ./scripts") @@ -59,65 +59,136 @@ def init( """ - if os.access(directory, os.F_OK) and os.listdir(directory): + directory_path = pathlib.Path(directory) + if directory_path.exists() and list(directory_path.iterdir()): raise util.CommandError( - "Directory %s already exists and is not empty" % directory + "Directory %s already exists and is not empty" % directory_path ) - template_dir = os.path.join(config.get_template_directory(), template) - if not os.access(template_dir, os.F_OK): - raise util.CommandError("No such template %r" % template) + template_path = config._get_template_path() / template - if not os.access(directory, os.F_OK): + if not template_path.exists(): + raise util.CommandError(f"No such template {template_path}") + + # left as os.access() to suit unit test mocking + if not os.access(directory_path, os.F_OK): with util.status( - f"Creating directory {os.path.abspath(directory)!r}", + f"Creating directory {directory_path.absolute()}", **config.messaging_opts, ): - os.makedirs(directory) + os.makedirs(directory_path) - versions = os.path.join(directory, "versions") + versions = directory_path / "versions" with util.status( - f"Creating 
directory {os.path.abspath(versions)!r}", + f"Creating directory {versions.absolute()}", **config.messaging_opts, ): os.makedirs(versions) - script = ScriptDirectory(directory) + if not directory_path.is_absolute(): + # for non-absolute path, state config file in .ini / pyproject + # as relative to the %(here)s token, which is where the config + # file itself would be + + if config._config_file_path is not None: + rel_dir = compat.path_relative_to( + directory_path.absolute(), + config._config_file_path.absolute().parent, + walk_up=True, + ) + ini_script_location_directory = ("%(here)s" / rel_dir).as_posix() + if config._toml_file_path is not None: + rel_dir = compat.path_relative_to( + directory_path.absolute(), + config._toml_file_path.absolute().parent, + walk_up=True, + ) + toml_script_location_directory = ("%(here)s" / rel_dir).as_posix() + + else: + ini_script_location_directory = directory_path.as_posix() + toml_script_location_directory = directory_path.as_posix() + + script = ScriptDirectory(directory_path) + + has_toml = False - config_file: str | None = None - for file_ in os.listdir(template_dir): - file_path = os.path.join(template_dir, file_) + config_file: pathlib.Path | None = None + + for file_path in template_path.iterdir(): + file_ = file_path.name if file_ == "alembic.ini.mako": assert config.config_file_name is not None - config_file = os.path.abspath(config.config_file_name) - if os.access(config_file, os.F_OK): + config_file = pathlib.Path(config.config_file_name).absolute() + if config_file.exists(): util.msg( - f"File {config_file!r} already exists, skipping", + f"File {config_file} already exists, skipping", **config.messaging_opts, ) else: script._generate_template( - file_path, config_file, script_location=directory + file_path, + config_file, + script_location=ini_script_location_directory, + ) + elif file_ == "pyproject.toml.mako": + has_toml = True + assert config._toml_file_path is not None + toml_path = 
config._toml_file_path.absolute() + + if toml_path.exists(): + # left as open() to suit unit test mocking + with open(toml_path, "rb") as f: + toml_data = compat.tomllib.load(f) + if "tool" in toml_data and "alembic" in toml_data["tool"]: + + util.msg( + f"File {toml_path} already exists " + "and already has a [tool.alembic] section, " + "skipping", + ) + continue + script._append_template( + file_path, + toml_path, + script_location=toml_script_location_directory, + ) + else: + script._generate_template( + file_path, + toml_path, + script_location=toml_script_location_directory, ) - elif os.path.isfile(file_path): - output_file = os.path.join(directory, file_) + + elif file_path.is_file(): + output_file = directory_path / file_ script._copy_file(file_path, output_file) if package: for path in [ - os.path.join(os.path.abspath(directory), "__init__.py"), - os.path.join(os.path.abspath(versions), "__init__.py"), + directory_path.absolute() / "__init__.py", + versions.absolute() / "__init__.py", ]: - with util.status(f"Adding {path!r}", **config.messaging_opts): + with util.status(f"Adding {path!s}", **config.messaging_opts): + # left as open() to suit unit test mocking with open(path, "w"): pass assert config_file is not None - util.msg( - "Please edit configuration/connection/logging " - f"settings in {config_file!r} before proceeding.", - **config.messaging_opts, - ) + + if has_toml: + util.msg( + f"Please edit configuration settings in {toml_path} and " + "configuration/connection/logging " + f"settings in {config_file} before proceeding.", + **config.messaging_opts, + ) + else: + util.msg( + "Please edit configuration/connection/logging " + f"settings in {config_file} before proceeding.", + **config.messaging_opts, + ) def revision( @@ -128,7 +199,7 @@ def revision( head: str = "head", splice: bool = False, branch_label: Optional[_RevIdType] = None, - version_path: Optional[str] = None, + version_path: Union[str, os.PathLike[str], None] = None, rev_id: 
Optional[str] = None, depends_on: Optional[str] = None, process_revision_directives: Optional[ProcessRevisionDirectiveFn] = None, @@ -198,7 +269,9 @@ def revision( process_revision_directives=process_revision_directives, ) - environment = util.asbool(config.get_main_option("revision_environment")) + environment = util.asbool( + config.get_alembic_option("revision_environment") + ) if autogenerate: environment = True @@ -298,7 +371,9 @@ def retrieve_migrations(rev, context): if diffs: raise util.AutogenerateDiffsDetected( - f"New upgrade operations detected: {diffs}" + f"New upgrade operations detected: {diffs}", + revision_context=revision_context, + diffs=diffs, ) else: config.print_stdout("No new upgrade operations detected.") @@ -336,7 +411,9 @@ def merge( # e.g. multiple databases } - environment = util.asbool(config.get_main_option("revision_environment")) + environment = util.asbool( + config.get_alembic_option("revision_environment") + ) if environment: @@ -509,7 +586,7 @@ def history( base = head = None environment = ( - util.asbool(config.get_main_option("revision_environment")) + util.asbool(config.get_alembic_option("revision_environment")) or indicate_current ) @@ -604,11 +681,18 @@ def branches(config: Config, verbose: bool = False) -> None: ) -def current(config: Config, verbose: bool = False) -> None: +def current( + config: Config, check_heads: bool = False, verbose: bool = False +) -> None: """Display the current revision for a database. :param config: a :class:`.Config` instance. + :param check_heads: Check if all head revisions are applied to the + database. Raises :class:`.DatabaseNotAtHead` if this is not the case. + + .. versionadded:: 1.17.1 + :param verbose: output in verbose mode. 
""" @@ -621,6 +705,12 @@ def display_version(rev, context): "Current revision(s) for %s:", util.obfuscate_url_pw(context.connection.engine.url), ) + if check_heads and ( + set(context.get_current_heads()) != set(script.get_heads()) + ): + raise util.DatabaseNotAtHead( + "Database is not on all head revisions" + ) for rev in script.get_all_current(rev): config.print_stdout(rev.cmd_format(verbose)) diff --git a/libs/alembic/config.py b/libs/alembic/config.py index 2c52e7cd13..121a4459cd 100644 --- a/libs/alembic/config.py +++ b/libs/alembic/config.py @@ -4,7 +4,10 @@ from argparse import Namespace from configparser import ConfigParser import inspect +import logging import os +from pathlib import Path +import re import sys from typing import Any from typing import cast @@ -12,6 +15,7 @@ from typing import Mapping from typing import Optional from typing import overload +from typing import Protocol from typing import Sequence from typing import TextIO from typing import Union @@ -22,6 +26,10 @@ from . import command from . import util from .util import compat +from .util.pyfiles import _preserving_path_as_str + + +log = logging.getLogger(__name__) class Config: @@ -71,7 +79,20 @@ class Config: alembic_cfg.attributes['connection'] = connection command.upgrade(alembic_cfg, "head") - :param file\_: name of the .ini file to open. + :param file\_: name of the .ini file to open if an ``alembic.ini`` is + to be used. This should refer to the ``alembic.ini`` file, either as + a filename or a full path to the file. This filename if passed must refer + to an **ini file in ConfigParser format** only. + + :param toml\_file: name of the pyproject.toml file to open if a + ``pyproject.toml`` file is to be used. This should refer to the + ``pyproject.toml`` file, either as a filename or a full path to the file. + This file must be in toml format. Both :paramref:`.Config.file\_` and + :paramref:`.Config.toml\_file` may be passed simultaneously, or + exclusively. + + .. 
versionadded:: 1.16.0 + :param ini_section: name of the main Alembic section within the .ini file :param output_buffer: optional file-like input buffer which @@ -81,12 +102,13 @@ class Config: Defaults to ``sys.stdout``. :param config_args: A dictionary of keys and values that will be used - for substitution in the alembic config file. The dictionary as given - is **copied** to a new one, stored locally as the attribute - ``.config_args``. When the :attr:`.Config.file_config` attribute is - first invoked, the replacement variable ``here`` will be added to this - dictionary before the dictionary is passed to ``ConfigParser()`` - to parse the .ini file. + for substitution in the alembic config file, as well as the pyproject.toml + file, depending on which / both are used. The dictionary as given is + **copied** to two new, independent dictionaries, stored locally under the + attributes ``.config_args`` and ``.toml_args``. Both of these + dictionaries will also be populated with the replacement variable + ``%(here)s``, which refers to the location of the .ini and/or .toml file + as appropriate. :param attributes: optional dictionary of arbitrary Python keys/values, which will be populated into the :attr:`.Config.attributes` dictionary. 
@@ -100,6 +122,7 @@ class Config: def __init__( self, file_: Union[str, os.PathLike[str], None] = None, + toml_file: Union[str, os.PathLike[str], None] = None, ini_section: str = "alembic", output_buffer: Optional[TextIO] = None, stdout: TextIO = sys.stdout, @@ -108,12 +131,18 @@ def __init__( attributes: Optional[Dict[str, Any]] = None, ) -> None: """Construct a new :class:`.Config`""" - self.config_file_name = file_ + self.config_file_name = ( + _preserving_path_as_str(file_) if file_ else None + ) + self.toml_file_name = ( + _preserving_path_as_str(toml_file) if toml_file else None + ) self.config_ini_section = ini_section self.output_buffer = output_buffer self.stdout = stdout self.cmd_opts = cmd_opts self.config_args = dict(config_args) + self.toml_args = dict(config_args) if attributes: self.attributes.update(attributes) @@ -129,9 +158,28 @@ def __init__( """ - config_file_name: Union[str, os.PathLike[str], None] = None + config_file_name: Optional[str] = None """Filesystem path to the .ini file in use.""" + toml_file_name: Optional[str] = None + """Filesystem path to the pyproject.toml file in use. + + .. versionadded:: 1.16.0 + + """ + + @property + def _config_file_path(self) -> Optional[Path]: + if self.config_file_name is None: + return None + return Path(self.config_file_name) + + @property + def _toml_file_path(self) -> Optional[Path]: + if self.toml_file_name is None: + return None + return Path(self.toml_file_name) + config_ini_section: str = None # type:ignore[assignment] """Name of the config file section to read basic configuration from. Defaults to ``alembic``, that is the ``[alembic]`` section @@ -187,25 +235,55 @@ def print_stdout(self, text: str, *arg: Any) -> None: def file_config(self) -> ConfigParser: """Return the underlying ``ConfigParser`` object. 
- Direct access to the .ini file is available here, + Dir*-ect access to the .ini file is available here, though the :meth:`.Config.get_section` and :meth:`.Config.get_main_option` methods provide a possibly simpler interface. """ - if self.config_file_name: - here = os.path.abspath(os.path.dirname(self.config_file_name)) + if self._config_file_path: + here = self._config_file_path.absolute().parent else: - here = "" - self.config_args["here"] = here + here = Path() + self.config_args["here"] = here.as_posix() file_config = ConfigParser(self.config_args) - if self.config_file_name: - compat.read_config_parser(file_config, [self.config_file_name]) + + verbose = getattr(self.cmd_opts, "verbose", False) + if self._config_file_path: + compat.read_config_parser(file_config, [self._config_file_path]) + if verbose: + log.info( + "Loading config from file: %s", self._config_file_path + ) else: file_config.add_section(self.config_ini_section) + if verbose: + log.info( + "No config file provided; using in-memory default config" + ) return file_config + @util.memoized_property + def toml_alembic_config(self) -> Mapping[str, Any]: + """Return a dictionary of the [tool.alembic] section from + pyproject.toml""" + + if self._toml_file_path and self._toml_file_path.exists(): + + here = self._toml_file_path.absolute().parent + self.toml_args["here"] = here.as_posix() + + with open(self._toml_file_path, "rb") as f: + toml_data = compat.tomllib.load(f) + data = toml_data.get("tool", {}).get("alembic", {}) + if not isinstance(data, dict): + raise util.CommandError("Incorrect TOML format") + return data + + else: + return {} + def get_template_directory(self) -> str: """Return the directory where Alembic setup templates are found. 
@@ -215,8 +293,19 @@ def get_template_directory(self) -> str: """ import alembic - package_dir = os.path.abspath(os.path.dirname(alembic.__file__)) - return os.path.join(package_dir, "templates") + package_dir = Path(alembic.__file__).absolute().parent + return str(package_dir / "templates") + + def _get_template_path(self) -> Path: + """Return the directory where Alembic setup templates are found. + + This method is used by the alembic ``init`` and ``list_templates`` + commands. + + .. versionadded:: 1.16.0 + + """ + return Path(self.get_template_directory()) @overload def get_section( @@ -278,6 +367,12 @@ def set_section_option(self, section: str, name: str, value: str) -> None: The value here will override whatever was in the .ini file. + Does **NOT** consume from the pyproject.toml file. + + .. seealso:: + + :meth:`.Config.get_alembic_option` - includes pyproject support + :param section: name of the section :param name: name of the value @@ -326,9 +421,106 @@ def get_main_option( section, unless the ``-n/--name`` flag were used to indicate a different section. + Does **NOT** consume from the pyproject.toml file. + + .. seealso:: + + :meth:`.Config.get_alembic_option` - includes pyproject support + """ return self.get_section_option(self.config_ini_section, name, default) + @overload + def get_alembic_option(self, name: str, default: str) -> str: ... + + @overload + def get_alembic_option( + self, name: str, default: Optional[str] = None + ) -> Optional[str]: ... + + def get_alembic_option( + self, name: str, default: Optional[str] = None + ) -> Union[ + None, str, list[str], dict[str, str], list[dict[str, str]], int + ]: + """Return an option from the "[alembic]" or "[tool.alembic]" section + of the configparser-parsed .ini file (e.g. ``alembic.ini``) or + toml-parsed ``pyproject.toml`` file. + + The value returned is expected to be None, string, list of strings, + or dictionary of strings. 
Within each type of string value, the + ``%(here)s`` token is substituted out with the absolute path of the + ``pyproject.toml`` file, as are other tokens which are extracted from + the :paramref:`.Config.config_args` dictionary. + + Searches always prioritize the configparser namespace first, before + searching in the toml namespace. + + If Alembic was run using the ``-n/--name`` flag to indicate an + alternate main section name, this is taken into account **only** for + the configparser-parsed .ini file. The section name in toml is always + ``[tool.alembic]``. + + + .. versionadded:: 1.16.0 + + """ + + if self.file_config.has_option(self.config_ini_section, name): + return self.file_config.get(self.config_ini_section, name) + else: + return self._get_toml_config_value(name, default=default) + + def get_alembic_boolean_option(self, name: str) -> bool: + if self.file_config.has_option(self.config_ini_section, name): + return ( + self.file_config.get(self.config_ini_section, name) == "true" + ) + else: + value = self.toml_alembic_config.get(name, False) + if not isinstance(value, bool): + raise util.CommandError( + f"boolean value expected for TOML parameter {name!r}" + ) + return value + + def _get_toml_config_value( + self, name: str, default: Optional[Any] = None + ) -> Union[ + None, str, list[str], dict[str, str], list[dict[str, str]], int + ]: + USE_DEFAULT = object() + value: Union[None, str, list[str], dict[str, str], int] = ( + self.toml_alembic_config.get(name, USE_DEFAULT) + ) + if value is USE_DEFAULT: + return default + if value is not None: + if isinstance(value, str): + value = value % (self.toml_args) + elif isinstance(value, list): + if value and isinstance(value[0], dict): + value = [ + {k: v % (self.toml_args) for k, v in dv.items()} + for dv in value + ] + else: + value = cast( + "list[str]", [v % (self.toml_args) for v in value] + ) + elif isinstance(value, dict): + value = cast( + "dict[str, str]", + {k: v % (self.toml_args) for k, v in 
value.items()}, + ) + elif isinstance(value, int): + return value + else: + raise util.CommandError( + f"unsupported TOML value type for key: {name!r}" + ) + return value + @util.memoized_property def messaging_opts(self) -> MessagingOptions: """The messaging options.""" @@ -339,181 +531,324 @@ def messaging_opts(self) -> MessagingOptions: ), ) + def _get_file_separator_char(self, *names: str) -> Optional[str]: + for name in names: + separator = self.get_main_option(name) + if separator is not None: + break + else: + return None + + split_on_path = { + "space": " ", + "newline": "\n", + "os": os.pathsep, + ":": ":", + ";": ";", + } + + try: + sep = split_on_path[separator] + except KeyError as ke: + raise ValueError( + "'%s' is not a valid value for %s; " + "expected 'space', 'newline', 'os', ':', ';'" + % (separator, name) + ) from ke + else: + if name == "version_path_separator": + util.warn_deprecated( + "The version_path_separator configuration parameter " + "is deprecated; please use path_separator" + ) + return sep + + def get_version_locations_list(self) -> Optional[list[str]]: + + version_locations_str = self.file_config.get( + self.config_ini_section, "version_locations", fallback=None + ) + + if version_locations_str: + split_char = self._get_file_separator_char( + "path_separator", "version_path_separator" + ) + + if split_char is None: + + # legacy behaviour for backwards compatibility + util.warn_deprecated( + "No path_separator found in configuration; " + "falling back to legacy splitting on spaces/commas " + "for version_locations. Consider adding " + "path_separator=os to Alembic config." 
+ ) + + _split_on_space_comma = re.compile(r", *|(?: +)") + return _split_on_space_comma.split(version_locations_str) + else: + return [ + x.strip() + for x in version_locations_str.split(split_char) + if x + ] + else: + return cast( + "list[str]", + self._get_toml_config_value("version_locations", None), + ) + + def get_prepend_sys_paths_list(self) -> Optional[list[str]]: + prepend_sys_path_str = self.file_config.get( + self.config_ini_section, "prepend_sys_path", fallback=None + ) + + if prepend_sys_path_str: + split_char = self._get_file_separator_char("path_separator") + + if split_char is None: + + # legacy behaviour for backwards compatibility + util.warn_deprecated( + "No path_separator found in configuration; " + "falling back to legacy splitting on spaces, commas, " + "and colons for prepend_sys_path. Consider adding " + "path_separator=os to Alembic config." + ) + + _split_on_space_comma_colon = re.compile(r", *|(?: +)|\:") + return _split_on_space_comma_colon.split(prepend_sys_path_str) + else: + return [ + x.strip() + for x in prepend_sys_path_str.split(split_char) + if x + ] + else: + return cast( + "list[str]", + self._get_toml_config_value("prepend_sys_path", None), + ) + + def get_hooks_list(self) -> list[PostWriteHookConfig]: + + hooks: list[PostWriteHookConfig] = [] + + if not self.file_config.has_section("post_write_hooks"): + toml_hook_config = cast( + "list[dict[str, str]]", + self._get_toml_config_value("post_write_hooks", []), + ) + for cfg in toml_hook_config: + opts = dict(cfg) + opts["_hook_name"] = opts.pop("name") + hooks.append(opts) + + else: + _split_on_space_comma = re.compile(r", *|(?: +)") + ini_hook_config = self.get_section("post_write_hooks", {}) + names = _split_on_space_comma.split( + ini_hook_config.get("hooks", "") + ) + + for name in names: + if not name: + continue + opts = { + key[len(name) + 1 :]: ini_hook_config[key] + for key in ini_hook_config + if key.startswith(name + ".") + } + + opts["_hook_name"] = name + 
hooks.append(opts) + + return hooks + + +PostWriteHookConfig = Mapping[str, str] + class MessagingOptions(TypedDict, total=False): quiet: bool +class CommandFunction(Protocol): + """A function that may be registered in the CLI as an alembic command. + It must be a named function and it must accept a :class:`.Config` object + as the first argument. + + .. versionadded:: 1.15.3 + + """ + + __name__: str + + def __call__(self, config: Config, *args: Any, **kwargs: Any) -> Any: ... + + class CommandLine: + """Provides the command line interface to Alembic.""" + def __init__(self, prog: Optional[str] = None) -> None: self._generate_args(prog) - def _generate_args(self, prog: Optional[str]) -> None: - def add_options( - fn: Any, parser: Any, positional: Any, kwargs: Any - ) -> None: - kwargs_opts = { - "template": ( - "-t", - "--template", - dict( - default="generic", - type=str, - help="Setup template for use with 'init'", - ), - ), - "message": ( - "-m", - "--message", - dict( - type=str, help="Message string to use with 'revision'" - ), - ), - "sql": ( - "--sql", - dict( - action="store_true", - help="Don't emit SQL to database - dump to " - "standard output/file instead. 
See docs on " - "offline mode.", - ), - ), - "tag": ( - "--tag", - dict( - type=str, - help="Arbitrary 'tag' name - can be used by " - "custom env.py scripts.", - ), - ), - "head": ( - "--head", - dict( - type=str, - help="Specify head revision or @head " - "to base new revision on.", - ), - ), - "splice": ( - "--splice", - dict( - action="store_true", - help="Allow a non-head revision as the " - "'head' to splice onto", - ), - ), - "depends_on": ( - "--depends-on", - dict( - action="append", - help="Specify one or more revision identifiers " - "which this revision should depend on.", - ), - ), - "rev_id": ( - "--rev-id", - dict( - type=str, - help="Specify a hardcoded revision id instead of " - "generating one", - ), - ), - "version_path": ( - "--version-path", - dict( - type=str, - help="Specify specific path from config for " - "version file", - ), - ), - "branch_label": ( - "--branch-label", - dict( - type=str, - help="Specify a branch label to apply to the " - "new revision", - ), - ), - "verbose": ( - "-v", - "--verbose", - dict(action="store_true", help="Use more verbose output"), - ), - "resolve_dependencies": ( - "--resolve-dependencies", - dict( - action="store_true", - help="Treat dependency versions as down revisions", - ), - ), - "autogenerate": ( - "--autogenerate", - dict( - action="store_true", - help="Populate revision script with candidate " - "migration operations, based on comparison " - "of database to model.", - ), - ), - "rev_range": ( - "-r", - "--rev-range", - dict( - action="store", - help="Specify a revision range; " - "format is [start]:[end]", - ), - ), - "indicate_current": ( - "-i", - "--indicate-current", - dict( - action="store_true", - help="Indicate the current revision", - ), - ), - "purge": ( - "--purge", - dict( - action="store_true", - help="Unconditionally erase the version table " - "before stamping", - ), - ), - "package": ( - "--package", - dict( - action="store_true", - help="Write empty __init__.py files to the " - 
"environment and version locations", - ), + _KWARGS_OPTS = { + "template": ( + "-t", + "--template", + dict( + default="generic", + type=str, + help="Setup template for use with 'init'", + ), + ), + "message": ( + "-m", + "--message", + dict(type=str, help="Message string to use with 'revision'"), + ), + "sql": ( + "--sql", + dict( + action="store_true", + help="Don't emit SQL to database - dump to " + "standard output/file instead. See docs on " + "offline mode.", + ), + ), + "tag": ( + "--tag", + dict( + type=str, + help="Arbitrary 'tag' name - can be used by " + "custom env.py scripts.", + ), + ), + "head": ( + "--head", + dict( + type=str, + help="Specify head revision or @head " + "to base new revision on.", + ), + ), + "splice": ( + "--splice", + dict( + action="store_true", + help="Allow a non-head revision as the 'head' to splice onto", + ), + ), + "depends_on": ( + "--depends-on", + dict( + action="append", + help="Specify one or more revision identifiers " + "which this revision should depend on.", + ), + ), + "rev_id": ( + "--rev-id", + dict( + type=str, + help="Specify a hardcoded revision id instead of " + "generating one", + ), + ), + "version_path": ( + "--version-path", + dict( + type=str, + help="Specify specific path from config for version file", + ), + ), + "branch_label": ( + "--branch-label", + dict( + type=str, + help="Specify a branch label to apply to the new revision", + ), + ), + "verbose": ( + "-v", + "--verbose", + dict(action="store_true", help="Use more verbose output"), + ), + "resolve_dependencies": ( + "--resolve-dependencies", + dict( + action="store_true", + help="Treat dependency versions as down revisions", + ), + ), + "autogenerate": ( + "--autogenerate", + dict( + action="store_true", + help="Populate revision script with candidate " + "migration operations, based on comparison " + "of database to model.", + ), + ), + "rev_range": ( + "-r", + "--rev-range", + dict( + action="store", + help="Specify a revision range; format is 
[start]:[end]", + ), + ), + "indicate_current": ( + "-i", + "--indicate-current", + dict( + action="store_true", + help="Indicate the current revision", + ), + ), + "purge": ( + "--purge", + dict( + action="store_true", + help="Unconditionally erase the version table before stamping", + ), + ), + "package": ( + "--package", + dict( + action="store_true", + help="Write empty __init__.py files to the " + "environment and version locations", + ), + ), + "check_heads": ( + "-c", + "--check-heads", + dict( + action="store_true", + help=( + "Check if all head revisions are applied to the database. " + "Exit with an error code if this is not the case." ), - } - positional_help = { - "directory": "location of scripts directory", - "revision": "revision identifier", - "revisions": "one or more revisions, or 'heads' for all heads", - } - for arg in kwargs: - if arg in kwargs_opts: - args = kwargs_opts[arg] - args, kw = args[0:-1], args[-1] - parser.add_argument(*args, **kw) - - for arg in positional: - if ( - arg == "revisions" - or fn in positional_translations - and positional_translations[fn][arg] == "revisions" - ): - subparser.add_argument( - "revisions", - nargs="+", - help=positional_help.get("revisions"), - ) - else: - subparser.add_argument(arg, help=positional_help.get(arg)) + ), + ), + } + _POSITIONAL_OPTS = { + "directory": dict(help="location of scripts directory"), + "revision": dict( + help="revision identifier", + ), + "revisions": dict( + nargs="+", + help="one or more revisions, or 'heads' for all heads", + ), + } + _POSITIONAL_TRANSLATIONS: dict[Any, dict[str, str]] = { + command.stamp: {"revision": "revisions"} + } + def _generate_args(self, prog: Optional[str]) -> None: parser = ArgumentParser(prog=prog) parser.add_argument( @@ -522,17 +857,19 @@ def add_options( parser.add_argument( "-c", "--config", - type=str, - default=os.environ.get("ALEMBIC_CONFIG", "alembic.ini"), + action="append", help="Alternate config file; defaults to value of " - 
'ALEMBIC_CONFIG environment variable, or "alembic.ini"', + 'ALEMBIC_CONFIG environment variable, or "alembic.ini". ' + "May also refer to pyproject.toml file. May be specified twice " + "to reference both files separately", ) parser.add_argument( "-n", "--name", type=str, default="alembic", - help="Name of section in .ini file to " "use for Alembic config", + help="Name of section in .ini file to use for Alembic config " + "(only applies to configparser config, not toml)", ) parser.add_argument( "-x", @@ -552,50 +889,81 @@ def add_options( action="store_true", help="Do not log to std output.", ) - subparsers = parser.add_subparsers() - - positional_translations: Dict[Any, Any] = { - command.stamp: {"revision": "revisions"} - } - for fn in [getattr(command, n) for n in dir(command)]: + self.subparsers = parser.add_subparsers() + alembic_commands = ( + cast(CommandFunction, fn) + for fn in (getattr(command, name) for name in dir(command)) if ( inspect.isfunction(fn) and fn.__name__[0] != "_" and fn.__module__ == "alembic.command" - ): - spec = compat.inspect_getfullargspec(fn) - if spec[3] is not None: - positional = spec[0][1 : -len(spec[3])] - kwarg = spec[0][-len(spec[3]) :] - else: - positional = spec[0][1:] - kwarg = [] + ) + ) - if fn in positional_translations: - positional = [ - positional_translations[fn].get(name, name) - for name in positional - ] + for fn in alembic_commands: + self.register_command(fn) - # parse first line(s) of helptext without a line break - help_ = fn.__doc__ - if help_: - help_text = [] - for line in help_.split("\n"): - if not line.strip(): - break - else: - help_text.append(line.strip()) - else: - help_text = [] - subparser = subparsers.add_parser( - fn.__name__, help=" ".join(help_text) - ) - add_options(fn, subparser, positional, kwarg) - subparser.set_defaults(cmd=(fn, positional, kwarg)) self.parser = parser + def register_command(self, fn: CommandFunction) -> None: + """Registers a function as a CLI subcommand. 
The subcommand name + matches the function name, the arguments are extracted from the + signature and the help text is read from the docstring. + + .. versionadded:: 1.15.3 + + .. seealso:: + + :ref:`custom_commandline` + """ + + positional, kwarg, help_text = self._inspect_function(fn) + + subparser = self.subparsers.add_parser(fn.__name__, help=help_text) + subparser.set_defaults(cmd=(fn, positional, kwarg)) + + for arg in kwarg: + if arg in self._KWARGS_OPTS: + kwarg_opt = self._KWARGS_OPTS[arg] + args, opts = kwarg_opt[0:-1], kwarg_opt[-1] + subparser.add_argument(*args, **opts) # type:ignore + + for arg in positional: + opts = self._POSITIONAL_OPTS.get(arg, {}) + subparser.add_argument(arg, **opts) # type:ignore + + def _inspect_function(self, fn: CommandFunction) -> tuple[Any, Any, str]: + spec = compat.inspect_getfullargspec(fn) + if spec[3] is not None: + positional = spec[0][1 : -len(spec[3])] + kwarg = spec[0][-len(spec[3]) :] + else: + positional = spec[0][1:] + kwarg = [] + + if fn in self._POSITIONAL_TRANSLATIONS: + positional = [ + self._POSITIONAL_TRANSLATIONS[fn].get(name, name) + for name in positional + ] + + # parse first line(s) of helptext without a line break + help_ = fn.__doc__ + if help_: + help_lines = [] + for line in help_.split("\n"): + if not line.strip(): + break + else: + help_lines.append(line.strip()) + else: + help_lines = [] + + help_text = " ".join(help_lines) + + return positional, kwarg, help_text + def run_cmd(self, config: Config, options: Namespace) -> None: fn, positional, kwarg = options.cmd @@ -611,15 +979,58 @@ def run_cmd(self, config: Config, options: Namespace) -> None: else: util.err(str(e), **config.messaging_opts) + def _inis_from_config(self, options: Namespace) -> tuple[str, str]: + names = options.config + + alembic_config_env = os.environ.get("ALEMBIC_CONFIG") + if ( + alembic_config_env + and os.path.basename(alembic_config_env) == "pyproject.toml" + ): + default_pyproject_toml = alembic_config_env + 
default_alembic_config = "alembic.ini" + elif alembic_config_env: + default_pyproject_toml = "pyproject.toml" + default_alembic_config = alembic_config_env + else: + default_alembic_config = "alembic.ini" + default_pyproject_toml = "pyproject.toml" + + if not names: + return default_pyproject_toml, default_alembic_config + + toml = ini = None + + for name in names: + if os.path.basename(name) == "pyproject.toml": + if toml is not None: + raise util.CommandError( + "pyproject.toml indicated more than once" + ) + toml = name + else: + if ini is not None: + raise util.CommandError( + "only one ini file may be indicated" + ) + ini = name + + return toml if toml else default_pyproject_toml, ( + ini if ini else default_alembic_config + ) + def main(self, argv: Optional[Sequence[str]] = None) -> None: + """Executes the command line with the provided arguments.""" options = self.parser.parse_args(argv) if not hasattr(options, "cmd"): # see http://bugs.python.org/issue9253, argparse # behavior changed incompatibly in py3.3 self.parser.error("too few arguments") else: + toml, ini = self._inis_from_config(options) cfg = Config( - file_=options.config, + file_=ini, + toml_file=toml, ini_section=options.name, cmd_opts=options, ) diff --git a/libs/alembic/context.pyi b/libs/alembic/context.pyi index 80619fb24f..6045d8b3da 100644 --- a/libs/alembic/context.pyi +++ b/libs/alembic/context.pyi @@ -5,7 +5,6 @@ from __future__ import annotations from typing import Any from typing import Callable from typing import Collection -from typing import ContextManager from typing import Dict from typing import Iterable from typing import List @@ -20,6 +19,8 @@ from typing import Tuple from typing import TYPE_CHECKING from typing import Union +from typing_extensions import ContextManager + if TYPE_CHECKING: from sqlalchemy.engine.base import Connection from sqlalchemy.engine.url import URL @@ -40,7 +41,9 @@ if TYPE_CHECKING: ### end imports ### -def begin_transaction() -> 
Union[_ProxyTransaction, ContextManager[None]]: +def begin_transaction() -> ( + Union[_ProxyTransaction, ContextManager[None, Optional[bool]]] +): """Return a context manager that will enclose an operation within a "transaction", as defined by the environment's offline @@ -200,6 +203,7 @@ def configure( None, ] ] = None, + autogenerate_plugins: Optional[Sequence[str]] = None, **kw: Any, ) -> None: """Configure a :class:`.MigrationContext` within this @@ -619,6 +623,25 @@ def configure( :paramref:`.command.revision.process_revision_directives` + :param autogenerate_plugins: A list of string names of "plugins" that + should participate in this autogenerate run. Defaults to the list + ``["alembic.autogenerate.*"]``, which indicates that Alembic's default + autogeneration plugins will be used. + + See the section :ref:`plugins_autogenerate` for complete background + on how to use this parameter. + + .. versionadded:: 1.18.0 Added a new plugin system for autogenerate + compare directives. + + .. 
seealso:: + + :ref:`plugins_autogenerate` - background on enabling/disabling + autogenerate plugins + + :ref:`alembic.plugins.toplevel` - Introduction and documentation + to the plugin system + Parameters specific to individual backends: :param mssql_batch_separator: The "batch separator" which will diff --git a/libs/alembic/ddl/base.py b/libs/alembic/ddl/base.py index 6fbe95245c..30a3a15a39 100644 --- a/libs/alembic/ddl/base.py +++ b/libs/alembic/ddl/base.py @@ -4,6 +4,7 @@ from __future__ import annotations import functools +from typing import Any from typing import Optional from typing import TYPE_CHECKING from typing import Union @@ -14,7 +15,10 @@ from sqlalchemy.ext.compiler import compiles from sqlalchemy.schema import Column from sqlalchemy.schema import DDLElement +from sqlalchemy.sql.elements import ColumnElement from sqlalchemy.sql.elements import quoted_name +from sqlalchemy.sql.elements import TextClause +from sqlalchemy.sql.schema import FetchedValue from ..util.sqla_compat import _columns_for_constraint # noqa from ..util.sqla_compat import _find_columns # noqa @@ -23,20 +27,16 @@ from ..util.sqla_compat import _table_for_constraint # noqa if TYPE_CHECKING: - from typing import Any + from sqlalchemy import Computed + from sqlalchemy import Identity from sqlalchemy.sql.compiler import Compiled from sqlalchemy.sql.compiler import DDLCompiler - from sqlalchemy.sql.elements import TextClause - from sqlalchemy.sql.functions import Function - from sqlalchemy.sql.schema import FetchedValue from sqlalchemy.sql.type_api import TypeEngine from .impl import DefaultImpl - from ..util.sqla_compat import Computed - from ..util.sqla_compat import Identity -_ServerDefault = Union["TextClause", "FetchedValue", "Function[Any]", str] +_ServerDefaultType = Union[FetchedValue, str, TextClause, ColumnElement[Any]] class AlterTable(DDLElement): @@ -75,7 +75,7 @@ def __init__( schema: Optional[str] = None, existing_type: Optional[TypeEngine] = None, existing_nullable: 
Optional[bool] = None, - existing_server_default: Optional[_ServerDefault] = None, + existing_server_default: Optional[_ServerDefaultType] = None, existing_comment: Optional[str] = None, ) -> None: super().__init__(name, schema=schema) @@ -119,7 +119,7 @@ def __init__( self, name: str, column_name: str, - default: Optional[_ServerDefault], + default: Optional[_ServerDefaultType], **kw, ) -> None: super().__init__(name, column_name, **kw) @@ -154,17 +154,28 @@ def __init__( name: str, column: Column[Any], schema: Optional[Union[quoted_name, str]] = None, + if_not_exists: Optional[bool] = None, + inline_references: Optional[bool] = None, + inline_primary_key: Optional[bool] = None, ) -> None: super().__init__(name, schema=schema) self.column = column + self.if_not_exists = if_not_exists + self.inline_references = inline_references + self.inline_primary_key = inline_primary_key class DropColumn(AlterTable): def __init__( - self, name: str, column: Column[Any], schema: Optional[str] = None + self, + name: str, + column: Column[Any], + schema: Optional[str] = None, + if_exists: Optional[bool] = None, ) -> None: super().__init__(name, schema=schema) self.column = column + self.if_exists = if_exists class ColumnComment(AlterColumn): @@ -189,7 +200,14 @@ def visit_rename_table( def visit_add_column(element: AddColumn, compiler: DDLCompiler, **kw) -> str: return "%s %s" % ( alter_table(compiler, element.table_name, element.schema), - add_column(compiler, element.column, **kw), + add_column( + compiler, + element.column, + if_not_exists=element.if_not_exists, + inline_references=element.inline_references, + inline_primary_key=element.inline_primary_key, + **kw, + ), ) @@ -197,7 +215,9 @@ def visit_add_column(element: AddColumn, compiler: DDLCompiler, **kw) -> str: def visit_drop_column(element: DropColumn, compiler: DDLCompiler, **kw) -> str: return "%s %s" % ( alter_table(compiler, element.table_name, element.schema), - drop_column(compiler, element.column.name, **kw), + 
drop_column( + compiler, element.column.name, if_exists=element.if_exists, **kw + ), ) @@ -297,11 +317,15 @@ def format_column_name( def format_server_default( compiler: DDLCompiler, - default: Optional[_ServerDefault], + default: Optional[_ServerDefaultType], ) -> str: - return compiler.get_column_default_string( + # this can be updated to use compiler.render_default_string + # for SQLAlchemy 2.0 and above; not in 1.4 + default_str = compiler.get_column_default_string( Column("x", Integer, server_default=default) ) + assert default_str is not None + return default_str def format_type(compiler: DDLCompiler, type_: TypeEngine) -> str: @@ -316,16 +340,62 @@ def alter_table( return "ALTER TABLE %s" % format_table_name(compiler, name, schema) -def drop_column(compiler: DDLCompiler, name: str, **kw) -> str: - return "DROP COLUMN %s" % format_column_name(compiler, name) +def drop_column( + compiler: DDLCompiler, name: str, if_exists: Optional[bool] = None, **kw +) -> str: + return "DROP COLUMN %s%s" % ( + "IF EXISTS " if if_exists else "", + format_column_name(compiler, name), + ) def alter_column(compiler: DDLCompiler, name: str) -> str: return "ALTER COLUMN %s" % format_column_name(compiler, name) -def add_column(compiler: DDLCompiler, column: Column[Any], **kw) -> str: - text = "ADD COLUMN %s" % compiler.get_column_specification(column, **kw) +def add_column( + compiler: DDLCompiler, + column: Column[Any], + if_not_exists: Optional[bool] = None, + inline_references: Optional[bool] = None, + inline_primary_key: Optional[bool] = None, + **kw, +) -> str: + text = "ADD COLUMN %s%s" % ( + "IF NOT EXISTS " if if_not_exists else "", + compiler.get_column_specification(column, **kw), + ) + + if inline_primary_key and column.primary_key: + text += " PRIMARY KEY" + + # Handle inline REFERENCES if requested + # Only render inline if there's exactly one foreign key AND the + # ForeignKeyConstraint is single-column, to avoid non-deterministic + # behavior with sets and to ensure 
proper syntax + if ( + inline_references + and len(column.foreign_keys) == 1 + and (fk := list(column.foreign_keys)[0]) + and fk.constraint is not None + and len(fk.constraint.columns) == 1 + ): + ref_col = fk.column + ref_table = ref_col.table + + # Format with proper quoting + if ref_table.schema: + table_name = "%s.%s" % ( + compiler.preparer.quote_schema(ref_table.schema), + compiler.preparer.quote(ref_table.name), + ) + else: + table_name = compiler.preparer.quote(ref_table.name) + + text += " REFERENCES %s (%s)" % ( + table_name, + compiler.preparer.quote(ref_col.name), + ) const = " ".join( compiler.process(constraint) for constraint in column.constraints diff --git a/libs/alembic/ddl/impl.py b/libs/alembic/ddl/impl.py index 2609a62dec..964cd1f30b 100644 --- a/libs/alembic/ddl/impl.py +++ b/libs/alembic/ddl/impl.py @@ -43,10 +43,13 @@ from sqlalchemy.engine import Connection from sqlalchemy.engine import Dialect from sqlalchemy.engine.cursor import CursorResult + from sqlalchemy.engine.interfaces import ReflectedForeignKeyConstraint + from sqlalchemy.engine.interfaces import ReflectedIndex + from sqlalchemy.engine.interfaces import ReflectedPrimaryKeyConstraint + from sqlalchemy.engine.interfaces import ReflectedUniqueConstraint from sqlalchemy.engine.reflection import Inspector from sqlalchemy.sql import ClauseElement from sqlalchemy.sql import Executable - from sqlalchemy.sql.elements import ColumnElement from sqlalchemy.sql.elements import quoted_name from sqlalchemy.sql.schema import Constraint from sqlalchemy.sql.schema import ForeignKeyConstraint @@ -55,11 +58,17 @@ from sqlalchemy.sql.selectable import TableClause from sqlalchemy.sql.type_api import TypeEngine - from .base import _ServerDefault + from .base import _ServerDefaultType from ..autogenerate.api import AutogenContext from ..operations.batch import ApplyBatchImpl from ..operations.batch import BatchOperationsImpl + _ReflectedConstraint = ( + ReflectedForeignKeyConstraint + | 
ReflectedPrimaryKeyConstraint + | ReflectedIndex + | ReflectedUniqueConstraint + ) log = logging.getLogger(__name__) @@ -257,8 +266,11 @@ def alter_column( self, table_name: str, column_name: str, + *, nullable: Optional[bool] = None, - server_default: Union[_ServerDefault, Literal[False]] = False, + server_default: Optional[ + Union[_ServerDefaultType, Literal[False]] + ] = False, name: Optional[str] = None, type_: Optional[TypeEngine] = None, schema: Optional[str] = None, @@ -266,7 +278,9 @@ def alter_column( comment: Optional[Union[str, Literal[False]]] = False, existing_comment: Optional[str] = None, existing_type: Optional[TypeEngine] = None, - existing_server_default: Optional[_ServerDefault] = None, + existing_server_default: Optional[ + Union[_ServerDefaultType, Literal[False]] + ] = None, existing_nullable: Optional[bool] = None, existing_autoincrement: Optional[bool] = None, **kw: Any, @@ -369,25 +383,47 @@ def add_column( self, table_name: str, column: Column[Any], + *, schema: Optional[Union[str, quoted_name]] = None, + if_not_exists: Optional[bool] = None, + inline_references: Optional[bool] = None, + inline_primary_key: Optional[bool] = None, ) -> None: - self._exec(base.AddColumn(table_name, column, schema=schema)) + self._exec( + base.AddColumn( + table_name, + column, + schema=schema, + if_not_exists=if_not_exists, + inline_references=inline_references, + inline_primary_key=inline_primary_key, + ) + ) def drop_column( self, table_name: str, column: Column[Any], + *, schema: Optional[str] = None, + if_exists: Optional[bool] = None, **kw, ) -> None: - self._exec(base.DropColumn(table_name, column, schema=schema)) + self._exec( + base.DropColumn( + table_name, column, schema=schema, if_exists=if_exists + ) + ) - def add_constraint(self, const: Any) -> None: + def add_constraint(self, const: Any, **kw: Any) -> None: if const._create_rule is None or const._create_rule(self): - self._exec(schema.AddConstraint(const)) + if sqla_compat.sqla_2_1: + # this 
should be the default already + kw.setdefault("isolate_from_table", True) + self._exec(schema.AddConstraint(const, **kw)) - def drop_constraint(self, const: Constraint) -> None: - self._exec(schema.DropConstraint(const)) + def drop_constraint(self, const: Constraint, **kw: Any) -> None: + self._exec(schema.DropConstraint(const, **kw)) def rename_table( self, @@ -440,7 +476,7 @@ def create_table_comment(self, table: Table) -> None: def drop_table_comment(self, table: Table) -> None: self._exec(schema.DropTableComment(table)) - def create_column_comment(self, column: ColumnElement[Any]) -> None: + def create_column_comment(self, column: Column[Any]) -> None: self._exec(schema.SetColumnComment(column)) def drop_index(self, index: Index, **kw: Any) -> None: @@ -459,7 +495,9 @@ def bulk_insert( if self.as_sql: for row in rows: self._exec( - sqla_compat._insert_inline(table).values( + table.insert() + .inline() + .values( **{ k: ( sqla_compat._literal_bindparam( @@ -477,14 +515,10 @@ def bulk_insert( else: if rows: if multiinsert: - self._exec( - sqla_compat._insert_inline(table), multiparams=rows - ) + self._exec(table.insert().inline(), multiparams=rows) else: for row in rows: - self._exec( - sqla_compat._insert_inline(table).values(**row) - ) + self._exec(table.insert().inline().values(**row)) def _tokenize_column_type(self, column: Column) -> Params: definition: str @@ -693,7 +727,7 @@ def _compare_identity_default(self, metadata_identity, inspector_identity): diff, ignored = _compare_identity_options( metadata_identity, inspector_identity, - sqla_compat.Identity(), + schema.Identity(), skip={"always"}, ) @@ -824,9 +858,9 @@ def _skip_functional_indexes(self, metadata_indexes, conn_indexes): metadata_indexes.discard(idx) def adjust_reflected_dialect_options( - self, reflected_object: Dict[str, Any], kind: str + self, reflected_object: _ReflectedConstraint, kind: str ) -> Dict[str, Any]: - return reflected_object.get("dialect_options", {}) + return 
reflected_object.get("dialect_options", {}) # type: ignore[return-value] # noqa: E501 class Params(NamedTuple): @@ -874,12 +908,13 @@ def check_dicts( set(meta_d).union(insp_d), ) if sqla_compat.identity_has_dialect_kwargs: + assert hasattr(default_io, "dialect_kwargs") # use only the dialect kwargs in inspector_io since metadata_io # can have options for many backends check_dicts( getattr(metadata_io, "dialect_kwargs", {}), getattr(inspector_io, "dialect_kwargs", {}), - default_io.dialect_kwargs, # type: ignore[union-attr] + default_io.dialect_kwargs, getattr(inspector_io, "dialect_kwargs", {}), ) diff --git a/libs/alembic/ddl/mssql.py b/libs/alembic/ddl/mssql.py index baa43d5e73..91cd9e428d 100644 --- a/libs/alembic/ddl/mssql.py +++ b/libs/alembic/ddl/mssql.py @@ -20,6 +20,7 @@ from .base import AddColumn from .base import alter_column from .base import alter_table +from .base import ColumnComment from .base import ColumnDefault from .base import ColumnName from .base import ColumnNullable @@ -45,7 +46,8 @@ from sqlalchemy.sql.selectable import TableClause from sqlalchemy.sql.type_api import TypeEngine - from .base import _ServerDefault + from .base import _ServerDefaultType + from .impl import _ReflectedConstraint class MSSQLImpl(DefaultImpl): @@ -83,19 +85,22 @@ def emit_commit(self) -> None: if self.as_sql and self.batch_separator: self.static_output(self.batch_separator) - def alter_column( # type:ignore[override] + def alter_column( self, table_name: str, column_name: str, + *, nullable: Optional[bool] = None, server_default: Optional[ - Union[_ServerDefault, Literal[False]] + Union[_ServerDefaultType, Literal[False]] ] = False, name: Optional[str] = None, type_: Optional[TypeEngine] = None, schema: Optional[str] = None, existing_type: Optional[TypeEngine] = None, - existing_server_default: Optional[_ServerDefault] = None, + existing_server_default: Union[ + _ServerDefaultType, Literal[False], None + ] = None, existing_nullable: Optional[bool] = None, **kw: 
Any, ) -> None: @@ -137,6 +142,27 @@ def alter_column( # type:ignore[override] kw["server_default"] = server_default kw["existing_server_default"] = existing_server_default + # drop existing default constraints before changing type + # or default, see issue #1744 + if ( + server_default is not False + and used_default is False + and ( + existing_server_default is not False or server_default is None + ) + ): + self._exec( + _ExecDropConstraint( + table_name, + column_name, + "sys.default_constraints", + schema, + ) + ) + + # TODO: see why these two alter_columns can't be called + # at once. joining them works but some of the mssql tests + # seem to expect something different super().alter_column( table_name, column_name, @@ -149,15 +175,6 @@ def alter_column( # type:ignore[override] ) if server_default is not False and used_default is False: - if existing_server_default is not False or server_default is None: - self._exec( - _ExecDropConstraint( - table_name, - column_name, - "sys.default_constraints", - schema, - ) - ) if server_default is not None: super().alter_column( table_name, @@ -202,6 +219,7 @@ def drop_column( self, table_name: str, column: Column[Any], + *, schema: Optional[str] = None, **kw, ) -> None: @@ -265,10 +283,10 @@ def _compare_identity_default(self, metadata_identity, inspector_identity): return diff, ignored, is_alter def adjust_reflected_dialect_options( - self, reflected_object: Dict[str, Any], kind: str + self, reflected_object: _ReflectedConstraint, kind: str ) -> Dict[str, Any]: options: Dict[str, Any] - options = reflected_object.get("dialect_options", {}).copy() + options = reflected_object.get("dialect_options", {}).copy() # type: ignore[attr-defined] # noqa: E501 if not options.get("mssql_include"): options.pop("mssql_include", None) if not options.get("mssql_clustered"): @@ -417,3 +435,89 @@ def visit_rename_table( format_table_name(compiler, element.table_name, element.schema), format_table_name(compiler, element.new_table_name, 
None), ) + + +def _add_column_comment( + compiler: MSDDLCompiler, + schema: Optional[str], + tname: str, + cname: str, + comment: str, +) -> str: + schema_name = schema if schema else compiler.dialect.default_schema_name + assert schema_name + return ( + "exec sp_addextendedproperty 'MS_Description', {}, " + "'schema', {}, 'table', {}, 'column', {}".format( + compiler.sql_compiler.render_literal_value( + comment, sqltypes.NVARCHAR() + ), + compiler.preparer.quote_schema(schema_name), + compiler.preparer.quote(tname), + compiler.preparer.quote(cname), + ) + ) + + +def _update_column_comment( + compiler: MSDDLCompiler, + schema: Optional[str], + tname: str, + cname: str, + comment: str, +) -> str: + schema_name = schema if schema else compiler.dialect.default_schema_name + assert schema_name + return ( + "exec sp_updateextendedproperty 'MS_Description', {}, " + "'schema', {}, 'table', {}, 'column', {}".format( + compiler.sql_compiler.render_literal_value( + comment, sqltypes.NVARCHAR() + ), + compiler.preparer.quote_schema(schema_name), + compiler.preparer.quote(tname), + compiler.preparer.quote(cname), + ) + ) + + +def _drop_column_comment( + compiler: MSDDLCompiler, schema: Optional[str], tname: str, cname: str +) -> str: + schema_name = schema if schema else compiler.dialect.default_schema_name + assert schema_name + return ( + "exec sp_dropextendedproperty 'MS_Description', " + "'schema', {}, 'table', {}, 'column', {}".format( + compiler.preparer.quote_schema(schema_name), + compiler.preparer.quote(tname), + compiler.preparer.quote(cname), + ) + ) + + +@compiles(ColumnComment, "mssql") +def visit_column_comment( + element: ColumnComment, compiler: MSDDLCompiler, **kw: Any +) -> str: + if element.comment is not None: + if element.existing_comment is not None: + return _update_column_comment( + compiler, + element.schema, + element.table_name, + element.column_name, + element.comment, + ) + else: + return _add_column_comment( + compiler, + element.schema, + 
element.table_name, + element.column_name, + element.comment, + ) + else: + return _drop_column_comment( + compiler, element.schema, element.table_name, element.column_name + ) diff --git a/libs/alembic/ddl/mysql.py b/libs/alembic/ddl/mysql.py index 3482f672da..27f808b050 100644 --- a/libs/alembic/ddl/mysql.py +++ b/libs/alembic/ddl/mysql.py @@ -11,6 +11,9 @@ from sqlalchemy import schema from sqlalchemy import types as sqltypes +from sqlalchemy.sql import elements +from sqlalchemy.sql import functions +from sqlalchemy.sql import operators from .base import alter_table from .base import AlterColumn @@ -23,7 +26,6 @@ from .impl import DefaultImpl from .. import util from ..util import sqla_compat -from ..util.sqla_compat import _is_mariadb from ..util.sqla_compat import _is_type_bound from ..util.sqla_compat import compiles @@ -32,10 +34,11 @@ from sqlalchemy.dialects.mysql.base import MySQLDDLCompiler from sqlalchemy.sql.ddl import DropConstraint + from sqlalchemy.sql.elements import ClauseElement from sqlalchemy.sql.schema import Constraint from sqlalchemy.sql.type_api import TypeEngine - from .base import _ServerDefault + from .base import _ServerDefaultType class MySQLImpl(DefaultImpl): @@ -48,17 +51,47 @@ class MySQLImpl(DefaultImpl): ) type_arg_extract = [r"character set ([\w\-_]+)", r"collate ([\w\-_]+)"] - def alter_column( # type:ignore[override] + def render_ddl_sql_expr( + self, + expr: ClauseElement, + is_server_default: bool = False, + is_index: bool = False, + **kw: Any, + ) -> str: + # apply Grouping to index expressions; + # see https://github.com/sqlalchemy/sqlalchemy/blob/ + # 36da2eaf3e23269f2cf28420ae73674beafd0661/ + # lib/sqlalchemy/dialects/mysql/base.py#L2191 + if is_index and ( + isinstance(expr, elements.BinaryExpression) + or ( + isinstance(expr, elements.UnaryExpression) + and expr.modifier not in (operators.desc_op, operators.asc_op) + ) + or isinstance(expr, functions.FunctionElement) + ): + expr = elements.Grouping(expr) + + return 
super().render_ddl_sql_expr( + expr, is_server_default=is_server_default, is_index=is_index, **kw + ) + + def alter_column( self, table_name: str, column_name: str, + *, nullable: Optional[bool] = None, - server_default: Union[_ServerDefault, Literal[False]] = False, + server_default: Optional[ + Union[_ServerDefaultType, Literal[False]] + ] = False, name: Optional[str] = None, type_: Optional[TypeEngine] = None, schema: Optional[str] = None, existing_type: Optional[TypeEngine] = None, - existing_server_default: Optional[_ServerDefault] = None, + existing_server_default: Optional[ + Union[_ServerDefaultType, Literal[False]] + ] = None, existing_nullable: Optional[bool] = None, autoincrement: Optional[bool] = None, existing_autoincrement: Optional[bool] = None, @@ -166,6 +199,7 @@ def alter_column( # type:ignore[override] def drop_constraint( self, const: Constraint, + **kw: Any, ) -> None: if isinstance(const, schema.CheckConstraint) and _is_type_bound(const): return @@ -175,7 +209,7 @@ def drop_constraint( def _is_mysql_allowed_functional_default( self, type_: Optional[TypeEngine], - server_default: Union[_ServerDefault, Literal[False]], + server_default: Optional[Union[_ServerDefaultType, Literal[False]]], ) -> bool: return ( type_ is not None @@ -326,7 +360,7 @@ def __init__( self, name: str, column_name: str, - default: _ServerDefault, + default: Optional[_ServerDefaultType], schema: Optional[str] = None, ) -> None: super(AlterColumn, self).__init__(name, schema=schema) @@ -343,7 +377,7 @@ def __init__( newname: Optional[str] = None, type_: Optional[TypeEngine] = None, nullable: Optional[bool] = None, - default: Optional[Union[_ServerDefault, Literal[False]]] = False, + default: Optional[Union[_ServerDefaultType, Literal[False]]] = False, autoincrement: Optional[bool] = None, comment: Optional[Union[str, Literal[False]]] = False, ) -> None: @@ -432,7 +466,7 @@ def _mysql_change_column( def _mysql_colspec( compiler: MySQLDDLCompiler, nullable: Optional[bool], - 
server_default: Optional[Union[_ServerDefault, Literal[False]]], + server_default: Optional[Union[_ServerDefaultType, Literal[False]]], type_: TypeEngine, autoincrement: Optional[bool], comment: Optional[Union[str, Literal[False]]], @@ -475,7 +509,7 @@ def _mysql_drop_constraint( # note that SQLAlchemy as of 1.2 does not yet support # DROP CONSTRAINT for MySQL/MariaDB, so we implement fully # here. - if _is_mariadb(compiler.dialect): + if compiler.dialect.is_mariadb: return "ALTER TABLE %s DROP CONSTRAINT %s" % ( compiler.preparer.format_table(constraint.table), compiler.preparer.format_constraint(constraint), diff --git a/libs/alembic/ddl/postgresql.py b/libs/alembic/ddl/postgresql.py index de64a4e05b..cc03f45346 100644 --- a/libs/alembic/ddl/postgresql.py +++ b/libs/alembic/ddl/postgresql.py @@ -16,8 +16,11 @@ from typing import Union from sqlalchemy import Column +from sqlalchemy import Float +from sqlalchemy import Identity from sqlalchemy import literal_column from sqlalchemy import Numeric +from sqlalchemy import select from sqlalchemy import text from sqlalchemy import types as sqltypes from sqlalchemy.dialects.postgresql import BIGINT @@ -49,6 +52,7 @@ from ..util import sqla_compat from ..util.sqla_compat import compiles + if TYPE_CHECKING: from typing import Literal @@ -66,12 +70,12 @@ from sqlalchemy.sql.schema import Table from sqlalchemy.sql.type_api import TypeEngine - from .base import _ServerDefault + from .base import _ServerDefaultType + from .impl import _ReflectedConstraint from ..autogenerate.api import AutogenContext from ..autogenerate.render import _f_name from ..runtime.migration import MigrationContext - log = logging.getLogger(__name__) @@ -109,6 +113,7 @@ def compare_server_default( rendered_metadata_default, rendered_inspector_default, ): + # don't do defaults for SERIAL columns if ( metadata_column.primary_key @@ -118,6 +123,11 @@ def compare_server_default( conn_col_default = rendered_inspector_default + if conn_col_default and 
re.match( + r"nextval\('(.+?)'::regclass\)", conn_col_default + ): + conn_col_default = conn_col_default.replace("::regclass", "") + defaults_equal = conn_col_default == rendered_metadata_default if defaults_equal: return False @@ -132,33 +142,38 @@ def compare_server_default( metadata_default = metadata_column.server_default.arg if isinstance(metadata_default, str): - if not isinstance(inspector_column.type, Numeric): + if not isinstance(inspector_column.type, (Numeric, Float)): metadata_default = re.sub(r"^'|'$", "", metadata_default) metadata_default = f"'{metadata_default}'" metadata_default = literal_column(metadata_default) # run a real compare against the server + # TODO: this seems quite a bad idea for a default that's a SQL + # function! SQL functions are not deterministic! conn = self.connection assert conn is not None return not conn.scalar( - sqla_compat._select( - literal_column(conn_col_default) == metadata_default - ) + select(literal_column(conn_col_default) == metadata_default) ) - def alter_column( # type:ignore[override] + def alter_column( self, table_name: str, column_name: str, + *, nullable: Optional[bool] = None, - server_default: Union[_ServerDefault, Literal[False]] = False, + server_default: Optional[ + Union[_ServerDefaultType, Literal[False]] + ] = False, name: Optional[str] = None, type_: Optional[TypeEngine] = None, schema: Optional[str] = None, autoincrement: Optional[bool] = None, existing_type: Optional[TypeEngine] = None, - existing_server_default: Optional[_ServerDefault] = None, + existing_server_default: Optional[ + Union[_ServerDefaultType, Literal[False]] + ] = None, existing_nullable: Optional[bool] = None, existing_autoincrement: Optional[bool] = None, **kw: Any, @@ -314,7 +329,7 @@ def _dialect_options( self, item: Union[Index, UniqueConstraint] ) -> Tuple[Any, ...]: # only the positive case is returned by sqlalchemy reflection so - # None and False are threated the same + # None and False are treated the same if 
item.dialect_kwargs.get("postgresql_nulls_not_distinct"): return ("nulls_not_distinct",) return () @@ -408,10 +423,10 @@ def compare_unique_constraint( return ComparisonResult.Equal() def adjust_reflected_dialect_options( - self, reflected_options: Dict[str, Any], kind: str + self, reflected_object: _ReflectedConstraint, kind: str ) -> Dict[str, Any]: options: Dict[str, Any] - options = reflected_options.get("dialect_options", {}).copy() + options = reflected_object.get("dialect_options", {}).copy() # type: ignore[attr-defined] # noqa: E501 if not options.get("postgresql_include"): options.pop("postgresql_include", None) return options @@ -585,7 +600,7 @@ def visit_identity_column( ) else: text += "SET %s " % compiler.get_identity_options( - sqla_compat.Identity(**{attr: getattr(identity, attr)}) + Identity(**{attr: getattr(identity, attr)}) ) return text @@ -845,5 +860,5 @@ def _render_potential_column( return render._render_potential_expr( value, autogen_context, - wrap_in_text=isinstance(value, (TextClause, FunctionElement)), + wrap_in_element=isinstance(value, (TextClause, FunctionElement)), ) diff --git a/libs/alembic/ddl/sqlite.py b/libs/alembic/ddl/sqlite.py index 762e8ca198..c260d53faa 100644 --- a/libs/alembic/ddl/sqlite.py +++ b/libs/alembic/ddl/sqlite.py @@ -11,11 +11,14 @@ from typing import Union from sqlalchemy import cast +from sqlalchemy import Computed from sqlalchemy import JSON from sqlalchemy import schema from sqlalchemy import sql from .base import alter_table +from .base import ColumnName +from .base import format_column_name from .base import format_table_name from .base import RenameTable from .impl import DefaultImpl @@ -62,7 +65,7 @@ def requires_recreate_in_batch( ) and isinstance(col.server_default.arg, sql.ClauseElement): return True elif ( - isinstance(col.server_default, util.sqla_compat.Computed) + isinstance(col.server_default, Computed) and col.server_default.persisted ): return True @@ -71,7 +74,7 @@ def 
requires_recreate_in_batch( else: return False - def add_constraint(self, const: Constraint): + def add_constraint(self, const: Constraint, **kw: Any): # attempt to distinguish between an # auto-gen constraint and an explicit one if const._create_rule is None: @@ -88,7 +91,7 @@ def add_constraint(self, const: Constraint): "SQLite migrations using a copy-and-move strategy." ) - def drop_constraint(self, const: Constraint): + def drop_constraint(self, const: Constraint, **kw: Any): if const._create_rule is None: raise NotImplementedError( "No support for ALTER of constraints in SQLite dialect. " @@ -207,6 +210,15 @@ def visit_rename_table( ) +@compiles(ColumnName, "sqlite") +def visit_column_name(element: ColumnName, compiler: DDLCompiler, **kw) -> str: + return "%s RENAME COLUMN %s TO %s" % ( + alter_table(compiler, element.table_name, element.schema), + format_column_name(compiler, element.column_name), + format_column_name(compiler, element.newname), + ) + + # @compiles(AddColumn, 'sqlite') # def visit_add_column(element, compiler, **kw): # return "%s %s" % ( diff --git a/libs/alembic/op.pyi b/libs/alembic/op.pyi index 920444696e..7fadaf5fe6 100644 --- a/libs/alembic/op.pyi +++ b/libs/alembic/op.pyi @@ -27,15 +27,13 @@ if TYPE_CHECKING: from sqlalchemy.sql.elements import conv from sqlalchemy.sql.elements import TextClause from sqlalchemy.sql.expression import TableClause - from sqlalchemy.sql.functions import Function from sqlalchemy.sql.schema import Column - from sqlalchemy.sql.schema import Computed - from sqlalchemy.sql.schema import Identity from sqlalchemy.sql.schema import SchemaItem from sqlalchemy.sql.schema import Table from sqlalchemy.sql.type_api import TypeEngine from sqlalchemy.util import immutabledict + from .ddl.base import _ServerDefaultType from .operations.base import BatchOperations from .operations.ops import AddColumnOp from .operations.ops import AddConstraintOp @@ -61,7 +59,13 @@ _C = TypeVar("_C", bound=Callable[..., Any]) ### end 
imports ### def add_column( - table_name: str, column: Column[Any], *, schema: Optional[str] = None + table_name: str, + column: Column[Any], + *, + schema: Optional[str] = None, + if_not_exists: Optional[bool] = None, + inline_references: Optional[bool] = None, + inline_primary_key: Optional[bool] = None, ) -> None: """Issue an "add column" instruction using the current migration context. @@ -76,36 +80,64 @@ def add_column( The :meth:`.Operations.add_column` method typically corresponds to the SQL command "ALTER TABLE... ADD COLUMN". Within the scope of this command, the column's name, datatype, nullability, - and optional server-generated defaults may be indicated. + and optional server-generated defaults may be indicated. Options + also exist for control of single-column primary key and foreign key + constraints to be generated. .. note:: - With the exception of NOT NULL constraints or single-column FOREIGN - KEY constraints, other kinds of constraints such as PRIMARY KEY, - UNIQUE or CHECK constraints **cannot** be generated using this - method; for these constraints, refer to operations such as - :meth:`.Operations.create_primary_key` and - :meth:`.Operations.create_check_constraint`. In particular, the - following :class:`~sqlalchemy.schema.Column` parameters are - **ignored**: - - * :paramref:`~sqlalchemy.schema.Column.primary_key` - SQL databases - typically do not support an ALTER operation that can add - individual columns one at a time to an existing primary key - constraint, therefore it's less ambiguous to use the - :meth:`.Operations.create_primary_key` method, which assumes no - existing primary key constraint is present. + Not all constraint types may be indicated with this directive. + NOT NULL, FOREIGN KEY, and CHECK are honored, PRIMARY KEY + is conditionally honored, UNIQUE + is currently not. 
+ + As of 1.18.2, the following :class:`~sqlalchemy.schema.Column` + parameters are **ignored**: + * :paramref:`~sqlalchemy.schema.Column.unique` - use the :meth:`.Operations.create_unique_constraint` method * :paramref:`~sqlalchemy.schema.Column.index` - use the :meth:`.Operations.create_index` method + **PRIMARY KEY support** + + The provided :class:`~sqlalchemy.schema.Column` object may include a + ``primary_key=True`` directive, indicating the column intends to be + part of a primary key constraint. However by default, the inline + "PRIMARY KEY" directive is not emitted, and it's assumed that a + separate :meth:`.Operations.create_primary_key` directive will be used + to create this constraint, which may potentially include other columns + as well as have an explicit name. To instead render an inline + "PRIMARY KEY" directive, the + :paramref:`.AddColumnOp.inline_primary_key` parameter may be indicated + at the same time as the ``primary_key`` parameter (both are needed):: + + from alembic import op + from sqlalchemy import Column, INTEGER + + op.add_column( + "organization", + Column("id", INTEGER, primary_key=True), + inline_primary_key=True + ) + + The ``primary_key=True`` parameter on + :class:`~sqlalchemy.schema.Column` also indicates behaviors such as + using the ``SERIAL`` datatype with the PostgreSQL database, which is + why two separate, independent parameters are provided to support all + combinations. + + .. versionadded:: 1.18.4 Added + :paramref:`.AddColumnOp.inline_primary_key` + to control use of the ``PRIMARY KEY`` inline directive. + + **FOREIGN KEY support** The provided :class:`~sqlalchemy.schema.Column` object may include a :class:`~sqlalchemy.schema.ForeignKey` constraint directive, - referencing a remote table name. For this specific type of constraint, - Alembic will automatically emit a second ALTER statement in order to - add the single-column FOREIGN KEY constraint separately:: + referencing a remote table name. 
By default, Alembic will automatically + emit a second ALTER statement in order to add the single-column FOREIGN + KEY constraint separately:: from alembic import op from sqlalchemy import Column, INTEGER, ForeignKey @@ -115,6 +147,22 @@ def add_column( Column("account_id", INTEGER, ForeignKey("accounts.id")), ) + To render the FOREIGN KEY constraint inline within the ADD COLUMN + directive, use the ``inline_references`` parameter. This can improve + performance on large tables since the constraint is marked as valid + immediately for nullable columns:: + + from alembic import op + from sqlalchemy import Column, INTEGER, ForeignKey + + op.add_column( + "organization", + Column("account_id", INTEGER, ForeignKey("accounts.id")), + inline_references=True, + ) + + **Indicating server side defaults** + The column argument passed to :meth:`.Operations.add_column` is a :class:`~sqlalchemy.schema.Column` construct, used in the same way it's used in SQLAlchemy. In particular, values or functions to be indicated @@ -138,6 +186,27 @@ def add_column( quoting of the schema outside of the default behavior, use the SQLAlchemy construct :class:`~sqlalchemy.sql.elements.quoted_name`. + :param if_not_exists: If True, adds ``IF NOT EXISTS`` operator + when creating the new column for compatible dialects + + .. versionadded:: 1.16.0 + + :param inline_references: If True, renders ``FOREIGN KEY`` constraints + inline within the ``ADD COLUMN`` directive using ``REFERENCES`` + syntax, rather than as a separate ``ALTER TABLE ADD CONSTRAINT`` + statement. This is supported by PostgreSQL, Oracle, MySQL 5.7+, and + MariaDB 10.5+. + + .. versionadded:: 1.18.2 + + :param inline_primary_key: If True, renders the ``PRIMARY KEY`` phrase + inline within the ``ADD COLUMN`` directive. 
When not present or + False, ``PRIMARY KEY`` is not emitted; it is assumed that the + migration script will include an additional + :meth:`.Operations.create_primary_key` directive to create a full + primary key constraint. + + .. versionadded:: 1.18.4 """ @@ -147,12 +216,12 @@ def alter_column( *, nullable: Optional[bool] = None, comment: Union[str, Literal[False], None] = False, - server_default: Any = False, + server_default: Union[_ServerDefaultType, None, Literal[False]] = False, new_column_name: Optional[str] = None, type_: Union[TypeEngine[Any], Type[TypeEngine[Any]], None] = None, existing_type: Union[TypeEngine[Any], Type[TypeEngine[Any]], None] = None, existing_server_default: Union[ - str, bool, Identity, Computed, None + _ServerDefaultType, None, Literal[False] ] = False, existing_nullable: Optional[bool] = None, existing_comment: Optional[str] = None, @@ -247,7 +316,7 @@ def batch_alter_table( table_name: str, schema: Optional[str] = None, recreate: Literal["auto", "always", "never"] = "auto", - partial_reordering: Optional[Tuple[Any, ...]] = None, + partial_reordering: list[tuple[str, ...]] | None = None, copy_from: Optional[Table] = None, table_args: Tuple[Any, ...] = (), table_kwargs: Mapping[str, Any] = immutabledict({}), @@ -650,7 +719,7 @@ def create_foreign_key( def create_index( index_name: Optional[str], table_name: str, - columns: Sequence[Union[str, TextClause, Function[Any]]], + columns: Sequence[Union[str, TextClause, ColumnElement[Any]]], *, schema: Optional[str] = None, unique: bool = False, @@ -926,6 +995,11 @@ def drop_column( quoting of the schema outside of the default behavior, use the SQLAlchemy construct :class:`~sqlalchemy.sql.elements.quoted_name`. + :param if_exists: If True, adds IF EXISTS operator when + dropping the new column for compatible dialects + + .. versionadded:: 1.16.0 + :param mssql_drop_check: Optional boolean. 
When ``True``, on Microsoft SQL Server only, first drop the CHECK constraint on the column using a @@ -947,7 +1021,6 @@ def drop_column( then exec's a separate DROP CONSTRAINT for that default. Only works if the column has exactly one FK constraint which refers to it, at the moment. - """ def drop_constraint( @@ -956,6 +1029,7 @@ def drop_constraint( type_: Optional[str] = None, *, schema: Optional[str] = None, + if_exists: Optional[bool] = None, ) -> None: r"""Drop a constraint of the given name, typically via DROP CONSTRAINT. @@ -967,6 +1041,10 @@ def drop_constraint( quoting of the schema outside of the default behavior, use the SQLAlchemy construct :class:`~sqlalchemy.sql.elements.quoted_name`. + :param if_exists: If True, adds IF EXISTS operator when + dropping the constraint + + .. versionadded:: 1.16.0 """ @@ -1166,7 +1244,7 @@ def f(name: str) -> conv: names will be converted along conventions. If the ``target_metadata`` contains the naming convention ``{"ck": "ck_bool_%(table_name)s_%(constraint_name)s"}``, then the - output of the following: + output of the following:: op.add_column("t", "x", Boolean(name="x")) @@ -1196,14 +1274,27 @@ def get_context() -> MigrationContext: """ -def implementation_for(op_cls: Any) -> Callable[[_C], _C]: +def implementation_for( + op_cls: Any, replace: bool = False +) -> Callable[[_C], _C]: """Register an implementation for a given :class:`.MigrateOperation`. + :param replace: when True, allows replacement of an already + registered implementation for the given operation class. This + enables customization of built-in operations such as + :class:`.CreateTableOp` by providing an alternate implementation + that can augment, modify, or conditionally invoke the default + behavior. + + .. versionadded:: 1.17.2 + This is part of the operation extensibility API. .. 
seealso:: - :ref:`operation_plugins` - example of use + :ref:`operation_plugins` + + :ref:`operations_extending_builtin` """ @@ -1270,7 +1361,7 @@ def invoke( BulkInsertOp, DropTableOp, ExecuteSQLOp, - ] + ], ) -> None: ... @overload def invoke(operation: MigrateOperation) -> Any: diff --git a/libs/alembic/operations/base.py b/libs/alembic/operations/base.py index 9b52fa6f29..b9e6107fba 100644 --- a/libs/alembic/operations/base.py +++ b/libs/alembic/operations/base.py @@ -27,13 +27,13 @@ from . import batch from . import schemaobj from .. import util +from ..ddl.base import _ServerDefaultType from ..util import sqla_compat from ..util.compat import formatannotation_fwdref from ..util.compat import inspect_formatargspec from ..util.compat import inspect_getfullargspec from ..util.sqla_compat import _literal_bindparam - if TYPE_CHECKING: from typing import Literal @@ -43,10 +43,7 @@ from sqlalchemy.sql.expression import ColumnElement from sqlalchemy.sql.expression import TableClause from sqlalchemy.sql.expression import TextClause - from sqlalchemy.sql.functions import Function from sqlalchemy.sql.schema import Column - from sqlalchemy.sql.schema import Computed - from sqlalchemy.sql.schema import Identity from sqlalchemy.sql.schema import SchemaItem from sqlalchemy.types import TypeEngine @@ -203,19 +200,32 @@ def %(name)s%(args)s: return register @classmethod - def implementation_for(cls, op_cls: Any) -> Callable[[_C], _C]: + def implementation_for( + cls, op_cls: Any, replace: bool = False + ) -> Callable[[_C], _C]: """Register an implementation for a given :class:`.MigrateOperation`. + :param replace: when True, allows replacement of an already + registered implementation for the given operation class. This + enables customization of built-in operations such as + :class:`.CreateTableOp` by providing an alternate implementation + that can augment, modify, or conditionally invoke the default + behavior. + + .. 
versionadded:: 1.17.2 + This is part of the operation extensibility API. .. seealso:: - :ref:`operation_plugins` - example of use + :ref:`operation_plugins` + + :ref:`operations_extending_builtin` """ def decorate(fn: _C) -> _C: - cls._to_impl.dispatch_for(op_cls)(fn) + cls._to_impl.dispatch_for(op_cls, replace=replace)(fn) return fn return decorate @@ -236,7 +246,7 @@ def batch_alter_table( table_name: str, schema: Optional[str] = None, recreate: Literal["auto", "always", "never"] = "auto", - partial_reordering: Optional[Tuple[Any, ...]] = None, + partial_reordering: list[tuple[str, ...]] | None = None, copy_from: Optional[Table] = None, table_args: Tuple[Any, ...] = (), table_kwargs: Mapping[str, Any] = util.immutabledict(), @@ -465,7 +475,7 @@ def f(self, name: str) -> conv: names will be converted along conventions. If the ``target_metadata`` contains the naming convention ``{"ck": "ck_bool_%(table_name)s_%(constraint_name)s"}``, then the - output of the following: + output of the following:: op.add_column("t", "x", Boolean(name="x")) @@ -619,6 +629,9 @@ def add_column( column: Column[Any], *, schema: Optional[str] = None, + if_not_exists: Optional[bool] = None, + inline_references: Optional[bool] = None, + inline_primary_key: Optional[bool] = None, ) -> None: """Issue an "add column" instruction using the current migration context. @@ -633,36 +646,77 @@ def add_column( The :meth:`.Operations.add_column` method typically corresponds to the SQL command "ALTER TABLE... ADD COLUMN". Within the scope of this command, the column's name, datatype, nullability, - and optional server-generated defaults may be indicated. + and optional server-generated defaults may be indicated. Options + also exist for control of single-column primary key and foreign key + constraints to be generated. .. 
note:: - With the exception of NOT NULL constraints or single-column FOREIGN - KEY constraints, other kinds of constraints such as PRIMARY KEY, - UNIQUE or CHECK constraints **cannot** be generated using this - method; for these constraints, refer to operations such as - :meth:`.Operations.create_primary_key` and - :meth:`.Operations.create_check_constraint`. In particular, the - following :class:`~sqlalchemy.schema.Column` parameters are - **ignored**: - - * :paramref:`~sqlalchemy.schema.Column.primary_key` - SQL databases - typically do not support an ALTER operation that can add - individual columns one at a time to an existing primary key - constraint, therefore it's less ambiguous to use the - :meth:`.Operations.create_primary_key` method, which assumes no - existing primary key constraint is present. + Not all constraint types may be indicated with this directive. + NOT NULL, FOREIGN KEY, and CHECK are honored, PRIMARY KEY + is conditionally honored, UNIQUE + is currently not. + + As of 1.18.2, the following :class:`~sqlalchemy.schema.Column` + parameters are **ignored**: + * :paramref:`~sqlalchemy.schema.Column.unique` - use the :meth:`.Operations.create_unique_constraint` method * :paramref:`~sqlalchemy.schema.Column.index` - use the :meth:`.Operations.create_index` method + **PRIMARY KEY support** + + The provided :class:`~sqlalchemy.schema.Column` object may include a + ``primary_key=True`` directive, indicating the column intends to be + part of a primary key constraint. However by default, the inline + "PRIMARY KEY" directive is not emitted, and it's assumed that a + separate :meth:`.Operations.create_primary_key` directive will be used + to create this constraint, which may potentially include other columns + as well as have an explicit name. 
To instead render an inline + "PRIMARY KEY" directive, the + :paramref:`.AddColumnOp.inline_primary_key` parameter may be indicated + at the same time as the ``primary_key`` parameter (both are needed):: + + from alembic import op + from sqlalchemy import Column, INTEGER + + op.add_column( + "organization", + Column("id", INTEGER, primary_key=True), + inline_primary_key=True + ) + + The ``primary_key=True`` parameter on + :class:`~sqlalchemy.schema.Column` also indicates behaviors such as + using the ``SERIAL`` datatype with the PostgreSQL database, which is + why two separate, independent parameters are provided to support all + combinations. + + .. versionadded:: 1.18.4 Added + :paramref:`.AddColumnOp.inline_primary_key` + to control use of the ``PRIMARY KEY`` inline directive. + + **FOREIGN KEY support** The provided :class:`~sqlalchemy.schema.Column` object may include a :class:`~sqlalchemy.schema.ForeignKey` constraint directive, - referencing a remote table name. For this specific type of constraint, - Alembic will automatically emit a second ALTER statement in order to - add the single-column FOREIGN KEY constraint separately:: + referencing a remote table name. By default, Alembic will automatically + emit a second ALTER statement in order to add the single-column FOREIGN + KEY constraint separately:: + + from alembic import op + from sqlalchemy import Column, INTEGER, ForeignKey + + op.add_column( + "organization", + Column("account_id", INTEGER, ForeignKey("accounts.id")), + ) + + To render the FOREIGN KEY constraint inline within the ADD COLUMN + directive, use the ``inline_references`` parameter. 
This can improve + performance on large tables since the constraint is marked as valid + immediately for nullable columns:: from alembic import op from sqlalchemy import Column, INTEGER, ForeignKey @@ -670,8 +724,11 @@ def add_column( op.add_column( "organization", Column("account_id", INTEGER, ForeignKey("accounts.id")), + inline_references=True, ) + **Indicating server side defaults** + The column argument passed to :meth:`.Operations.add_column` is a :class:`~sqlalchemy.schema.Column` construct, used in the same way it's used in SQLAlchemy. In particular, values or functions to be indicated @@ -695,6 +752,27 @@ def add_column( quoting of the schema outside of the default behavior, use the SQLAlchemy construct :class:`~sqlalchemy.sql.elements.quoted_name`. + :param if_not_exists: If True, adds ``IF NOT EXISTS`` operator + when creating the new column for compatible dialects + + .. versionadded:: 1.16.0 + + :param inline_references: If True, renders ``FOREIGN KEY`` constraints + inline within the ``ADD COLUMN`` directive using ``REFERENCES`` + syntax, rather than as a separate ``ALTER TABLE ADD CONSTRAINT`` + statement. This is supported by PostgreSQL, Oracle, MySQL 5.7+, and + MariaDB 10.5+. + + .. versionadded:: 1.18.2 + + :param inline_primary_key: If True, renders the ``PRIMARY KEY`` phrase + inline within the ``ADD COLUMN`` directive. When not present or + False, ``PRIMARY KEY`` is not emitted; it is assumed that the + migration script will include an additional + :meth:`.Operations.create_primary_key` directive to create a full + primary key constraint. + + .. versionadded:: 1.18.4 """ # noqa: E501 ... 
@@ -706,14 +784,16 @@ def alter_column( *, nullable: Optional[bool] = None, comment: Union[str, Literal[False], None] = False, - server_default: Any = False, + server_default: Union[ + _ServerDefaultType, None, Literal[False] + ] = False, new_column_name: Optional[str] = None, type_: Union[TypeEngine[Any], Type[TypeEngine[Any]], None] = None, existing_type: Union[ TypeEngine[Any], Type[TypeEngine[Any]], None ] = None, existing_server_default: Union[ - str, bool, Identity, Computed, None + _ServerDefaultType, None, Literal[False] ] = False, existing_nullable: Optional[bool] = None, existing_comment: Optional[str] = None, @@ -1074,7 +1154,7 @@ def create_index( self, index_name: Optional[str], table_name: str, - columns: Sequence[Union[str, TextClause, Function[Any]]], + columns: Sequence[Union[str, TextClause, ColumnElement[Any]]], *, schema: Optional[str] = None, unique: bool = False, @@ -1360,6 +1440,11 @@ def drop_column( quoting of the schema outside of the default behavior, use the SQLAlchemy construct :class:`~sqlalchemy.sql.elements.quoted_name`. + :param if_exists: If True, adds IF EXISTS operator when + dropping the new column for compatible dialects + + .. versionadded:: 1.16.0 + :param mssql_drop_check: Optional boolean. When ``True``, on Microsoft SQL Server only, first drop the CHECK constraint on the column using a @@ -1381,7 +1466,6 @@ def drop_column( then exec's a separate DROP CONSTRAINT for that default. Only works if the column has exactly one FK constraint which refers to it, at the moment. - """ # noqa: E501 ... @@ -1392,6 +1476,7 @@ def drop_constraint( type_: Optional[str] = None, *, schema: Optional[str] = None, + if_exists: Optional[bool] = None, ) -> None: r"""Drop a constraint of the given name, typically via DROP CONSTRAINT. @@ -1403,6 +1488,10 @@ def drop_constraint( quoting of the schema outside of the default behavior, use the SQLAlchemy construct :class:`~sqlalchemy.sql.elements.quoted_name`. 
+ :param if_exists: If True, adds IF EXISTS operator when + dropping the constraint + + .. versionadded:: 1.16.0 """ # noqa: E501 ... @@ -1645,6 +1734,9 @@ def add_column( *, insert_before: Optional[str] = None, insert_after: Optional[str] = None, + if_not_exists: Optional[bool] = None, + inline_references: Optional[bool] = None, + inline_primary_key: Optional[bool] = None, ) -> None: """Issue an "add column" instruction using the current batch migration context. @@ -1662,14 +1754,16 @@ def alter_column( *, nullable: Optional[bool] = None, comment: Union[str, Literal[False], None] = False, - server_default: Any = False, + server_default: Union[ + _ServerDefaultType, None, Literal[False] + ] = False, new_column_name: Optional[str] = None, type_: Union[TypeEngine[Any], Type[TypeEngine[Any]], None] = None, existing_type: Union[ TypeEngine[Any], Type[TypeEngine[Any]], None ] = None, existing_server_default: Union[ - str, bool, Identity, Computed, None + _ServerDefaultType, None, Literal[False] ] = False, existing_nullable: Optional[bool] = None, existing_comment: Optional[str] = None, diff --git a/libs/alembic/operations/batch.py b/libs/alembic/operations/batch.py index fd7ab99030..9b48be5986 100644 --- a/libs/alembic/operations/batch.py +++ b/libs/alembic/operations/batch.py @@ -18,6 +18,7 @@ from sqlalchemy import MetaData from sqlalchemy import PrimaryKeyConstraint from sqlalchemy import schema as sql_schema +from sqlalchemy import select from sqlalchemy import Table from sqlalchemy import types as sqltypes from sqlalchemy.sql.schema import SchemaEventTarget @@ -31,11 +32,9 @@ from ..util.sqla_compat import _ensure_scope_for_ddl from ..util.sqla_compat import _fk_is_self_referential from ..util.sqla_compat import _idx_table_bound_expressions -from ..util.sqla_compat import _insert_inline from ..util.sqla_compat import _is_type_bound from ..util.sqla_compat import _remove_column_from_collection from ..util.sqla_compat import _resolve_for_variant -from 
..util.sqla_compat import _select from ..util.sqla_compat import constraint_name_defined from ..util.sqla_compat import constraint_name_string @@ -45,10 +44,10 @@ from sqlalchemy.engine import Dialect from sqlalchemy.sql.elements import ColumnClause from sqlalchemy.sql.elements import quoted_name - from sqlalchemy.sql.functions import Function from sqlalchemy.sql.schema import Constraint from sqlalchemy.sql.type_api import TypeEngine + from ..ddl.base import _ServerDefaultType from ..ddl.impl import DefaultImpl @@ -449,13 +448,15 @@ def _create(self, op_impl: DefaultImpl) -> None: try: op_impl._exec( - _insert_inline(self.new_table).from_select( + self.new_table.insert() + .inline() + .from_select( list( k for k, transfer in self.column_transfers.items() if "expr" in transfer ), - _select( + select( *[ transfer["expr"] for transfer in self.column_transfers.values() @@ -484,7 +485,9 @@ def alter_column( table_name: str, column_name: str, nullable: Optional[bool] = None, - server_default: Optional[Union[Function[Any], str, bool]] = False, + server_default: Union[ + _ServerDefaultType, None, Literal[False] + ] = False, name: Optional[str] = None, type_: Optional[TypeEngine] = None, autoincrement: Optional[Union[bool, Literal["auto"]]] = None, diff --git a/libs/alembic/operations/ops.py b/libs/alembic/operations/ops.py index 60b856a8f7..7eda50d672 100644 --- a/libs/alembic/operations/ops.py +++ b/libs/alembic/operations/ops.py @@ -1,6 +1,8 @@ from __future__ import annotations from abc import abstractmethod +import os +import pathlib import re from typing import Any from typing import Callable @@ -35,13 +37,10 @@ from sqlalchemy.sql.elements import conv from sqlalchemy.sql.elements import quoted_name from sqlalchemy.sql.elements import TextClause - from sqlalchemy.sql.functions import Function from sqlalchemy.sql.schema import CheckConstraint from sqlalchemy.sql.schema import Column - from sqlalchemy.sql.schema import Computed from sqlalchemy.sql.schema import 
Constraint from sqlalchemy.sql.schema import ForeignKeyConstraint - from sqlalchemy.sql.schema import Identity from sqlalchemy.sql.schema import Index from sqlalchemy.sql.schema import MetaData from sqlalchemy.sql.schema import PrimaryKeyConstraint @@ -52,6 +51,7 @@ from sqlalchemy.sql.type_api import TypeEngine from ..autogenerate.rewriter import Rewriter + from ..ddl.base import _ServerDefaultType from ..runtime.migration import MigrationContext from ..script.revision import _RevIdType @@ -141,12 +141,14 @@ def __init__( type_: Optional[str] = None, *, schema: Optional[str] = None, + if_exists: Optional[bool] = None, _reverse: Optional[AddConstraintOp] = None, ) -> None: self.constraint_name = constraint_name self.table_name = table_name self.constraint_type = type_ self.schema = schema + self.if_exists = if_exists self._reverse = _reverse def reverse(self) -> AddConstraintOp: @@ -204,6 +206,7 @@ def drop_constraint( type_: Optional[str] = None, *, schema: Optional[str] = None, + if_exists: Optional[bool] = None, ) -> None: r"""Drop a constraint of the given name, typically via DROP CONSTRAINT. @@ -215,10 +218,20 @@ def drop_constraint( quoting of the schema outside of the default behavior, use the SQLAlchemy construct :class:`~sqlalchemy.sql.elements.quoted_name`. + :param if_exists: If True, adds IF EXISTS operator when + dropping the constraint + + .. 
versionadded:: 1.16.0 """ - op = cls(constraint_name, table_name, type_=type_, schema=schema) + op = cls( + constraint_name, + table_name, + type_=type_, + schema=schema, + if_exists=if_exists, + ) return operations.invoke(op) @classmethod @@ -933,7 +946,7 @@ def create_index( operations: Operations, index_name: Optional[str], table_name: str, - columns: Sequence[Union[str, TextClause, Function[Any]]], + columns: Sequence[Union[str, TextClause, ColumnElement[Any]]], *, schema: Optional[str] = None, unique: bool = False, @@ -1682,7 +1695,9 @@ def __init__( *, schema: Optional[str] = None, existing_type: Optional[Any] = None, - existing_server_default: Any = False, + existing_server_default: Union[ + _ServerDefaultType, None, Literal[False] + ] = False, existing_nullable: Optional[bool] = None, existing_comment: Optional[str] = None, modify_nullable: Optional[bool] = None, @@ -1841,14 +1856,16 @@ def alter_column( *, nullable: Optional[bool] = None, comment: Optional[Union[str, Literal[False]]] = False, - server_default: Any = False, + server_default: Union[ + _ServerDefaultType, None, Literal[False] + ] = False, new_column_name: Optional[str] = None, type_: Optional[Union[TypeEngine[Any], Type[TypeEngine[Any]]]] = None, existing_type: Optional[ Union[TypeEngine[Any], Type[TypeEngine[Any]]] ] = None, - existing_server_default: Optional[ - Union[str, bool, Identity, Computed] + existing_server_default: Union[ + _ServerDefaultType, None, Literal[False] ] = False, existing_nullable: Optional[bool] = None, existing_comment: Optional[str] = None, @@ -1964,14 +1981,16 @@ def batch_alter_column( *, nullable: Optional[bool] = None, comment: Optional[Union[str, Literal[False]]] = False, - server_default: Any = False, + server_default: Union[ + _ServerDefaultType, None, Literal[False] + ] = False, new_column_name: Optional[str] = None, type_: Optional[Union[TypeEngine[Any], Type[TypeEngine[Any]]]] = None, existing_type: Optional[ Union[TypeEngine[Any], Type[TypeEngine[Any]]] ] 
= None, - existing_server_default: Optional[ - Union[str, bool, Identity, Computed] + existing_server_default: Union[ + _ServerDefaultType, None, Literal[False] ] = False, existing_nullable: Optional[bool] = None, existing_comment: Optional[str] = None, @@ -2033,16 +2052,24 @@ def __init__( column: Column[Any], *, schema: Optional[str] = None, + if_not_exists: Optional[bool] = None, + inline_references: Optional[bool] = None, + inline_primary_key: Optional[bool] = None, **kw: Any, ) -> None: super().__init__(table_name, schema=schema) self.column = column + self.if_not_exists = if_not_exists + self.inline_references = inline_references + self.inline_primary_key = inline_primary_key self.kw = kw def reverse(self) -> DropColumnOp: - return DropColumnOp.from_column_and_tablename( + op = DropColumnOp.from_column_and_tablename( self.schema, self.table_name, self.column ) + op.if_exists = self.if_not_exists + return op def to_diff_tuple( self, @@ -2073,6 +2100,9 @@ def add_column( column: Column[Any], *, schema: Optional[str] = None, + if_not_exists: Optional[bool] = None, + inline_references: Optional[bool] = None, + inline_primary_key: Optional[bool] = None, ) -> None: """Issue an "add column" instruction using the current migration context. @@ -2087,36 +2117,77 @@ def add_column( The :meth:`.Operations.add_column` method typically corresponds to the SQL command "ALTER TABLE... ADD COLUMN". Within the scope of this command, the column's name, datatype, nullability, - and optional server-generated defaults may be indicated. + and optional server-generated defaults may be indicated. Options + also exist for control of single-column primary key and foreign key + constraints to be generated. .. 
note:: - With the exception of NOT NULL constraints or single-column FOREIGN - KEY constraints, other kinds of constraints such as PRIMARY KEY, - UNIQUE or CHECK constraints **cannot** be generated using this - method; for these constraints, refer to operations such as - :meth:`.Operations.create_primary_key` and - :meth:`.Operations.create_check_constraint`. In particular, the - following :class:`~sqlalchemy.schema.Column` parameters are - **ignored**: - - * :paramref:`~sqlalchemy.schema.Column.primary_key` - SQL databases - typically do not support an ALTER operation that can add - individual columns one at a time to an existing primary key - constraint, therefore it's less ambiguous to use the - :meth:`.Operations.create_primary_key` method, which assumes no - existing primary key constraint is present. + Not all constraint types may be indicated with this directive. + NOT NULL, FOREIGN KEY, and CHECK are honored, PRIMARY KEY + is conditionally honored, UNIQUE + is currently not. + + As of 1.18.2, the following :class:`~sqlalchemy.schema.Column` + parameters are **ignored**: + * :paramref:`~sqlalchemy.schema.Column.unique` - use the :meth:`.Operations.create_unique_constraint` method * :paramref:`~sqlalchemy.schema.Column.index` - use the :meth:`.Operations.create_index` method + **PRIMARY KEY support** + + The provided :class:`~sqlalchemy.schema.Column` object may include a + ``primary_key=True`` directive, indicating the column intends to be + part of a primary key constraint. However by default, the inline + "PRIMARY KEY" directive is not emitted, and it's assumed that a + separate :meth:`.Operations.create_primary_key` directive will be used + to create this constraint, which may potentially include other columns + as well as have an explicit name. 
To instead render an inline + "PRIMARY KEY" directive, the + :paramref:`.AddColumnOp.inline_primary_key` parameter may be indicated + at the same time as the ``primary_key`` parameter (both are needed):: + + from alembic import op + from sqlalchemy import Column, INTEGER + + op.add_column( + "organization", + Column("id", INTEGER, primary_key=True), + inline_primary_key=True + ) + + The ``primary_key=True`` parameter on + :class:`~sqlalchemy.schema.Column` also indicates behaviors such as + using the ``SERIAL`` datatype with the PostgreSQL database, which is + why two separate, independent parameters are provided to support all + combinations. + + .. versionadded:: 1.18.4 Added + :paramref:`.AddColumnOp.inline_primary_key` + to control use of the ``PRIMARY KEY`` inline directive. + + **FOREIGN KEY support** The provided :class:`~sqlalchemy.schema.Column` object may include a :class:`~sqlalchemy.schema.ForeignKey` constraint directive, - referencing a remote table name. For this specific type of constraint, - Alembic will automatically emit a second ALTER statement in order to - add the single-column FOREIGN KEY constraint separately:: + referencing a remote table name. By default, Alembic will automatically + emit a second ALTER statement in order to add the single-column FOREIGN + KEY constraint separately:: + + from alembic import op + from sqlalchemy import Column, INTEGER, ForeignKey + + op.add_column( + "organization", + Column("account_id", INTEGER, ForeignKey("accounts.id")), + ) + + To render the FOREIGN KEY constraint inline within the ADD COLUMN + directive, use the ``inline_references`` parameter. 
This can improve + performance on large tables since the constraint is marked as valid + immediately for nullable columns:: from alembic import op from sqlalchemy import Column, INTEGER, ForeignKey @@ -2124,8 +2195,11 @@ def add_column( op.add_column( "organization", Column("account_id", INTEGER, ForeignKey("accounts.id")), + inline_references=True, ) + **Indicating server side defaults** + The column argument passed to :meth:`.Operations.add_column` is a :class:`~sqlalchemy.schema.Column` construct, used in the same way it's used in SQLAlchemy. In particular, values or functions to be indicated @@ -2149,10 +2223,38 @@ def add_column( quoting of the schema outside of the default behavior, use the SQLAlchemy construct :class:`~sqlalchemy.sql.elements.quoted_name`. + :param if_not_exists: If True, adds ``IF NOT EXISTS`` operator + when creating the new column for compatible dialects + + .. versionadded:: 1.16.0 + + :param inline_references: If True, renders ``FOREIGN KEY`` constraints + inline within the ``ADD COLUMN`` directive using ``REFERENCES`` + syntax, rather than as a separate ``ALTER TABLE ADD CONSTRAINT`` + statement. This is supported by PostgreSQL, Oracle, MySQL 5.7+, and + MariaDB 10.5+. + + .. versionadded:: 1.18.2 + + :param inline_primary_key: If True, renders the ``PRIMARY KEY`` phrase + inline within the ``ADD COLUMN`` directive. When not present or + False, ``PRIMARY KEY`` is not emitted; it is assumed that the + migration script will include an additional + :meth:`.Operations.create_primary_key` directive to create a full + primary key constraint. + + .. 
versionadded:: 1.18.4 """ - op = cls(table_name, column, schema=schema) + op = cls( + table_name, + column, + schema=schema, + if_not_exists=if_not_exists, + inline_references=inline_references, + inline_primary_key=inline_primary_key, + ) return operations.invoke(op) @classmethod @@ -2163,6 +2265,9 @@ def batch_add_column( *, insert_before: Optional[str] = None, insert_after: Optional[str] = None, + if_not_exists: Optional[bool] = None, + inline_references: Optional[bool] = None, + inline_primary_key: Optional[bool] = None, ) -> None: """Issue an "add column" instruction using the current batch migration context. @@ -2183,6 +2288,9 @@ def batch_add_column( operations.impl.table_name, column, schema=operations.impl.schema, + if_not_exists=if_not_exists, + inline_references=inline_references, + inline_primary_key=inline_primary_key, **kw, ) return operations.invoke(op) @@ -2199,12 +2307,14 @@ def __init__( column_name: str, *, schema: Optional[str] = None, + if_exists: Optional[bool] = None, _reverse: Optional[AddColumnOp] = None, **kw: Any, ) -> None: super().__init__(table_name, schema=schema) self.column_name = column_name self.kw = kw + self.if_exists = if_exists self._reverse = _reverse def to_diff_tuple( @@ -2224,9 +2334,11 @@ def reverse(self) -> AddColumnOp: "original column is not present" ) - return AddColumnOp.from_column_and_tablename( + op = AddColumnOp.from_column_and_tablename( self.schema, self.table_name, self._reverse.column ) + op.if_not_exists = self.if_exists + return op @classmethod def from_column_and_tablename( @@ -2273,6 +2385,11 @@ def drop_column( quoting of the schema outside of the default behavior, use the SQLAlchemy construct :class:`~sqlalchemy.sql.elements.quoted_name`. + :param if_exists: If True, adds IF EXISTS operator when + dropping the new column for compatible dialects + + .. versionadded:: 1.16.0 + :param mssql_drop_check: Optional boolean. 
When ``True``, on Microsoft SQL Server only, first drop the CHECK constraint on the column using a @@ -2294,7 +2411,6 @@ def drop_column( then exec's a separate DROP CONSTRAINT for that default. Only works if the column has exactly one FK constraint which refers to it, at the moment. - """ op = cls(table_name, column_name, schema=schema, **kw) @@ -2709,7 +2825,7 @@ def __init__( head: Optional[str] = None, splice: Optional[bool] = None, branch_label: Optional[_RevIdType] = None, - version_path: Optional[str] = None, + version_path: Union[str, os.PathLike[str], None] = None, depends_on: Optional[_RevIdType] = None, ) -> None: self.rev_id = rev_id @@ -2718,7 +2834,9 @@ def __init__( self.head = head self.splice = splice self.branch_label = branch_label - self.version_path = version_path + self.version_path = ( + pathlib.Path(version_path).as_posix() if version_path else None + ) self.depends_on = depends_on self.upgrade_ops = upgrade_ops self.downgrade_ops = downgrade_ops diff --git a/libs/alembic/operations/toimpl.py b/libs/alembic/operations/toimpl.py index 4b960049c7..85b9d8a324 100644 --- a/libs/alembic/operations/toimpl.py +++ b/libs/alembic/operations/toimpl.py @@ -8,7 +8,7 @@ from . 
import ops from .base import Operations from ..util.sqla_compat import _copy -from ..util.sqla_compat import sqla_14 +from ..util.sqla_compat import sqla_2 if TYPE_CHECKING: from sqlalchemy.sql.schema import Table @@ -50,6 +50,11 @@ def _count_constraint(constraint): if _count_constraint(constraint): operations.impl.drop_constraint(constraint) + # some weird pyright quirk here, these have Literal[False] + # in their types, not sure why pyright thinks they could be True + assert existing_server_default is not True # type: ignore[comparison-overlap] # noqa: E501 + assert comment is not True # type: ignore[comparison-overlap] + operations.impl.alter_column( table_name, column_name, @@ -81,9 +86,6 @@ def _count_constraint(constraint): def drop_table(operations: "Operations", operation: "ops.DropTableOp") -> None: kw = {} if operation.if_exists is not None: - if not sqla_14: - raise NotImplementedError("SQLAlchemy 1.4+ required") - kw["if_exists"] = operation.if_exists operations.impl.drop_table( operation.to_table(operations.migration_context), **kw @@ -96,7 +98,11 @@ def drop_column( ) -> None: column = operation.to_column(operations.migration_context) operations.impl.drop_column( - operation.table_name, column, schema=operation.schema, **operation.kw + operation.table_name, + column, + schema=operation.schema, + if_exists=operation.if_exists, + **operation.kw, ) @@ -107,9 +113,6 @@ def create_index( idx = operation.to_index(operations.migration_context) kw = {} if operation.if_not_exists is not None: - if not sqla_14: - raise NotImplementedError("SQLAlchemy 1.4+ required") - kw["if_not_exists"] = operation.if_not_exists operations.impl.create_index(idx, **kw) @@ -118,9 +121,6 @@ def create_index( def drop_index(operations: "Operations", operation: "ops.DropIndexOp") -> None: kw = {} if operation.if_exists is not None: - if not sqla_14: - raise NotImplementedError("SQLAlchemy 1.4+ required") - kw["if_exists"] = operation.if_exists operations.impl.drop_index( @@ -135,9 
+135,6 @@ def create_table( ) -> "Table": kw = {} if operation.if_not_exists is not None: - if not sqla_14: - raise NotImplementedError("SQLAlchemy 1.4+ required") - kw["if_not_exists"] = operation.if_not_exists table = operation.to_table(operations.migration_context) operations.impl.create_table(table, **kw) @@ -175,15 +172,35 @@ def add_column(operations: "Operations", operation: "ops.AddColumnOp") -> None: column = operation.column schema = operation.schema kw = operation.kw + inline_references = operation.inline_references + inline_primary_key = operation.inline_primary_key if column.table is not None: column = _copy(column) t = operations.schema_obj.table(table_name, column, schema=schema) - operations.impl.add_column(table_name, column, schema=schema, **kw) + operations.impl.add_column( + table_name, + column, + schema=schema, + if_not_exists=operation.if_not_exists, + inline_references=inline_references, + inline_primary_key=inline_primary_key, + **kw, + ) for constraint in t.constraints: if not isinstance(constraint, sa_schema.PrimaryKeyConstraint): + # Skip ForeignKeyConstraint if it was rendered inline + # This only happens when inline_references=True AND there's exactly + # one FK AND the constraint is single-column + if ( + inline_references + and isinstance(constraint, sa_schema.ForeignKeyConstraint) + and len(column.foreign_keys) == 1 + and len(constraint.columns) == 1 + ): + continue operations.impl.add_constraint(constraint) for index in t.indexes: operations.impl.create_index(index) @@ -210,13 +227,19 @@ def create_constraint( def drop_constraint( operations: "Operations", operation: "ops.DropConstraintOp" ) -> None: + kw = {} + if operation.if_exists is not None: + if not sqla_2: + raise NotImplementedError("SQLAlchemy 2.0 required") + kw["if_exists"] = operation.if_exists operations.impl.drop_constraint( operations.schema_obj.generic_constraint( operation.constraint_name, operation.table_name, operation.constraint_type, schema=operation.schema, - 
) + ), + **kw, ) diff --git a/libs/alembic/runtime/environment.py b/libs/alembic/runtime/environment.py index a30972ec91..5817e2d9fd 100644 --- a/libs/alembic/runtime/environment.py +++ b/libs/alembic/runtime/environment.py @@ -3,7 +3,6 @@ from typing import Any from typing import Callable from typing import Collection -from typing import ContextManager from typing import Dict from typing import List from typing import Mapping @@ -18,6 +17,7 @@ from sqlalchemy.sql.schema import Column from sqlalchemy.sql.schema import FetchedValue +from typing_extensions import ContextManager from typing_extensions import Literal from .migration import _ProxyTransaction @@ -338,7 +338,7 @@ def get_tag_argument(self) -> Optional[str]: line. """ - return self.context_opts.get("tag", None) # type: ignore[no-any-return] # noqa: E501 + return self.context_opts.get("tag", None) @overload def get_x_argument(self, as_dictionary: Literal[False]) -> List[str]: ... @@ -441,6 +441,7 @@ def configure( sqlalchemy_module_prefix: str = "sa.", user_module_prefix: Optional[str] = None, on_version_apply: Optional[OnVersionApplyFn] = None, + autogenerate_plugins: Sequence[str] | None = None, **kw: Any, ) -> None: """Configure a :class:`.MigrationContext` within this @@ -860,6 +861,25 @@ def process_revision_directives(context, revision, directives): :paramref:`.command.revision.process_revision_directives` + :param autogenerate_plugins: A list of string names of "plugins" that + should participate in this autogenerate run. Defaults to the list + ``["alembic.autogenerate.*"]``, which indicates that Alembic's default + autogeneration plugins will be used. + + See the section :ref:`plugins_autogenerate` for complete background + on how to use this parameter. + + .. versionadded:: 1.18.0 Added a new plugin system for autogenerate + compare directives. + + .. 
seealso:: + + :ref:`plugins_autogenerate` - background on enabling/disabling + autogenerate plugins + + :ref:`alembic.plugins.toplevel` - Introduction and documentation + to the plugin system + Parameters specific to individual backends: :param mssql_batch_separator: The "batch separator" which will @@ -903,6 +923,9 @@ def process_revision_directives(context, revision, directives): opts["process_revision_directives"] = process_revision_directives opts["on_version_apply"] = util.to_tuple(on_version_apply, default=()) + if autogenerate_plugins is not None: + opts["autogenerate_plugins"] = autogenerate_plugins + if render_item is not None: opts["render_item"] = render_item opts["compare_type"] = compare_type @@ -976,7 +999,7 @@ def static_output(self, text: str) -> None: def begin_transaction( self, - ) -> Union[_ProxyTransaction, ContextManager[None]]: + ) -> Union[_ProxyTransaction, ContextManager[None, Optional[bool]]]: """Return a context manager that will enclose an operation within a "transaction", as defined by the environment's offline diff --git a/libs/alembic/runtime/migration.py b/libs/alembic/runtime/migration.py index 28f01c3b30..3fccf22a67 100644 --- a/libs/alembic/runtime/migration.py +++ b/libs/alembic/runtime/migration.py @@ -11,7 +11,6 @@ from typing import Callable from typing import cast from typing import Collection -from typing import ContextManager from typing import Dict from typing import Iterable from typing import Iterator @@ -22,17 +21,17 @@ from typing import TYPE_CHECKING from typing import Union -from sqlalchemy import Column from sqlalchemy import literal_column +from sqlalchemy import select from sqlalchemy.engine import Engine from sqlalchemy.engine import url as sqla_url from sqlalchemy.engine.strategies import MockEngineStrategy +from typing_extensions import ContextManager from .. import ddl from .. 
import util from ..util import sqla_compat from ..util.compat import EncodedIO -from ..util.sqla_compat import _select if TYPE_CHECKING: from sqlalchemy.engine import Dialect @@ -175,7 +174,11 @@ def __init__( opts["output_encoding"], ) else: - self.output_buffer = opts.get("output_buffer", sys.stdout) + self.output_buffer = opts.get( + "output_buffer", sys.stdout + ) # type:ignore[assignment] # noqa: E501 + + self.transactional_ddl = transactional_ddl self._user_compare_type = opts.get("compare_type", True) self._user_compare_server_default = opts.get( @@ -368,7 +371,7 @@ def upgrade(): def begin_transaction( self, _per_migration: bool = False - ) -> Union[_ProxyTransaction, ContextManager[None]]: + ) -> Union[_ProxyTransaction, ContextManager[None, Optional[bool]]]: """Begin a logical transaction for migration operations. This method is used within an ``env.py`` script to demarcate where @@ -534,7 +537,7 @@ def get_current_heads(self) -> Tuple[str, ...]: return tuple( row[0] for row in self.connection.execute( - _select(self._version.c.version_num) + select(self._version.c.version_num) ) ) @@ -702,54 +705,6 @@ def config(self) -> Optional[Config]: else: return None - def _compare_type( - self, inspector_column: Column[Any], metadata_column: Column - ) -> bool: - if self._user_compare_type is False: - return False - - if callable(self._user_compare_type): - user_value = self._user_compare_type( - self, - inspector_column, - metadata_column, - inspector_column.type, - metadata_column.type, - ) - if user_value is not None: - return user_value - - return self.impl.compare_type(inspector_column, metadata_column) - - def _compare_server_default( - self, - inspector_column: Column[Any], - metadata_column: Column[Any], - rendered_metadata_default: Optional[str], - rendered_column_default: Optional[str], - ) -> bool: - if self._user_compare_server_default is False: - return False - - if callable(self._user_compare_server_default): - user_value = 
self._user_compare_server_default( - self, - inspector_column, - metadata_column, - rendered_column_default, - metadata_column.server_default, - rendered_metadata_default, - ) - if user_value is not None: - return user_value - - return self.impl.compare_server_default( - inspector_column, - metadata_column, - rendered_metadata_default, - rendered_column_default, - ) - class HeadMaintainer: def __init__(self, context: MigrationContext, heads: Any) -> None: diff --git a/libs/alembic/runtime/plugins.py b/libs/alembic/runtime/plugins.py new file mode 100644 index 0000000000..be1d590f55 --- /dev/null +++ b/libs/alembic/runtime/plugins.py @@ -0,0 +1,179 @@ +from __future__ import annotations + +from importlib import metadata +import logging +import re +from types import ModuleType +from typing import Callable +from typing import Pattern +from typing import TYPE_CHECKING + +from .. import util +from ..util import DispatchPriority +from ..util import PriorityDispatcher + +if TYPE_CHECKING: + from ..util import PriorityDispatchResult + +_all_plugins = {} + + +log = logging.getLogger(__name__) + + +class Plugin: + """Describe a series of functions that are pulled in as a plugin. + + This is initially to provide for portable lists of autogenerate + comparison functions, however the setup for a plugin can run any + other kinds of global registration as well. + + .. 
versionadded:: 1.18.0 + + """ + + def __init__(self, name: str): + self.name = name + log.info("setup plugin %s", name) + if name in _all_plugins: + raise ValueError(f"A plugin named {name} is already registered") + _all_plugins[name] = self + self.autogenerate_comparators = PriorityDispatcher() + + def remove(self) -> None: + """remove this plugin""" + + del _all_plugins[self.name] + + def add_autogenerate_comparator( + self, + fn: Callable[..., PriorityDispatchResult], + compare_target: str, + compare_element: str | None = None, + *, + qualifier: str = "default", + priority: DispatchPriority = DispatchPriority.MEDIUM, + ) -> None: + """Register an autogenerate comparison function. + + See the section :ref:`plugins_registering_autogenerate` for detailed + examples on how to use this method. + + :param fn: The comparison function to register. The function receives + arguments specific to the type of comparison being performed and + should return a :class:`.PriorityDispatchResult` value. + + :param compare_target: The type of comparison being performed + (e.g., ``"table"``, ``"column"``, ``"type"``). + + :param compare_element: Optional sub-element being compared within + the target type. + + :param qualifier: Database dialect qualifier. Use ``"default"`` for + all dialects, or specify a dialect name like ``"postgresql"`` to + register a dialect-specific handler. Defaults to ``"default"``. + + :param priority: Execution priority for this comparison function. + Functions are executed in priority order from + :attr:`.DispatchPriority.FIRST` to :attr:`.DispatchPriority.LAST`. + Defaults to :attr:`.DispatchPriority.MEDIUM`. 
+ + """ + self.autogenerate_comparators.dispatch_for( + compare_target, + subgroup=compare_element, + priority=priority, + qualifier=qualifier, + )(fn) + + @classmethod + def populate_autogenerate_priority_dispatch( + cls, comparators: PriorityDispatcher, include_plugins: list[str] + ) -> None: + """Populate all current autogenerate comparison functions into + a given PriorityDispatcher.""" + + exclude: set[Pattern[str]] = set() + include: dict[str, Pattern[str]] = {} + + matched_expressions: set[str] = set() + + for name in include_plugins: + if name.startswith("~"): + exclude.add(_make_re(name[1:])) + else: + include[name] = _make_re(name) + + for plugin in _all_plugins.values(): + if any(excl.match(plugin.name) for excl in exclude): + continue + + include_matches = [ + incl for incl in include if include[incl].match(plugin.name) + ] + if not include_matches: + continue + else: + matched_expressions.update(include_matches) + + log.info("setting up autogenerate plugin %s", plugin.name) + comparators.populate_with(plugin.autogenerate_comparators) + + never_matched = set(include).difference(matched_expressions) + if never_matched: + raise util.CommandError( + f"Did not locate plugins: {', '.join(never_matched)}" + ) + + @classmethod + def setup_plugin_from_module(cls, module: ModuleType, name: str) -> None: + """Call the ``setup()`` function of a plugin module, identified by + passing the module object itself. + + E.g.:: + + from alembic.runtime.plugins import Plugin + import myproject.alembic_plugin + + # Register the plugin manually + Plugin.setup_plugin_from_module( + myproject.alembic_plugin, + "myproject.custom_operations" + ) + + This will generate a new :class:`.Plugin` object with the given + name, which will register itself in the global list of plugins. + Then the module's ``setup()`` function is invoked, passing that + :class:`.Plugin` object. 
+ + This exact process is invoked automatically at import time for any + plugin module that is published via the ``alembic.plugins`` entrypoint. + + """ + module.setup(Plugin(name)) + + +def _make_re(name: str) -> Pattern[str]: + tokens = name.split(".") + + reg = r"" + for token in tokens: + if token == "*": + reg += r"\..+?" + elif token.isidentifier(): + reg += r"\." + token + else: + raise ValueError(f"Invalid plugin expression {name!r}") + + # omit leading r'\.' + return re.compile(f"^{reg[2:]}$") + + +def _setup() -> None: + # setup third party plugins + for entrypoint in metadata.entry_points(group="alembic.plugins"): + for mod in entrypoint.load(): + Plugin.setup_plugin_from_module(mod, entrypoint.name) + + +_setup() diff --git a/libs/alembic/script/base.py b/libs/alembic/script/base.py index 30df6ddb2b..f841708598 100644 --- a/libs/alembic/script/base.py +++ b/libs/alembic/script/base.py @@ -3,6 +3,7 @@ from contextlib import contextmanager import datetime import os +from pathlib import Path import re import shutil import sys @@ -11,7 +12,6 @@ from typing import cast from typing import Iterator from typing import List -from typing import Mapping from typing import Optional from typing import Sequence from typing import Set @@ -25,6 +25,7 @@ from ..runtime import migration from ..util import compat from ..util import not_none +from ..util.pyfiles import _preserving_path_as_str if TYPE_CHECKING: from .revision import _GetRevArg @@ -32,16 +33,13 @@ from .revision import Revision from ..config import Config from ..config import MessagingOptions + from ..config import PostWriteHookConfig from ..runtime.migration import RevisionStep from ..runtime.migration import StampStep try: - if compat.py39: - from zoneinfo import ZoneInfo - from zoneinfo import ZoneInfoNotFoundError - else: - from backports.zoneinfo import ZoneInfo # type: ignore[import-not-found,no-redef] # noqa: E501 - from backports.zoneinfo import ZoneInfoNotFoundError # type: ignore[no-redef] # noqa: 
E501 + from zoneinfo import ZoneInfo + from zoneinfo import ZoneInfoNotFoundError except ImportError: ZoneInfo = None # type: ignore[assignment, misc] @@ -50,9 +48,6 @@ _legacy_rev = re.compile(r"([a-f0-9]+)\.py$") _slug_re = re.compile(r"\w+") _default_file_template = "%(rev)s_%(slug)s" -_split_on_space_comma = re.compile(r", *|(?: +)") - -_split_on_space_comma_colon = re.compile(r", *|(?: +)|\:") class ScriptDirectory: @@ -77,40 +72,55 @@ class ScriptDirectory: def __init__( self, - dir: str, # noqa + dir: Union[str, os.PathLike[str]], # noqa: A002 file_template: str = _default_file_template, truncate_slug_length: Optional[int] = 40, - version_locations: Optional[List[str]] = None, + version_locations: Optional[ + Sequence[Union[str, os.PathLike[str]]] + ] = None, sourceless: bool = False, output_encoding: str = "utf-8", timezone: Optional[str] = None, - hook_config: Optional[Mapping[str, str]] = None, + hooks: list[PostWriteHookConfig] = [], recursive_version_locations: bool = False, messaging_opts: MessagingOptions = cast( "MessagingOptions", util.EMPTY_DICT ), ) -> None: - self.dir = dir + self.dir = _preserving_path_as_str(dir) + self.version_locations = [ + _preserving_path_as_str(p) for p in version_locations or () + ] self.file_template = file_template - self.version_locations = version_locations self.truncate_slug_length = truncate_slug_length or 40 self.sourceless = sourceless self.output_encoding = output_encoding self.revision_map = revision.RevisionMap(self._load_revisions) self.timezone = timezone - self.hook_config = hook_config + self.hooks = hooks self.recursive_version_locations = recursive_version_locations self.messaging_opts = messaging_opts if not os.access(dir, os.F_OK): raise util.CommandError( - "Path doesn't exist: %r. Please use " + f"Path doesn't exist: {dir}. Please use " "the 'init' command to create a new " - "scripts folder." % os.path.abspath(dir) + "scripts folder." 
) @property def versions(self) -> str: + """return a single version location based on the sole path passed + within version_locations. + + If multiple version locations are configured, an error is raised. + + + """ + return str(self._singular_version_location) + + @util.memoized_property + def _singular_version_location(self) -> Path: loc = self._version_locations if len(loc) > 1: raise util.CommandError("Multiple version_locations present") @@ -118,40 +128,31 @@ def versions(self) -> str: return loc[0] @util.memoized_property - def _version_locations(self) -> Sequence[str]: + def _version_locations(self) -> Sequence[Path]: if self.version_locations: return [ - os.path.abspath(util.coerce_resource_to_filename(location)) + util.coerce_resource_to_filename(location).absolute() for location in self.version_locations ] else: - return (os.path.abspath(os.path.join(self.dir, "versions")),) + return [Path(self.dir, "versions").absolute()] def _load_revisions(self) -> Iterator[Script]: - if self.version_locations: - paths = [ - vers - for vers in self._version_locations - if os.path.exists(vers) - ] - else: - paths = [self.versions] + paths = [vers for vers in self._version_locations if vers.exists()] dupes = set() for vers in paths: for file_path in Script._list_py_dir(self, vers): - real_path = os.path.realpath(file_path) + real_path = file_path.resolve() if real_path in dupes: util.warn( - "File %s loaded twice! ignoring. Please ensure " - "version_locations is unique." % real_path + f"File {real_path} loaded twice! ignoring. " + "Please ensure version_locations is unique." ) continue dupes.add(real_path) - filename = os.path.basename(real_path) - dir_name = os.path.dirname(real_path) - script = Script._from_filename(self, dir_name, filename) + script = Script._from_path(self, real_path) if script is None: continue yield script @@ -165,78 +166,36 @@ def from_config(cls, config: Config) -> ScriptDirectory: present. 
""" - script_location = config.get_main_option("script_location") + script_location = config.get_alembic_option("script_location") if script_location is None: raise util.CommandError( - "No 'script_location' key " "found in configuration." + "No 'script_location' key found in configuration." ) truncate_slug_length: Optional[int] - tsl = config.get_main_option("truncate_slug_length") + tsl = config.get_alembic_option("truncate_slug_length") if tsl is not None: truncate_slug_length = int(tsl) else: truncate_slug_length = None - version_locations_str = config.get_main_option("version_locations") - version_locations: Optional[List[str]] - if version_locations_str: - version_path_separator = config.get_main_option( - "version_path_separator" - ) - - split_on_path = { - None: None, - "space": " ", - "newline": "\n", - "os": os.pathsep, - ":": ":", - ";": ";", - } - - try: - split_char: Optional[str] = split_on_path[ - version_path_separator - ] - except KeyError as ke: - raise ValueError( - "'%s' is not a valid value for " - "version_path_separator; " - "expected 'space', 'newline', 'os', ':', ';'" - % version_path_separator - ) from ke - else: - if split_char is None: - # legacy behaviour for backwards compatibility - version_locations = _split_on_space_comma.split( - version_locations_str - ) - else: - version_locations = [ - x.strip() - for x in version_locations_str.split(split_char) - if x - ] - else: - version_locations = None - - prepend_sys_path = config.get_main_option("prepend_sys_path") + prepend_sys_path = config.get_prepend_sys_paths_list() if prepend_sys_path: - sys.path[:0] = list( - _split_on_space_comma_colon.split(prepend_sys_path) - ) + sys.path[:0] = prepend_sys_path - rvl = config.get_main_option("recursive_version_locations") == "true" + rvl = config.get_alembic_boolean_option("recursive_version_locations") return ScriptDirectory( util.coerce_resource_to_filename(script_location), - file_template=config.get_main_option( + 
file_template=config.get_alembic_option( "file_template", _default_file_template ), truncate_slug_length=truncate_slug_length, - sourceless=config.get_main_option("sourceless") == "true", - output_encoding=config.get_main_option("output_encoding", "utf-8"), - version_locations=version_locations, - timezone=config.get_main_option("timezone"), - hook_config=config.get_section("post_write_hooks", {}), + sourceless=config.get_alembic_boolean_option("sourceless"), + output_encoding=config.get_alembic_option( + "output_encoding", "utf-8" + ), + version_locations=config.get_version_locations_list(), + timezone=config.get_alembic_option("timezone"), + hooks=config.get_hooks_list(), recursive_version_locations=rvl, messaging_opts=config.messaging_opts, ) @@ -587,23 +546,36 @@ def run_env(self) -> None: @property def env_py_location(self) -> str: - return os.path.abspath(os.path.join(self.dir, "env.py")) + return str(Path(self.dir, "env.py")) + + def _append_template(self, src: Path, dest: Path, **kw: Any) -> None: + with util.status( + f"Appending to existing {dest.absolute()}", + **self.messaging_opts, + ): + util.template_to_file( + src, + dest, + self.output_encoding, + append_with_newlines=True, + **kw, + ) - def _generate_template(self, src: str, dest: str, **kw: Any) -> None: + def _generate_template(self, src: Path, dest: Path, **kw: Any) -> None: with util.status( - f"Generating {os.path.abspath(dest)}", **self.messaging_opts + f"Generating {dest.absolute()}", **self.messaging_opts ): util.template_to_file(src, dest, self.output_encoding, **kw) - def _copy_file(self, src: str, dest: str) -> None: + def _copy_file(self, src: Path, dest: Path) -> None: with util.status( - f"Generating {os.path.abspath(dest)}", **self.messaging_opts + f"Generating {dest.absolute()}", **self.messaging_opts ): shutil.copy(src, dest) - def _ensure_directory(self, path: str) -> None: - path = os.path.abspath(path) - if not os.path.exists(path): + def _ensure_directory(self, path: Path) -> 
None: + path = path.absolute() + if not path.exists(): with util.status( f"Creating directory {path}", **self.messaging_opts ): @@ -628,11 +600,10 @@ def _generate_create_date(self) -> datetime.datetime: raise util.CommandError( "Can't locate timezone: %s" % self.timezone ) from None - create_date = ( - datetime.datetime.utcnow() - .replace(tzinfo=datetime.timezone.utc) - .astimezone(tzinfo) - ) + + create_date = datetime.datetime.now( + tz=datetime.timezone.utc + ).astimezone(tzinfo) else: create_date = datetime.datetime.now() return create_date @@ -644,7 +615,8 @@ def generate_revision( head: Optional[_RevIdType] = None, splice: Optional[bool] = False, branch_labels: Optional[_RevIdType] = None, - version_path: Optional[str] = None, + version_path: Union[str, os.PathLike[str], None] = None, + file_template: Optional[str] = None, depends_on: Optional[_RevIdType] = None, **kw: Any, ) -> Optional[Script]: @@ -697,7 +669,7 @@ def generate_revision( for head_ in heads: if head_ is not None: assert isinstance(head_, Script) - version_path = os.path.dirname(head_.path) + version_path = head_._script_path.parent break else: raise util.CommandError( @@ -705,22 +677,26 @@ def generate_revision( "please specify --version-path" ) else: - version_path = self.versions + version_path = self._singular_version_location + else: + version_path = Path(version_path) - norm_path = os.path.normpath(os.path.abspath(version_path)) + assert isinstance(version_path, Path) + norm_path = version_path.absolute() for vers_path in self._version_locations: - if os.path.normpath(vers_path) == norm_path: + if vers_path.absolute() == norm_path: break else: raise util.CommandError( - "Path %s is not represented in current " - "version locations" % version_path + f"Path {version_path} is not represented in current " + "version locations" ) if self.version_locations: self._ensure_directory(version_path) path = self._rev_path(version_path, revid, message, create_date) + 
self._ensure_directory(path.parent) if not splice: for head_ in heads: @@ -749,7 +725,7 @@ def generate_revision( resolved_depends_on = None self._generate_template( - os.path.join(self.dir, "script.py.mako"), + Path(self.dir, "script.py.mako"), path, up_revision=str(revid), down_revision=revision.tuple_rev_as_scalar( @@ -763,7 +739,7 @@ def generate_revision( **kw, ) - post_write_hooks = self.hook_config + post_write_hooks = self.hooks if post_write_hooks: write_hooks._run_hooks(path, post_write_hooks) @@ -786,11 +762,11 @@ def generate_revision( def _rev_path( self, - path: str, + path: Union[str, os.PathLike[str]], rev_id: str, message: Optional[str], create_date: datetime.datetime, - ) -> str: + ) -> Path: epoch = int(create_date.timestamp()) slug = "_".join(_slug_re.findall(message or "")).lower() if len(slug) > self.truncate_slug_length: @@ -809,7 +785,7 @@ def _rev_path( "second": create_date.second, } ) - return os.path.join(path, filename) + return Path(path) / filename class Script(revision.Revision): @@ -820,9 +796,14 @@ class Script(revision.Revision): """ - def __init__(self, module: ModuleType, rev_id: str, path: str): + def __init__( + self, + module: ModuleType, + rev_id: str, + path: Union[str, os.PathLike[str]], + ): self.module = module - self.path = path + self.path = _preserving_path_as_str(path) super().__init__( rev_id, module.down_revision, @@ -840,6 +821,10 @@ def __init__(self, module: ModuleType, rev_id: str, path: str): path: str """Filesystem path of the script.""" + @property + def _script_path(self) -> Path: + return Path(self.path) + _db_current_indicator: Optional[bool] = None """Utility variable which when set will cause string output to indicate this is a "current" version in some database""" @@ -860,7 +845,7 @@ def longdoc(self) -> str: doc = doc.decode( # type: ignore[attr-defined] self.module._alembic_source_encoding ) - return doc.strip() # type: ignore[union-attr] + return doc.strip() else: return "" @@ -972,36 +957,33 @@ def 
_format_down_revision(self) -> str: return util.format_as_comma(self._versioned_down_revisions) @classmethod - def _from_path( - cls, scriptdir: ScriptDirectory, path: str - ) -> Optional[Script]: - dir_, filename = os.path.split(path) - return cls._from_filename(scriptdir, dir_, filename) - - @classmethod - def _list_py_dir(cls, scriptdir: ScriptDirectory, path: str) -> List[str]: + def _list_py_dir( + cls, scriptdir: ScriptDirectory, path: Path + ) -> List[Path]: paths = [] - for root, dirs, files in os.walk(path, topdown=True): - if root.endswith("__pycache__"): + for root, dirs, files in compat.path_walk(path, top_down=True): + if root.name.endswith("__pycache__"): # a special case - we may include these files # if a `sourceless` option is specified continue for filename in sorted(files): - paths.append(os.path.join(root, filename)) + paths.append(root / filename) if scriptdir.sourceless: # look for __pycache__ - py_cache_path = os.path.join(root, "__pycache__") - if os.path.exists(py_cache_path): + py_cache_path = root / "__pycache__" + if py_cache_path.exists(): # add all files from __pycache__ whose filename is not # already in the names we got from the version directory. 
# add as relative paths including __pycache__ token - names = {filename.split(".")[0] for filename in files} + names = { + Path(filename).name.split(".")[0] for filename in files + } paths.extend( - os.path.join(py_cache_path, pyc) - for pyc in os.listdir(py_cache_path) - if pyc.split(".")[0] not in names + py_cache_path / pyc + for pyc in py_cache_path.iterdir() + if pyc.name.split(".")[0] not in names ) if not scriptdir.recursive_version_locations: @@ -1016,9 +998,13 @@ def _list_py_dir(cls, scriptdir: ScriptDirectory, path: str) -> List[str]: return paths @classmethod - def _from_filename( - cls, scriptdir: ScriptDirectory, dir_: str, filename: str + def _from_path( + cls, scriptdir: ScriptDirectory, path: Union[str, os.PathLike[str]] ) -> Optional[Script]: + + path = Path(path) + dir_, filename = path.parent, path.name + if scriptdir.sourceless: py_match = _sourceless_rev_file.match(filename) else: @@ -1036,8 +1022,8 @@ def _from_filename( is_c = is_o = False if is_o or is_c: - py_exists = os.path.exists(os.path.join(dir_, py_filename)) - pyc_exists = os.path.exists(os.path.join(dir_, py_filename + "c")) + py_exists = (dir_ / py_filename).exists() + pyc_exists = (dir_ / (py_filename + "c")).exists() # prefer .py over .pyc because we'd like to get the # source encoding; prefer .pyc over .pyo because we'd like to @@ -1053,14 +1039,14 @@ def _from_filename( m = _legacy_rev.match(filename) if not m: raise util.CommandError( - "Could not determine revision id from filename %s. " + "Could not determine revision id from " + f"filename {filename}. " "Be sure the 'revision' variable is " "declared inside the script (please see 'Upgrading " "from Alembic 0.1 to 0.2' in the documentation)." 
- % filename ) else: revision = m.group(1) else: revision = module.revision - return Script(module, revision, os.path.join(dir_, filename)) + return Script(module, revision, dir_ / filename) diff --git a/libs/alembic/script/revision.py b/libs/alembic/script/revision.py index c3108e985a..5825da34f4 100644 --- a/libs/alembic/script/revision.py +++ b/libs/alembic/script/revision.py @@ -45,7 +45,7 @@ _TR = TypeVar("_TR", bound=Optional[_RevisionOrStr]) _relative_destination = re.compile(r"(?:(.+?)@)?(\w+)?((?:\+|-)\d+)") -_revision_illegal_chars = ["@", "-", "+"] +_revision_illegal_chars = ["@", "-", "+", ":"] class _CollectRevisionsProtocol(Protocol): @@ -1708,7 +1708,7 @@ def tuple_rev_as_scalar(rev: None) -> None: ... @overload def tuple_rev_as_scalar( - rev: Union[Tuple[_T, ...], List[_T]] + rev: Union[Tuple[_T, ...], List[_T]], ) -> Union[_T, Tuple[_T, ...], List[_T]]: ... diff --git a/libs/alembic/script/write_hooks.py b/libs/alembic/script/write_hooks.py index 9977147921..3dd49d9108 100644 --- a/libs/alembic/script/write_hooks.py +++ b/libs/alembic/script/write_hooks.py @@ -3,20 +3,21 @@ from __future__ import annotations +import importlib.util +import os import shlex import subprocess import sys from typing import Any from typing import Callable -from typing import Dict -from typing import List -from typing import Mapping -from typing import Optional -from typing import Union +from typing import TYPE_CHECKING from .. import util from ..util import compat +from ..util.pyfiles import _preserving_path_as_str +if TYPE_CHECKING: + from ..config import PostWriteHookConfig REVISION_SCRIPT_TOKEN = "REVISION_SCRIPT_FILENAME" @@ -43,16 +44,19 @@ def decorate(fn): def _invoke( - name: str, revision: str, options: Mapping[str, Union[str, int]] + name: str, + revision_path: str | os.PathLike[str], + options: PostWriteHookConfig, ) -> Any: """Invokes the formatter registered for the given name. 
:param name: The name of a formatter in the registry - :param revision: A :class:`.MigrationRevision` instance + :param revision_path: string path to the revision file :param options: A dict containing kwargs passed to the specified formatter. :raises: :class:`alembic.util.CommandError` """ + revision_path = _preserving_path_as_str(revision_path) try: hook = _registry[name] except KeyError as ke: @@ -60,39 +64,31 @@ f"No formatter with name '{name}' registered" ) from ke else: - return hook(revision, options) + return hook(revision_path, options) -def _run_hooks(path: str, hook_config: Mapping[str, str]) -> None: +def _run_hooks( + path: str | os.PathLike[str], hooks: list[PostWriteHookConfig] +) -> None: """Invoke hooks for a generated revision.""" - from .base import _split_on_space_comma - - names = _split_on_space_comma.split(hook_config.get("hooks", "")) - - for name in names: - if not name: - continue - opts = { - key[len(name) + 1 :]: hook_config[key] - for key in hook_config - if key.startswith(name + ".") - } - opts["_hook_name"] = name + for hook in hooks: + name = hook["_hook_name"] try: - type_ = opts["type"] + type_ = hook["type"] except KeyError as ke: raise util.CommandError( - f"Key {name}.type is required for post write hook {name!r}" + f"Key '{name}.type' (or 'type' in toml) is required " + f"for post write hook {name!r}" ) from ke else: with util.status( f"Running post write hook {name!r}", newline=True ): - _invoke(type_, path, opts) + _invoke(type_, path, hook) -def _parse_cmdline_options(cmdline_options_str: str, path: str) -> List[str]: +def _parse_cmdline_options(cmdline_options_str: str, path: str) -> list[str]: """Parse options from a string into a list. 
Also substitutes the revision script token with the actual filename of @@ -113,17 +109,38 @@ def _parse_cmdline_options(cmdline_options_str: str, path: str) -> List[str]: return cmdline_options_list -@register("console_scripts") -def console_scripts( - path: str, options: dict, ignore_output: bool = False -) -> None: +def _get_required_option(options: dict, name: str) -> str: try: - entrypoint_name = options["entrypoint"] + return options[name] except KeyError as ke: raise util.CommandError( - f"Key {options['_hook_name']}.entrypoint is required for post " + f"Key {options['_hook_name']}.{name} is required for post " f"write hook {options['_hook_name']!r}" ) from ke + + +def _run_hook( + path: str, options: dict, ignore_output: bool, command: list[str] +) -> None: + cwd: str | None = options.get("cwd", None) + cmdline_options_str = options.get("options", "") + cmdline_options_list = _parse_cmdline_options(cmdline_options_str, path) + + kw: dict[str, Any] = {} + if ignore_output: + kw["stdout"] = kw["stderr"] = subprocess.DEVNULL + + subprocess.run([*command, *cmdline_options_list], cwd=cwd, **kw) + + +@register("console_scripts") +def console_scripts( + path: str, + options: dict, + ignore_output: bool = False, + verify_version: tuple[int, ...] 
| None = None, +) -> None: +    entrypoint_name = _get_required_option(options, "entrypoint") for entry in compat.importlib_metadata_get("console_scripts"): if entry.name == entrypoint_name: impl: Any = entry @@ -132,48 +149,33 @@ def console_scripts( raise util.CommandError( f"Could not find entrypoint console_scripts.{entrypoint_name}" ) - cwd: Optional[str] = options.get("cwd", None) - cmdline_options_str = options.get("options", "") - cmdline_options_list = _parse_cmdline_options(cmdline_options_str, path) - kw: Dict[str, Any] = {} - if ignore_output: - kw["stdout"] = kw["stderr"] = subprocess.DEVNULL + if verify_version: + pyscript = ( + f"import {impl.module}; " + f"assert tuple(int(x) for x in {impl.module}.__version__.split('.')) >= {verify_version}, " # noqa: E501 + f"'need at least version {verify_version} of {impl.name}'; " + f"{impl.module}.{impl.attr}()" + ) + else: + pyscript = f"import {impl.module}; {impl.module}.{impl.attr}()" - subprocess.run( - [ - sys.executable, - "-c", - f"import {impl.module}; {impl.module}.{impl.attr}()", - ] - + cmdline_options_list, - cwd=cwd, - **kw, - ) + command = [sys.executable, "-c", pyscript] + _run_hook(path, options, ignore_output, command) @register("exec") def exec_(path: str, options: dict, ignore_output: bool = False) -> None: - try: - executable = options["executable"] - except KeyError as ke: - raise util.CommandError( - f"Key {options['_hook_name']}.executable is required for post " - f"write hook {options['_hook_name']!r}" - ) from ke - cwd: Optional[str] = options.get("cwd", None) - cmdline_options_str = options.get("options", "") - cmdline_options_list = _parse_cmdline_options(cmdline_options_str, path) + executable = _get_required_option(options, "executable") + _run_hook(path, options, ignore_output, command=[executable]) - kw: Dict[str, Any] = {} - if ignore_output: - kw["stdout"] = kw["stderr"] = subprocess.DEVNULL - subprocess.run( - [ - executable, - *cmdline_options_list, - ], - cwd=cwd, - **kw, - ) 
+@register("module") +def module(path: str, options: dict, ignore_output: bool = False) -> None: + module_name = _get_required_option(options, "module") + + if importlib.util.find_spec(module_name) is None: + raise util.CommandError(f"Could not find module {module_name}") + + command = [sys.executable, "-m", module_name] + _run_hook(path, options, ignore_output, command) diff --git a/libs/alembic/templates/async/alembic.ini.mako b/libs/alembic/templates/async/alembic.ini.mako index 7eee913205..02ccb0f6de 100644 --- a/libs/alembic/templates/async/alembic.ini.mako +++ b/libs/alembic/templates/async/alembic.ini.mako @@ -2,21 +2,28 @@ [alembic] # path to migration scripts. -# Use forward slashes (/) also on windows to provide an os agnostic path +# this is typically a path given in POSIX (e.g. forward slashes) +# format, relative to the token %(here)s which refers to the location of this +# ini file script_location = ${script_location} # template used to generate migration file names; The default value is %%(rev)s_%%(slug)s # Uncomment the line below if you want the files to be prepended with date and time +# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file +# for all available tokens # file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s +# Or organize into date-based subdirectories (requires recursive_version_locations = true) +# file_template = %%(year)d/%%(month).2d/%%(day).2d_%%(hour).2d%%(minute).2d_%%(second).2d_%%(rev)s_%%(slug)s # sys.path path, will be prepended to sys.path if present. -# defaults to the current working directory. +# defaults to the current working directory. for multiple paths, the path separator +# is defined by "path_separator" below. prepend_sys_path = . # timezone to use when rendering the date within the migration file # as well as the filename. -# If specified, requires the python>=3.9 or backports.zoneinfo library. 
-# Any required deps can installed by adding `alembic[tz]` to the pip requirements +# If specified, requires the tzdata library which can be installed by adding +# `alembic[tz]` to the pip requirements. # string value is passed to ZoneInfo() # leave blank for localtime # timezone = @@ -34,21 +41,38 @@ prepend_sys_path = . # sourceless = false # version location specification; This defaults -# to ${script_location}/versions. When using multiple version +# to /versions. When using multiple version # directories, initial revisions must be specified with --version-path. -# The path separator used here should be the separator specified by "version_path_separator" below. -# version_locations = %(here)s/bar:%(here)s/bat:${script_location}/versions - -# version path separator; As mentioned above, this is the character used to split -# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. -# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. -# Valid values for version_path_separator are: +# The path separator used here should be the separator specified by "path_separator" +# below. +# version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions + +# path_separator; This indicates what character is used to split lists of file +# paths, including version_locations and prepend_sys_path within configparser +# files such as alembic.ini. +# The default rendered in new alembic.ini files is "os", which uses os.pathsep +# to provide os-dependent path splitting. +# +# Note that in order to support legacy alembic.ini files, this default does NOT +# take place if path_separator is not present in alembic.ini. If this +# option is omitted entirely, fallback logic is as follows: +# +# 1. Parsing of the version_locations option falls back to using the legacy +# "version_path_separator" key, which if absent then falls back to the legacy +# behavior of splitting on spaces and/or commas. 
+# 2. Parsing of the prepend_sys_path option falls back to the legacy +# behavior of splitting on spaces, commas, or colons. +# +# Valid values for path_separator are: # -# version_path_separator = : -# version_path_separator = ; -# version_path_separator = space -# version_path_separator = newline -version_path_separator = os # Use os.pathsep. Default configuration used for new projects. +# path_separator = : +# path_separator = ; +# path_separator = space +# path_separator = newline +# +# Use os.pathsep. Default configuration used for new projects. +path_separator = os + # set to 'true' to search source files recursively # in each "version_locations" directory @@ -59,6 +83,9 @@ version_path_separator = os # Use os.pathsep. Default configuration used for ne # are written from script.py.mako # output_encoding = utf-8 +# database URL. This is consumed by the user-maintained env.py script only. +# other means of configuring database URLs may be customized within the env.py +# file. sqlalchemy.url = driver://user:pass@localhost/dbname @@ -73,13 +100,20 @@ sqlalchemy.url = driver://user:pass@localhost/dbname # black.entrypoint = black # black.options = -l 79 REVISION_SCRIPT_FILENAME -# lint with attempts to fix using "ruff" - use the exec runner, execute a binary +# lint with attempts to fix using "ruff" - use the module runner, against the "ruff" module +# hooks = ruff +# ruff.type = module +# ruff.module = ruff +# ruff.options = check --fix REVISION_SCRIPT_FILENAME + +# Alternatively, use the exec runner to execute a binary found on your PATH # hooks = ruff # ruff.type = exec -# ruff.executable = %(here)s/.venv/bin/ruff -# ruff.options = --fix REVISION_SCRIPT_FILENAME +# ruff.executable = ruff +# ruff.options = check --fix REVISION_SCRIPT_FILENAME -# Logging configuration +# Logging configuration. This is also consumed by the user-maintained +# env.py script only. 
[loggers] keys = root,sqlalchemy,alembic diff --git a/libs/alembic/templates/async/script.py.mako b/libs/alembic/templates/async/script.py.mako index fbc4b07dce..11016301e7 100644 --- a/libs/alembic/templates/async/script.py.mako +++ b/libs/alembic/templates/async/script.py.mako @@ -13,14 +13,16 @@ ${imports if imports else ""} # revision identifiers, used by Alembic. revision: str = ${repr(up_revision)} -down_revision: Union[str, None] = ${repr(down_revision)} +down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)} branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} def upgrade() -> None: + """Upgrade schema.""" ${upgrades if upgrades else "pass"} def downgrade() -> None: + """Downgrade schema.""" ${downgrades if downgrades else "pass"} diff --git a/libs/alembic/templates/generic/alembic.ini.mako b/libs/alembic/templates/generic/alembic.ini.mako index f1f76cae80..0127b2af8d 100644 --- a/libs/alembic/templates/generic/alembic.ini.mako +++ b/libs/alembic/templates/generic/alembic.ini.mako @@ -1,8 +1,10 @@ # A generic, single database configuration. [alembic] -# path to migration scripts -# Use forward slashes (/) also on windows to provide an os agnostic path +# path to migration scripts. +# this is typically a path given in POSIX (e.g. 
forward slashes) +# format, relative to the token %(here)s which refers to the location of this +# ini file script_location = ${script_location} # template used to generate migration file names; The default value is %%(rev)s_%%(slug)s @@ -10,15 +12,19 @@ script_location = ${script_location} # see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file # for all available tokens # file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s +# Or organize into date-based subdirectories (requires recursive_version_locations = true) +# file_template = %%(year)d/%%(month).2d/%%(day).2d_%%(hour).2d%%(minute).2d_%%(second).2d_%%(rev)s_%%(slug)s # sys.path path, will be prepended to sys.path if present. -# defaults to the current working directory. +# defaults to the current working directory. for multiple paths, the path separator +# is defined by "path_separator" below. prepend_sys_path = . + # timezone to use when rendering the date within the migration file # as well as the filename. -# If specified, requires the python>=3.9 or backports.zoneinfo library. -# Any required deps can installed by adding `alembic[tz]` to the pip requirements +# If specified, requires the tzdata library which can be installed by adding +# `alembic[tz]` to the pip requirements. # string value is passed to ZoneInfo() # leave blank for localtime # timezone = @@ -36,21 +42,37 @@ prepend_sys_path = . # sourceless = false # version location specification; This defaults -# to ${script_location}/versions. When using multiple version +# to /versions. When using multiple version # directories, initial revisions must be specified with --version-path. -# The path separator used here should be the separator specified by "version_path_separator" below. -# version_locations = %(here)s/bar:%(here)s/bat:${script_location}/versions - -# version path separator; As mentioned above, this is the character used to split -# version_locations. 
The default within new alembic.ini files is "os", which uses os.pathsep. -# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. -# Valid values for version_path_separator are: +# The path separator used here should be the separator specified by "path_separator" +# below. +# version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions + +# path_separator; This indicates what character is used to split lists of file +# paths, including version_locations and prepend_sys_path within configparser +# files such as alembic.ini. +# The default rendered in new alembic.ini files is "os", which uses os.pathsep +# to provide os-dependent path splitting. +# +# Note that in order to support legacy alembic.ini files, this default does NOT +# take place if path_separator is not present in alembic.ini. If this +# option is omitted entirely, fallback logic is as follows: +# +# 1. Parsing of the version_locations option falls back to using the legacy +# "version_path_separator" key, which if absent then falls back to the legacy +# behavior of splitting on spaces and/or commas. +# 2. Parsing of the prepend_sys_path option falls back to the legacy +# behavior of splitting on spaces, commas, or colons. # -# version_path_separator = : -# version_path_separator = ; -# version_path_separator = space -# version_path_separator = newline -version_path_separator = os # Use os.pathsep. Default configuration used for new projects. +# Valid values for path_separator are: +# +# path_separator = : +# path_separator = ; +# path_separator = space +# path_separator = newline +# +# Use os.pathsep. Default configuration used for new projects. +path_separator = os # set to 'true' to search source files recursively # in each "version_locations" directory @@ -61,6 +83,9 @@ version_path_separator = os # Use os.pathsep. Default configuration used for ne # are written from script.py.mako # output_encoding = utf-8 +# database URL. 
This is consumed by the user-maintained env.py script only. +# other means of configuring database URLs may be customized within the env.py +# file. sqlalchemy.url = driver://user:pass@localhost/dbname @@ -75,13 +100,20 @@ sqlalchemy.url = driver://user:pass@localhost/dbname # black.entrypoint = black # black.options = -l 79 REVISION_SCRIPT_FILENAME -# lint with attempts to fix using "ruff" - use the exec runner, execute a binary +# lint with attempts to fix using "ruff" - use the module runner, against the "ruff" module +# hooks = ruff +# ruff.type = module +# ruff.module = ruff +# ruff.options = check --fix REVISION_SCRIPT_FILENAME + +# Alternatively, use the exec runner to execute a binary found on your PATH # hooks = ruff # ruff.type = exec -# ruff.executable = %(here)s/.venv/bin/ruff -# ruff.options = --fix REVISION_SCRIPT_FILENAME +# ruff.executable = ruff +# ruff.options = check --fix REVISION_SCRIPT_FILENAME -# Logging configuration +# Logging configuration. This is also consumed by the user-maintained +# env.py script only. [loggers] keys = root,sqlalchemy,alembic diff --git a/libs/alembic/templates/generic/script.py.mako b/libs/alembic/templates/generic/script.py.mako index fbc4b07dce..11016301e7 100644 --- a/libs/alembic/templates/generic/script.py.mako +++ b/libs/alembic/templates/generic/script.py.mako @@ -13,14 +13,16 @@ ${imports if imports else ""} # revision identifiers, used by Alembic. 
revision: str = ${repr(up_revision)} -down_revision: Union[str, None] = ${repr(down_revision)} +down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)} branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} def upgrade() -> None: + """Upgrade schema.""" ${upgrades if upgrades else "pass"} def downgrade() -> None: + """Downgrade schema.""" ${downgrades if downgrades else "pass"} diff --git a/libs/alembic/templates/multidb/alembic.ini.mako b/libs/alembic/templates/multidb/alembic.ini.mako index bf383ea1de..76846465be 100644 --- a/libs/alembic/templates/multidb/alembic.ini.mako +++ b/libs/alembic/templates/multidb/alembic.ini.mako @@ -1,8 +1,10 @@ # a multi-database configuration. [alembic] -# path to migration scripts -# Use forward slashes (/) also on windows to provide an os agnostic path +# path to migration scripts. +# this is typically a path given in POSIX (e.g. forward slashes) +# format, relative to the token %(here)s which refers to the location of this +# ini file script_location = ${script_location} # template used to generate migration file names; The default value is %%(rev)s_%%(slug)s @@ -10,15 +12,18 @@ script_location = ${script_location} # see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file # for all available tokens # file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s +# Or organize into date-based subdirectories (requires recursive_version_locations = true) +# file_template = %%(year)d/%%(month).2d/%%(day).2d_%%(hour).2d%%(minute).2d_%%(second).2d_%%(rev)s_%%(slug)s # sys.path path, will be prepended to sys.path if present. -# defaults to the current working directory. +# defaults to the current working directory. for multiple paths, the path separator +# is defined by "path_separator" below. prepend_sys_path = . 
# timezone to use when rendering the date within the migration file # as well as the filename. -# If specified, requires the python>=3.9 or backports.zoneinfo library. -# Any required deps can installed by adding `alembic[tz]` to the pip requirements +# If specified, requires the tzdata library which can be installed by adding +# `alembic[tz]` to the pip requirements. # string value is passed to ZoneInfo() # leave blank for localtime # timezone = @@ -36,21 +41,37 @@ prepend_sys_path = . # sourceless = false # version location specification; This defaults -# to ${script_location}/versions. When using multiple version +# to /versions. When using multiple version # directories, initial revisions must be specified with --version-path. -# The path separator used here should be the separator specified by "version_path_separator" below. -# version_locations = %(here)s/bar:%(here)s/bat:${script_location}/versions - -# version path separator; As mentioned above, this is the character used to split -# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. -# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. -# Valid values for version_path_separator are: +# The path separator used here should be the separator specified by "path_separator" +# below. +# version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions + +# path_separator; This indicates what character is used to split lists of file +# paths, including version_locations and prepend_sys_path within configparser +# files such as alembic.ini. +# The default rendered in new alembic.ini files is "os", which uses os.pathsep +# to provide os-dependent path splitting. +# +# Note that in order to support legacy alembic.ini files, this default does NOT +# take place if path_separator is not present in alembic.ini. If this +# option is omitted entirely, fallback logic is as follows: +# +# 1. 
Parsing of the version_locations option falls back to using the legacy +# "version_path_separator" key, which if absent then falls back to the legacy +# behavior of splitting on spaces and/or commas. +# 2. Parsing of the prepend_sys_path option falls back to the legacy +# behavior of splitting on spaces, commas, or colons. +# +# Valid values for path_separator are: # -# version_path_separator = : -# version_path_separator = ; -# version_path_separator = space -# version_path_separator = newline -version_path_separator = os # Use os.pathsep. Default configuration used for new projects. +# path_separator = : +# path_separator = ; +# path_separator = space +# path_separator = newline +# +# Use os.pathsep. Default configuration used for new projects. +path_separator = os # set to 'true' to search source files recursively # in each "version_locations" directory @@ -61,6 +82,13 @@ version_path_separator = os # Use os.pathsep. Default configuration used for ne # are written from script.py.mako # output_encoding = utf-8 +# for multiple database configuration, new named sections are added +# which each include a distinct ``sqlalchemy.url`` entry. A custom value +# ``databases`` is added which indicates a listing of the per-database sections. +# The ``databases`` entry as well as the URLs present in the ``[engine1]`` +# and ``[engine2]`` sections continue to be consumed by the user-maintained env.py +# script only. 
+ databases = engine1, engine2 [engine1] @@ -80,13 +108,20 @@ sqlalchemy.url = driver://user:pass@localhost/dbname2 # black.entrypoint = black # black.options = -l 79 REVISION_SCRIPT_FILENAME -# lint with attempts to fix using "ruff" - use the exec runner, execute a binary +# lint with attempts to fix using "ruff" - use the module runner, against the "ruff" module +# hooks = ruff +# ruff.type = module +# ruff.module = ruff +# ruff.options = check --fix REVISION_SCRIPT_FILENAME + +# Alternatively, use the exec runner to execute a binary found on your PATH # hooks = ruff # ruff.type = exec -# ruff.executable = %(here)s/.venv/bin/ruff -# ruff.options = --fix REVISION_SCRIPT_FILENAME +# ruff.executable = ruff +# ruff.options = check --fix REVISION_SCRIPT_FILENAME -# Logging configuration +# Logging configuration. This is also consumed by the user-maintained +# env.py script only. [loggers] keys = root,sqlalchemy,alembic diff --git a/libs/alembic/templates/multidb/script.py.mako b/libs/alembic/templates/multidb/script.py.mako index 6108b8a0dc..8e667d84c8 100644 --- a/libs/alembic/templates/multidb/script.py.mako +++ b/libs/alembic/templates/multidb/script.py.mako @@ -16,16 +16,18 @@ ${imports if imports else ""} # revision identifiers, used by Alembic. 
revision: str = ${repr(up_revision)} -down_revision: Union[str, None] = ${repr(down_revision)} +down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)} branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} def upgrade(engine_name: str) -> None: + """Upgrade schema.""" globals()["upgrade_%s" % engine_name]() def downgrade(engine_name: str) -> None: + """Downgrade schema.""" globals()["downgrade_%s" % engine_name]() <% @@ -38,10 +40,12 @@ def downgrade(engine_name: str) -> None: % for db_name in re.split(r',\s*', db_names): def upgrade_${db_name}() -> None: + """Upgrade ${db_name} schema.""" ${context.get("%s_upgrades" % db_name, "pass")} def downgrade_${db_name}() -> None: + """Downgrade ${db_name} schema.""" ${context.get("%s_downgrades" % db_name, "pass")} % endfor diff --git a/libs/alembic/templates/pyproject/README b/libs/alembic/templates/pyproject/README new file mode 100644 index 0000000000..fdacc05f68 --- /dev/null +++ b/libs/alembic/templates/pyproject/README @@ -0,0 +1 @@ +pyproject configuration, based on the generic configuration. \ No newline at end of file diff --git a/libs/alembic/templates/pyproject/alembic.ini.mako b/libs/alembic/templates/pyproject/alembic.ini.mako new file mode 100644 index 0000000000..3d10f0e46c --- /dev/null +++ b/libs/alembic/templates/pyproject/alembic.ini.mako @@ -0,0 +1,44 @@ +# A generic, single database configuration. + +[alembic] + +# database URL. This is consumed by the user-maintained env.py script only. +# other means of configuring database URLs may be customized within the env.py +# file. 
+sqlalchemy.url = driver://user:pass@localhost/dbname + + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARNING +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARNING +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/libs/alembic/templates/pyproject/env.py b/libs/alembic/templates/pyproject/env.py new file mode 100644 index 0000000000..36112a3c68 --- /dev/null +++ b/libs/alembic/templates/pyproject/env.py @@ -0,0 +1,78 @@ +from logging.config import fileConfig + +from sqlalchemy import engine_from_config +from sqlalchemy import pool + +from alembic import context + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +target_metadata = None + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. 
+ + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. + + """ + connectable = engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure( + connection=connection, target_metadata=target_metadata + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/libs/alembic/templates/pyproject/pyproject.toml.mako b/libs/alembic/templates/pyproject/pyproject.toml.mako new file mode 100644 index 0000000000..7edd43b0c9 --- /dev/null +++ b/libs/alembic/templates/pyproject/pyproject.toml.mako @@ -0,0 +1,84 @@ +[tool.alembic] + +# path to migration scripts. +# this is typically a path given in POSIX (e.g. 
forward slashes) +# format, relative to the token %(here)s which refers to the location of this +# ini file +script_location = "${script_location}" + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file +# for all available tokens +# file_template = "%%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s" +# Or organize into date-based subdirectories (requires recursive_version_locations = true) +# file_template = "%%(year)d/%%(month).2d/%%(day).2d_%%(hour).2d%%(minute).2d_%%(second).2d_%%(rev)s_%%(slug)s" + +# additional paths to be prepended to sys.path. defaults to the current working directory. +prepend_sys_path = [ + "." +] + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the tzdata library which can be installed by adding +# `alembic[tz]` to the pip requirements. +# string value is passed to ZoneInfo() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to /versions. When using multiple version +# directories, initial revisions must be specified with --version-path. 
+# version_locations = [ +# "%(here)s/alembic/versions", +# "%(here)s/foo/bar" +# ] + + +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = "utf-8" + +# This section defines scripts or Python functions that are run +# on newly generated revision scripts. See the documentation for further +# detail and examples +# [[tool.alembic.post_write_hooks]] +# format using "black" - use the console_scripts runner, +# against the "black" entrypoint +# name = "black" +# type = "console_scripts" +# entrypoint = "black" +# options = "-l 79 REVISION_SCRIPT_FILENAME" +# +# [[tool.alembic.post_write_hooks]] +# lint with attempts to fix using "ruff" - use the module runner, against the "ruff" module +# name = "ruff" +# type = "module" +# module = "ruff" +# options = "check --fix REVISION_SCRIPT_FILENAME" +# +# [[tool.alembic.post_write_hooks]] +# Alternatively, use the exec runner to execute a binary found on your PATH +# name = "ruff" +# type = "exec" +# executable = "ruff" +# options = "check --fix REVISION_SCRIPT_FILENAME" + diff --git a/libs/alembic/templates/pyproject/script.py.mako b/libs/alembic/templates/pyproject/script.py.mako new file mode 100644 index 0000000000..11016301e7 --- /dev/null +++ b/libs/alembic/templates/pyproject/script.py.mako @@ -0,0 +1,28 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. 
+revision: str = ${repr(up_revision)} +down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + """Upgrade schema.""" + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + """Downgrade schema.""" + ${downgrades if downgrades else "pass"} diff --git a/libs/alembic/templates/pyproject_async/README b/libs/alembic/templates/pyproject_async/README new file mode 100644 index 0000000000..dfd718d3b9 --- /dev/null +++ b/libs/alembic/templates/pyproject_async/README @@ -0,0 +1 @@ +pyproject configuration, with an async dbapi. \ No newline at end of file diff --git a/libs/alembic/templates/pyproject_async/alembic.ini.mako b/libs/alembic/templates/pyproject_async/alembic.ini.mako new file mode 100644 index 0000000000..3d10f0e46c --- /dev/null +++ b/libs/alembic/templates/pyproject_async/alembic.ini.mako @@ -0,0 +1,44 @@ +# A generic, single database configuration. + +[alembic] + +# database URL. This is consumed by the user-maintained env.py script only. +# other means of configuring database URLs may be customized within the env.py +# file. 
+sqlalchemy.url = driver://user:pass@localhost/dbname + + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARNING +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARNING +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/libs/alembic/templates/pyproject_async/env.py b/libs/alembic/templates/pyproject_async/env.py new file mode 100644 index 0000000000..9f2d519400 --- /dev/null +++ b/libs/alembic/templates/pyproject_async/env.py @@ -0,0 +1,89 @@ +import asyncio +from logging.config import fileConfig + +from sqlalchemy import pool +from sqlalchemy.engine import Connection +from sqlalchemy.ext.asyncio import async_engine_from_config + +from alembic import context + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +target_metadata = None + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. 
+ + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def do_run_migrations(connection: Connection) -> None: + context.configure(connection=connection, target_metadata=target_metadata) + + with context.begin_transaction(): + context.run_migrations() + + +async def run_async_migrations() -> None: + """In this scenario we need to create an Engine + and associate a connection with the context. + + """ + + connectable = async_engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + async with connectable.connect() as connection: + await connection.run_sync(do_run_migrations) + + await connectable.dispose() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode.""" + + asyncio.run(run_async_migrations()) + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/libs/alembic/templates/pyproject_async/pyproject.toml.mako b/libs/alembic/templates/pyproject_async/pyproject.toml.mako new file mode 100644 index 0000000000..7edd43b0c9 --- /dev/null +++ b/libs/alembic/templates/pyproject_async/pyproject.toml.mako @@ -0,0 +1,84 @@ +[tool.alembic] + +# path to migration scripts. +# this is typically a path given in POSIX (e.g. 
forward slashes) +# format, relative to the token %(here)s which refers to the location of this +# ini file +script_location = "${script_location}" + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file +# for all available tokens +# file_template = "%%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s" +# Or organize into date-based subdirectories (requires recursive_version_locations = true) +# file_template = "%%(year)d/%%(month).2d/%%(day).2d_%%(hour).2d%%(minute).2d_%%(second).2d_%%(rev)s_%%(slug)s" + +# additional paths to be prepended to sys.path. defaults to the current working directory. +prepend_sys_path = [ + "." +] + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the tzdata library which can be installed by adding +# `alembic[tz]` to the pip requirements. +# string value is passed to ZoneInfo() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to /versions. When using multiple version +# directories, initial revisions must be specified with --version-path. 
+# version_locations = [ +# "%(here)s/alembic/versions", +# "%(here)s/foo/bar" +# ] + + +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = "utf-8" + +# This section defines scripts or Python functions that are run +# on newly generated revision scripts. See the documentation for further +# detail and examples +# [[tool.alembic.post_write_hooks]] +# format using "black" - use the console_scripts runner, +# against the "black" entrypoint +# name = "black" +# type = "console_scripts" +# entrypoint = "black" +# options = "-l 79 REVISION_SCRIPT_FILENAME" +# +# [[tool.alembic.post_write_hooks]] +# lint with attempts to fix using "ruff" - use the module runner, against the "ruff" module +# name = "ruff" +# type = "module" +# module = "ruff" +# options = "check --fix REVISION_SCRIPT_FILENAME" +# +# [[tool.alembic.post_write_hooks]] +# Alternatively, use the exec runner to execute a binary found on your PATH +# name = "ruff" +# type = "exec" +# executable = "ruff" +# options = "check --fix REVISION_SCRIPT_FILENAME" + diff --git a/libs/alembic/templates/pyproject_async/script.py.mako b/libs/alembic/templates/pyproject_async/script.py.mako new file mode 100644 index 0000000000..11016301e7 --- /dev/null +++ b/libs/alembic/templates/pyproject_async/script.py.mako @@ -0,0 +1,28 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. 
+revision: str = ${repr(up_revision)} +down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + """Upgrade schema.""" + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + """Downgrade schema.""" + ${downgrades if downgrades else "pass"} diff --git a/libs/alembic/testing/__init__.py b/libs/alembic/testing/__init__.py index 0407adfe9c..32915081d9 100644 --- a/libs/alembic/testing/__init__.py +++ b/libs/alembic/testing/__init__.py @@ -9,12 +9,15 @@ from sqlalchemy.testing.config import combinations from sqlalchemy.testing.config import fixture from sqlalchemy.testing.config import requirements as requires +from sqlalchemy.testing.config import Variation +from sqlalchemy.testing.config import variation from .assertions import assert_raises from .assertions import assert_raises_message from .assertions import emits_python_deprecation_warning from .assertions import eq_ from .assertions import eq_ignore_whitespace +from .assertions import expect_deprecated from .assertions import expect_raises from .assertions import expect_raises_message from .assertions import expect_sqlalchemy_deprecated diff --git a/libs/alembic/testing/assertions.py b/libs/alembic/testing/assertions.py index e071697cd7..e76103d5f3 100644 --- a/libs/alembic/testing/assertions.py +++ b/libs/alembic/testing/assertions.py @@ -8,6 +8,7 @@ from sqlalchemy import exc as sa_exc from sqlalchemy.engine import default +from sqlalchemy.engine import URL from sqlalchemy.testing.assertions import _expect_warnings from sqlalchemy.testing.assertions import eq_ # noqa from sqlalchemy.testing.assertions import is_ # noqa @@ -17,8 +18,6 @@ from sqlalchemy.testing.assertions import ne_ # noqa from sqlalchemy.util import decorator -from ..util import sqla_compat - def _assert_proper_exception_context(exception): """assert 
that any exception we're catching does not have a __context__ @@ -127,12 +126,13 @@ def _get_dialect(name): if name is None or name == "default": return default.DefaultDialect() else: - d = sqla_compat._create_url(name).get_dialect()() + d = URL.create(name).get_dialect()() if name == "postgresql": d.implicit_returning = True elif name == "mssql": d.legacy_schema_aliasing = False + d.default_schema_name = "dbo" return d @@ -168,6 +168,10 @@ def decorate(fn, *args, **kw): return decorate +def expect_deprecated(*messages, **kw): + return _expect_warnings(DeprecationWarning, messages, **kw) + + def expect_sqlalchemy_deprecated(*messages, **kw): return _expect_warnings(sa_exc.SADeprecationWarning, messages, **kw) diff --git a/libs/alembic/testing/env.py b/libs/alembic/testing/env.py index c37b4d3032..ad4de78353 100644 --- a/libs/alembic/testing/env.py +++ b/libs/alembic/testing/env.py @@ -1,5 +1,7 @@ import importlib.machinery +import logging import os +from pathlib import Path import shutil import textwrap @@ -16,15 +18,37 @@ def _get_staging_directory(): if provision.FOLLOWER_IDENT: - return "scratch_%s" % provision.FOLLOWER_IDENT + return f"scratch_{provision.FOLLOWER_IDENT}" else: return "scratch" +_restore_log = None + + +def _replace_logger(): + global _restore_log + if _restore_log is None: + _restore_log = (logging.root, logging.Logger.manager) + logging.root = logging.RootLogger(logging.WARNING) + logging.Logger.root = logging.root + logging.Logger.manager = logging.Manager(logging.root) + + +def _restore_logger(): + global _restore_log + + if _restore_log is not None: + logging.root, logging.Logger.manager = _restore_log + logging.Logger.root = logging.root + _restore_log = None + + def staging_env(create=True, template="generic", sourceless=False): + _replace_logger() cfg = _testing_config() if create: - path = os.path.join(_get_staging_directory(), "scripts") + path = _join_path(_get_staging_directory(), "scripts") assert not os.path.exists(path), ( 
"staging directory %s already exists; poor cleanup?" % path ) @@ -47,7 +71,7 @@ def staging_env(create=True, template="generic", sourceless=False): "pep3147_everything", ), sourceless make_sourceless( - os.path.join(path, "env.py"), + _join_path(path, "env.py"), "pep3147" if "pep3147" in sourceless else "simple", ) @@ -60,17 +84,18 @@ def clear_staging_env(): engines.testing_reaper.close_all() shutil.rmtree(_get_staging_directory(), True) + _restore_logger() def script_file_fixture(txt): - dir_ = os.path.join(_get_staging_directory(), "scripts") - path = os.path.join(dir_, "script.py.mako") + dir_ = _join_path(_get_staging_directory(), "scripts") + path = _join_path(dir_, "script.py.mako") with open(path, "w") as f: f.write(txt) def env_file_fixture(txt): - dir_ = os.path.join(_get_staging_directory(), "scripts") + dir_ = _join_path(_get_staging_directory(), "scripts") txt = ( """ from alembic import context @@ -80,7 +105,7 @@ def env_file_fixture(txt): + txt ) - path = os.path.join(dir_, "env.py") + path = _join_path(dir_, "env.py") pyc_path = util.pyc_file_from_path(path) if pyc_path: os.unlink(pyc_path) @@ -90,26 +115,26 @@ def env_file_fixture(txt): def _sqlite_file_db(tempname="foo.db", future=False, scope=None, **options): - dir_ = os.path.join(_get_staging_directory(), "scripts") + dir_ = _join_path(_get_staging_directory(), "scripts") url = "sqlite:///%s/%s" % (dir_, tempname) - if scope and util.sqla_14: + if scope: options["scope"] = scope return testing_util.testing_engine(url=url, future=future, options=options) def _sqlite_testing_config(sourceless=False, future=False): - dir_ = os.path.join(_get_staging_directory(), "scripts") - url = "sqlite:///%s/foo.db" % dir_ + dir_ = _join_path(_get_staging_directory(), "scripts") + url = f"sqlite:///{dir_}/foo.db" sqlalchemy_future = future or ("future" in config.db.__class__.__module__) return _write_config_file( - """ + f""" [alembic] -script_location = %s -sqlalchemy.url = %s -sourceless = %s -%s 
+script_location = {dir_} +sqlalchemy.url = {url} +sourceless = {"true" if sourceless else "false"} +{"sqlalchemy.future = true" if sqlalchemy_future else ""} [loggers] keys = root,sqlalchemy @@ -140,29 +165,25 @@ class = StreamHandler format = %%(levelname)-5.5s [%%(name)s] %%(message)s datefmt = %%H:%%M:%%S """ - % ( - dir_, - url, - "true" if sourceless else "false", - "sqlalchemy.future = true" if sqlalchemy_future else "", - ) ) def _multi_dir_testing_config(sourceless=False, extra_version_location=""): - dir_ = os.path.join(_get_staging_directory(), "scripts") + dir_ = _join_path(_get_staging_directory(), "scripts") sqlalchemy_future = "future" in config.db.__class__.__module__ url = "sqlite:///%s/foo.db" % dir_ return _write_config_file( - """ + f""" [alembic] -script_location = %s -sqlalchemy.url = %s -sqlalchemy.future = %s -sourceless = %s -version_locations = %%(here)s/model1/ %%(here)s/model2/ %%(here)s/model3/ %s +script_location = {dir_} +sqlalchemy.url = {url} +sqlalchemy.future = {"true" if sqlalchemy_future else "false"} +sourceless = {"true" if sourceless else "false"} +path_separator = space +version_locations = %(here)s/model1/ %(here)s/model2/ %(here)s/model3/ \ +{extra_version_location} [loggers] keys = root @@ -188,26 +209,63 @@ class = StreamHandler format = %%(levelname)-5.5s [%%(name)s] %%(message)s datefmt = %%H:%%M:%%S """ - % ( - dir_, - url, - "true" if sqlalchemy_future else "false", - "true" if sourceless else "false", - extra_version_location, - ) + ) + + +def _no_sql_pyproject_config(dialect="postgresql", directives=""): + """use a postgresql url with no host so that + connections guaranteed to fail""" + dir_ = _join_path(_get_staging_directory(), "scripts") + + return _write_toml_config( + f""" +[tool.alembic] +script_location ="{dir_}" +{textwrap.dedent(directives)} + + """, + f""" +[alembic] +sqlalchemy.url = {dialect}:// + +[loggers] +keys = root + +[handlers] +keys = console + +[logger_root] +level = WARNING +handlers = 
console +qualname = + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatters] +keys = generic + +[formatter_generic] +format = %%(levelname)-5.5s [%%(name)s] %%(message)s +datefmt = %%H:%%M:%%S + +""", ) def _no_sql_testing_config(dialect="postgresql", directives=""): """use a postgresql url with no host so that connections guaranteed to fail""" - dir_ = os.path.join(_get_staging_directory(), "scripts") + dir_ = _join_path(_get_staging_directory(), "scripts") return _write_config_file( - """ + f""" [alembic] -script_location = %s -sqlalchemy.url = %s:// -%s +script_location ={dir_} +sqlalchemy.url = {dialect}:// +{directives} [loggers] keys = root @@ -234,10 +292,16 @@ class = StreamHandler datefmt = %%H:%%M:%%S """ - % (dir_, dialect, directives) ) +def _write_toml_config(tomltext, initext): + cfg = _write_config_file(initext) + with open(cfg.toml_file_name, "w") as f: + f.write(tomltext) + return cfg + + def _write_config_file(text): cfg = _testing_config() with open(cfg.config_file_name, "w") as f: @@ -250,7 +314,10 @@ def _testing_config(): if not os.access(_get_staging_directory(), os.F_OK): os.mkdir(_get_staging_directory()) - return Config(os.path.join(_get_staging_directory(), "test_alembic.ini")) + return Config( + _join_path(_get_staging_directory(), "test_alembic.ini"), + _join_path(_get_staging_directory(), "pyproject.toml"), + ) def write_script( @@ -270,9 +337,7 @@ def write_script( script = Script._from_path(scriptdir, path) old = scriptdir.revision_map.get_revision(script.revision) if old.down_revision != script.down_revision: - raise Exception( - "Can't change down_revision " "on a refresh operation." 
- ) + raise Exception("Can't change down_revision on a refresh operation.") scriptdir.revision_map.add_revision(script, _replace=True) if sourceless: @@ -312,9 +377,9 @@ def three_rev_fixture(cfg): write_script( script, a, - """\ + f"""\ "Rev A" -revision = '%s' +revision = '{a}' down_revision = None from alembic import op @@ -327,8 +392,7 @@ def upgrade(): def downgrade(): op.execute("DROP STEP 1") -""" - % a, +""", ) script.generate_revision(b, "revision b", refresh=True, head=a) @@ -358,10 +422,10 @@ def downgrade(): write_script( script, c, - """\ + f"""\ "Rev C" -revision = '%s' -down_revision = '%s' +revision = '{c}' +down_revision = '{b}' from alembic import op @@ -373,8 +437,7 @@ def upgrade(): def downgrade(): op.execute("DROP STEP 3") -""" - % (c, b), +""", ) return a, b, c @@ -396,10 +459,10 @@ def multi_heads_fixture(cfg, a, b, c): write_script( script, d, - """\ + f"""\ "Rev D" -revision = '%s' -down_revision = '%s' +revision = '{d}' +down_revision = '{b}' from alembic import op @@ -411,8 +474,7 @@ def upgrade(): def downgrade(): op.execute("DROP STEP 4") -""" - % (d, b), +""", ) script.generate_revision( @@ -421,10 +483,10 @@ def downgrade(): write_script( script, e, - """\ + f"""\ "Rev E" -revision = '%s' -down_revision = '%s' +revision = '{e}' +down_revision = '{d}' from alembic import op @@ -436,8 +498,7 @@ def upgrade(): def downgrade(): op.execute("DROP STEP 5") -""" - % (e, d), +""", ) script.generate_revision( @@ -446,10 +507,10 @@ def downgrade(): write_script( script, f, - """\ + f"""\ "Rev F" -revision = '%s' -down_revision = '%s' +revision = '{f}' +down_revision = '{b}' from alembic import op @@ -461,8 +522,7 @@ def upgrade(): def downgrade(): op.execute("DROP STEP 6") -""" - % (f, b), +""", ) return d, e, f @@ -471,25 +531,25 @@ def downgrade(): def _multidb_testing_config(engines): """alembic.ini fixture to work exactly with the 'multidb' template""" - dir_ = os.path.join(_get_staging_directory(), "scripts") + dir_ = 
_join_path(_get_staging_directory(), "scripts") sqlalchemy_future = "future" in config.db.__class__.__module__ databases = ", ".join(engines.keys()) engines = "\n\n".join( - "[%s]\n" "sqlalchemy.url = %s" % (key, value.url) + f"[{key}]\nsqlalchemy.url = {value.url}" for key, value in engines.items() ) return _write_config_file( - """ + f""" [alembic] -script_location = %s +script_location = {dir_} sourceless = false -sqlalchemy.future = %s -databases = %s +sqlalchemy.future = {"true" if sqlalchemy_future else "false"} +databases = {databases} -%s +{engines} [loggers] keys = root @@ -514,5 +574,8 @@ class = StreamHandler format = %%(levelname)-5.5s [%%(name)s] %%(message)s datefmt = %%H:%%M:%%S """ - % (dir_, "true" if sqlalchemy_future else "false", databases, engines) ) + + +def _join_path(base: str, *more: str): + return str(Path(base).joinpath(*more).as_posix()) diff --git a/libs/alembic/testing/fixtures.py b/libs/alembic/testing/fixtures.py index 3b5ce596e6..73e421259d 100644 --- a/libs/alembic/testing/fixtures.py +++ b/libs/alembic/testing/fixtures.py @@ -3,11 +3,17 @@ import configparser from contextlib import contextmanager import io +import os import re +import shutil from typing import Any from typing import Dict +from typing import Generator +from typing import Literal +from typing import overload from sqlalchemy import Column +from sqlalchemy import create_mock_engine from sqlalchemy import inspect from sqlalchemy import MetaData from sqlalchemy import String @@ -17,20 +23,20 @@ from sqlalchemy.testing import config from sqlalchemy.testing import mock from sqlalchemy.testing.assertions import eq_ +from sqlalchemy.testing.fixtures import FutureEngineMixin from sqlalchemy.testing.fixtures import TablesTest as SQLAlchemyTablesTest from sqlalchemy.testing.fixtures import TestBase as SQLAlchemyTestBase +from sqlalchemy.testing.util import drop_all_tables_from_metadata import alembic from .assertions import _get_dialect +from .env import _get_staging_directory 
from ..environment import EnvironmentContext from ..migration import MigrationContext from ..operations import Operations from ..util import sqla_compat -from ..util.sqla_compat import create_mock_engine -from ..util.sqla_compat import sqla_14 from ..util.sqla_compat import sqla_2 - testing_config = configparser.ConfigParser() testing_config.read(["test.cfg"]) @@ -38,6 +44,31 @@ class TestBase(SQLAlchemyTestBase): is_sqlalchemy_future = sqla_2 + @testing.fixture() + def clear_staging_dir(self): + yield + location = _get_staging_directory() + for filename in os.listdir(location): + file_path = os.path.join(location, filename) + if os.path.isfile(file_path) or os.path.islink(file_path): + os.unlink(file_path) + elif os.path.isdir(file_path): + shutil.rmtree(file_path) + + @contextmanager + def pushd(self, dirname) -> Generator[None, None, None]: + current_dir = os.getcwd() + try: + os.chdir(dirname) + yield + finally: + os.chdir(current_dir) + + @testing.fixture() + def pop_alembic_config_env(self): + yield + os.environ.pop("ALEMBIC_CONFIG", None) + @testing.fixture() def ops_context(self, migration_context): with migration_context.begin_transaction(_per_migration=True): @@ -57,20 +88,61 @@ def as_sql_migration_context(self, connection): @testing.fixture def connection(self): + global _connection_fixture_connection + with config.db.connect() as conn: + _connection_fixture_connection = conn yield conn + _connection_fixture_connection = None -class TablesTest(TestBase, SQLAlchemyTablesTest): - pass + @testing.fixture + def restore_operations(self): + """Restore runners for modified operations""" + + saved_impls = None + op_cls = None + + def _save_attrs(_op_cls): + nonlocal saved_impls, op_cls + saved_impls = _op_cls._to_impl._registry.copy() + op_cls = _op_cls + + yield _save_attrs + + if op_cls is not None and saved_impls is not None: + op_cls._to_impl._registry = saved_impls + + @config.fixture() + def metadata(self, request): + """Provide bound MetaData for a 
single test, dropping afterwards.""" + + from sqlalchemy.sql import schema + + metadata = schema.MetaData() + request.instance.metadata = metadata + yield metadata + del request.instance.metadata + + if ( + _connection_fixture_connection + and _connection_fixture_connection.in_transaction() + ): + trans = _connection_fixture_connection.get_transaction() + trans.rollback() + with _connection_fixture_connection.begin(): + drop_all_tables_from_metadata( + metadata, _connection_fixture_connection + ) + else: + drop_all_tables_from_metadata(metadata, config.db) -if sqla_14: - from sqlalchemy.testing.fixtures import FutureEngineMixin -else: +_connection_fixture_connection = None - class FutureEngineMixin: # type:ignore[no-redef] - __requires__ = ("sqlalchemy_14",) + +class TablesTest(TestBase, SQLAlchemyTablesTest): + pass FutureEngineMixin.is_sqlalchemy_future = True @@ -89,8 +161,24 @@ def dump(sql, *multiparams, **params): _engs: Dict[Any, Any] = {} +@overload +@contextmanager +def capture_context_buffer( + bytes_io: Literal[True], **kw: Any +) -> Generator[io.BytesIO, None, None]: ... + + +@overload @contextmanager -def capture_context_buffer(**kw): +def capture_context_buffer( + **kw: Any, +) -> Generator[io.StringIO, None, None]: ... 
+ + +@contextmanager +def capture_context_buffer( + **kw: Any, +) -> Generator[io.StringIO | io.BytesIO, None, None]: if kw.pop("bytes_io", False): buf = io.BytesIO() else: @@ -108,7 +196,9 @@ def configure(*arg, **opt): @contextmanager -def capture_engine_context_buffer(**kw): +def capture_engine_context_buffer( + **kw: Any, +) -> Generator[io.StringIO, None, None]: from .env import _sqlite_file_db from sqlalchemy import event @@ -190,12 +280,8 @@ def assert_contains(self, sql): opts["as_sql"] = as_sql if literal_binds: opts["literal_binds"] = literal_binds - if not sqla_14 and dialect == "mariadb": - ctx_dialect = _get_dialect("mysql") - ctx_dialect.server_version_info = (10, 4, 0, "MariaDB") - else: - ctx_dialect = _get_dialect(dialect) + ctx_dialect = _get_dialect(dialect) if native_boolean is not None: ctx_dialect.supports_native_boolean = native_boolean # this is new as of SQLAlchemy 1.2.7 and is used by SQL Server, diff --git a/libs/alembic/testing/requirements.py b/libs/alembic/testing/requirements.py index 6e07e28ea4..1b217c937a 100644 --- a/libs/alembic/testing/requirements.py +++ b/libs/alembic/testing/requirements.py @@ -1,7 +1,6 @@ from sqlalchemy.testing.requirements import Requirements from alembic import util -from alembic.util import sqla_compat from ..testing import exclusions @@ -74,13 +73,6 @@ def reflects_pk_names(self): def reflects_fk_options(self): return exclusions.closed() - @property - def sqlalchemy_14(self): - return exclusions.skip_if( - lambda config: not util.sqla_14, - "SQLAlchemy 1.4 or greater required", - ) - @property def sqlalchemy_1x(self): return exclusions.skip_if( @@ -105,7 +97,7 @@ def go(config): else: return True - return self.sqlalchemy_14 + exclusions.only_if(go) + return exclusions.only_if(go) @property def comments(self): @@ -122,24 +114,13 @@ def computed_columns(self): return exclusions.closed() @property - def computed_columns_api(self): - return exclusions.only_if( - 
exclusions.BooleanPredicate(sqla_compat.has_computed) - ) - - @property - def computed_reflects_normally(self): - return exclusions.only_if( - exclusions.BooleanPredicate(sqla_compat.has_computed_reflection) - ) - - @property - def computed_reflects_as_server_default(self): - return exclusions.closed() + def computed_columns_warn_no_persisted(self): + def go(config): + return hasattr( + config.db.dialect, "supports_virtual_generated_columns" + ) - @property - def computed_doesnt_reflect_as_server_default(self): - return exclusions.closed() + return exclusions.only_if("postgresql<18") + exclusions.only_if(go) @property def autoincrement_on_composite_pk(self): @@ -183,6 +164,10 @@ def fk_deferrable_is_reflected(self): @property def fk_names(self): + return self.foreign_key_name_reflection + + @property + def foreign_key_name_reflection(self): return exclusions.open() @property @@ -202,9 +187,3 @@ def identity_columns(self): @property def identity_columns_alter(self): return exclusions.closed() - - @property - def identity_columns_api(self): - return exclusions.only_if( - exclusions.BooleanPredicate(sqla_compat.has_identity) - ) diff --git a/libs/alembic/testing/suite/_autogen_fixtures.py b/libs/alembic/testing/suite/_autogen_fixtures.py index d838ebef10..8329a1ac89 100644 --- a/libs/alembic/testing/suite/_autogen_fixtures.py +++ b/libs/alembic/testing/suite/_autogen_fixtures.py @@ -2,6 +2,8 @@ from typing import Any from typing import Dict +from typing import Literal +from typing import overload from typing import Set from sqlalchemy import CHAR @@ -14,6 +16,7 @@ from sqlalchemy import Integer from sqlalchemy import MetaData from sqlalchemy import Numeric +from sqlalchemy import PrimaryKeyConstraint from sqlalchemy import String from sqlalchemy import Table from sqlalchemy import Text @@ -149,6 +152,118 @@ def _get_model_schema(cls): return m +class NamingConvModel: + __requires__ = ("unique_constraint_reflection",) + configure_opts = {"conv_all_constraint_names": 
True} + naming_convention = { + "ix": "ix_%(column_0_label)s", + "uq": "uq_%(table_name)s_%(constraint_name)s", + "ck": "ck_%(table_name)s_%(constraint_name)s", + "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s", + "pk": "pk_%(table_name)s", + } + + @classmethod + def _get_db_schema(cls): + # database side - assume all constraints have a name that + # we would assume here is a "db generated" name. need to make + # sure these all render with op.f(). + m = MetaData() + Table( + "x1", + m, + Column("q", Integer), + Index("db_x1_index_q", "q"), + PrimaryKeyConstraint("q", name="db_x1_primary_q"), + ) + Table( + "x2", + m, + Column("q", Integer), + Column("p", ForeignKey("x1.q", name="db_x2_foreign_q")), + CheckConstraint("q > 5", name="db_x2_check_q"), + ) + Table( + "x3", + m, + Column("q", Integer), + Column("r", Integer), + Column("s", Integer), + UniqueConstraint("q", name="db_x3_unique_q"), + ) + Table( + "x4", + m, + Column("q", Integer), + PrimaryKeyConstraint("q", name="db_x4_primary_q"), + ) + Table( + "x5", + m, + Column("q", Integer), + Column("p", ForeignKey("x4.q", name="db_x5_foreign_q")), + Column("r", Integer), + Column("s", Integer), + PrimaryKeyConstraint("q", name="db_x5_primary_q"), + UniqueConstraint("r", name="db_x5_unique_r"), + CheckConstraint("s > 5", name="db_x5_check_s"), + ) + # SQLite and it's "no names needed" thing. bleh. + # we can't have a name for these so you'll see "None" for the name. 
+ Table( + "unnamed_sqlite", + m, + Column("q", Integer), + Column("r", Integer), + PrimaryKeyConstraint("q"), + UniqueConstraint("r"), + ) + return m + + @classmethod + def _get_model_schema(cls): + from sqlalchemy.sql.naming import conv + + m = MetaData(naming_convention=cls.naming_convention) + Table( + "x1", m, Column("q", Integer, primary_key=True), Index(None, "q") + ) + Table( + "x2", + m, + Column("q", Integer), + Column("p", ForeignKey("x1.q")), + CheckConstraint("q > 5", name="token_x2check1"), + ) + Table( + "x3", + m, + Column("q", Integer), + Column("r", Integer), + Column("s", Integer), + UniqueConstraint("r", name="token_x3r"), + UniqueConstraint("s", name=conv("userdef_x3_unique_s")), + ) + Table( + "x4", + m, + Column("q", Integer, primary_key=True), + Index("userdef_x4_idx_q", "q"), + ) + Table( + "x6", + m, + Column("q", Integer, primary_key=True), + Column("p", ForeignKey("x4.q")), + Column("r", Integer), + Column("s", Integer), + UniqueConstraint("r", name="token_x6r"), + CheckConstraint("s > 5", "token_x6check1"), + CheckConstraint("s < 20", conv("userdef_x6_check_s")), + ) + return m + + class _ComparesFKs: def _assert_fk_diff( self, @@ -268,17 +383,46 @@ def _update_context( class AutogenFixtureTest(_ComparesFKs): + + @overload def _fixture( self, - m1, - m2, + m1: MetaData, + m2: MetaData, + include_schemas=..., + opts=..., + object_filters=..., + name_filters=..., + *, + return_ops: Literal[True], + max_identifier_length=..., + ) -> ops.UpgradeOps: ... + + @overload + def _fixture( + self, + m1: MetaData, + m2: MetaData, + include_schemas=..., + opts=..., + object_filters=..., + name_filters=..., + *, + return_ops: Literal[False] = ..., + max_identifier_length=..., + ) -> list[Any]: ... 
+ + def _fixture( + self, + m1: MetaData, + m2: MetaData, include_schemas=False, opts=None, object_filters=_default_object_filters, name_filters=_default_name_filters, - return_ops=False, + return_ops: bool = False, max_identifier_length=None, - ): + ) -> ops.UpgradeOps | list[Any]: if max_identifier_length: dialect = self.bind.dialect existing_length = dialect.max_identifier_length diff --git a/libs/alembic/testing/suite/test_autogen_computed.py b/libs/alembic/testing/suite/test_autogen_computed.py index 04a3caf072..586691b265 100644 --- a/libs/alembic/testing/suite/test_autogen_computed.py +++ b/libs/alembic/testing/suite/test_autogen_computed.py @@ -1,3 +1,5 @@ +from contextlib import nullcontext + import sqlalchemy as sa from sqlalchemy import Column from sqlalchemy import Integer @@ -8,7 +10,7 @@ from ... import testing from ...testing import config from ...testing import eq_ -from ...testing import exclusions +from ...testing import expect_warnings from ...testing import is_ from ...testing import is_true from ...testing import mock @@ -19,6 +21,13 @@ class AutogenerateComputedTest(AutogenFixtureTest, TestBase): __requires__ = ("computed_columns",) __backend__ = True + def _fixture_ctx(self): + if config.requirements.computed_columns_warn_no_persisted.enabled: + ctx = expect_warnings() + else: + ctx = nullcontext() + return ctx + def test_add_computed_column(self): m1 = MetaData() m2 = MetaData() @@ -32,7 +41,8 @@ def test_add_computed_column(self): Column("foo", Integer, sa.Computed("5")), ) - diffs = self._fixture(m1, m2) + with self._fixture_ctx(): + diffs = self._fixture(m1, m2) eq_(diffs[0][0], "add_column") eq_(diffs[0][2], "user") @@ -56,25 +66,16 @@ def test_remove_computed_column(self): Table("user", m2, Column("id", Integer, primary_key=True)) - diffs = self._fixture(m1, m2) + with self._fixture_ctx(): + diffs = self._fixture(m1, m2) eq_(diffs[0][0], "remove_column") eq_(diffs[0][2], "user") c = diffs[0][3] eq_(c.name, "foo") - if 
config.requirements.computed_reflects_normally.enabled: - is_true(isinstance(c.computed, sa.Computed)) - else: - is_(c.computed, None) - - if config.requirements.computed_reflects_as_server_default.enabled: - is_true(isinstance(c.server_default, sa.DefaultClause)) - eq_(str(c.server_default.arg.text), "5") - elif config.requirements.computed_reflects_normally.enabled: - is_true(isinstance(c.computed, sa.Computed)) - else: - is_(c.computed, None) + is_true(isinstance(c.computed, sa.Computed)) + is_true(isinstance(c.server_default, sa.Computed)) @testing.combinations( lambda: (None, sa.Computed("bar*5")), @@ -85,7 +86,6 @@ def test_remove_computed_column(self): ), lambda: (sa.Computed("bar*5"), sa.Computed("bar * 42")), ) - @config.requirements.computed_reflects_normally def test_cant_change_computed_warning(self, test_case): arg_before, arg_after = testing.resolve_lambda(test_case, **locals()) m1 = MetaData() @@ -110,7 +110,7 @@ def test_cant_change_computed_warning(self, test_case): Column("foo", Integer, *arg_after), ) - with mock.patch("alembic.util.warn") as mock_warn: + with mock.patch("alembic.util.warn") as mock_warn, self._fixture_ctx(): diffs = self._fixture(m1, m2) eq_( @@ -125,10 +125,6 @@ def test_cant_change_computed_warning(self, test_case): lambda: (sa.Computed("5"), sa.Computed("5")), lambda: (sa.Computed("bar*5"), sa.Computed("bar*5")), lambda: (sa.Computed("bar*5"), sa.Computed("bar * \r\n\t5")), - ( - lambda: (sa.Computed("bar*5"), None), - config.requirements.computed_doesnt_reflect_as_server_default, - ), ) def test_computed_unchanged(self, test_case): arg_before, arg_after = testing.resolve_lambda(test_case, **locals()) @@ -154,51 +150,8 @@ def test_computed_unchanged(self, test_case): Column("foo", Integer, *arg_after), ) - with mock.patch("alembic.util.warn") as mock_warn: + with mock.patch("alembic.util.warn") as mock_warn, self._fixture_ctx(): diffs = self._fixture(m1, m2) eq_(mock_warn.mock_calls, []) eq_(list(diffs), []) - - 
@config.requirements.computed_reflects_as_server_default - def test_remove_computed_default_on_computed(self): - """Asserts the current behavior which is that on PG and Oracle, - the GENERATED ALWAYS AS is reflected as a server default which we can't - tell is actually "computed", so these come out as a modification to - the server default. - - """ - m1 = MetaData() - m2 = MetaData() - - Table( - "user", - m1, - Column("id", Integer, primary_key=True), - Column("bar", Integer), - Column("foo", Integer, sa.Computed("bar + 42")), - ) - - Table( - "user", - m2, - Column("id", Integer, primary_key=True), - Column("bar", Integer), - Column("foo", Integer), - ) - - diffs = self._fixture(m1, m2) - - eq_(diffs[0][0][0], "modify_default") - eq_(diffs[0][0][2], "user") - eq_(diffs[0][0][3], "foo") - old = diffs[0][0][-2] - new = diffs[0][0][-1] - - is_(new, None) - is_true(isinstance(old, sa.DefaultClause)) - - if exclusions.against(config, "postgresql"): - eq_(str(old.arg.text), "(bar + 42)") - elif exclusions.against(config, "oracle"): - eq_(str(old.arg.text), '"BAR"+42') diff --git a/libs/alembic/testing/suite/test_autogen_fks.py b/libs/alembic/testing/suite/test_autogen_fks.py index 0240b98d38..d69736e64d 100644 --- a/libs/alembic/testing/suite/test_autogen_fks.py +++ b/libs/alembic/testing/suite/test_autogen_fks.py @@ -199,6 +199,7 @@ def test_no_change_composite_fk(self): eq_(diffs, []) + @config.requirements.foreign_key_name_reflection def test_casing_convention_changed_so_put_drops_first(self): m1 = MetaData() m2 = MetaData() @@ -247,7 +248,7 @@ def test_casing_convention_changed_so_put_drops_first(self): ["test2"], "some_table", ["test"], - name="MyFK" if config.requirements.fk_names.enabled else None, + name="MyFK", ) self._assert_fk_diff( diff --git a/libs/alembic/testing/warnings.py b/libs/alembic/testing/warnings.py index e87136b85f..86d45a0dd5 100644 --- a/libs/alembic/testing/warnings.py +++ b/libs/alembic/testing/warnings.py @@ -10,8 +10,6 @@ from sqlalchemy 
import exc as sa_exc -from ..util import sqla_14 - def setup_filters(): """Set global warning behavior for the test suite.""" @@ -23,13 +21,6 @@ def setup_filters(): # some selected deprecations... warnings.filterwarnings("error", category=DeprecationWarning) - if not sqla_14: - # 1.3 uses pkg_resources in PluginLoader - warnings.filterwarnings( - "ignore", - "pkg_resources is deprecated as an API", - DeprecationWarning, - ) try: import pytest except ImportError: diff --git a/libs/alembic/util/__init__.py b/libs/alembic/util/__init__.py index 4724e1f084..8f3f685b44 100644 --- a/libs/alembic/util/__init__.py +++ b/libs/alembic/util/__init__.py @@ -1,15 +1,19 @@ from .editor import open_in_editor as open_in_editor from .exc import AutogenerateDiffsDetected as AutogenerateDiffsDetected from .exc import CommandError as CommandError +from .exc import DatabaseNotAtHead as DatabaseNotAtHead from .langhelpers import _with_legacy_names as _with_legacy_names from .langhelpers import asbool as asbool from .langhelpers import dedupe_tuple as dedupe_tuple from .langhelpers import Dispatcher as Dispatcher +from .langhelpers import DispatchPriority as DispatchPriority from .langhelpers import EMPTY_DICT as EMPTY_DICT from .langhelpers import immutabledict as immutabledict from .langhelpers import memoized_property as memoized_property from .langhelpers import ModuleClsProxy as ModuleClsProxy from .langhelpers import not_none as not_none +from .langhelpers import PriorityDispatcher as PriorityDispatcher +from .langhelpers import PriorityDispatchResult as PriorityDispatchResult from .langhelpers import rev_id as rev_id from .langhelpers import to_list as to_list from .langhelpers import to_tuple as to_tuple @@ -20,16 +24,10 @@ from .messaging import obfuscate_url_pw as obfuscate_url_pw from .messaging import status as status from .messaging import warn as warn +from .messaging import warn_deprecated as warn_deprecated from .messaging import write_outstream as write_outstream from 
.pyfiles import coerce_resource_to_filename as coerce_resource_to_filename from .pyfiles import load_python_file as load_python_file from .pyfiles import pyc_file_from_path as pyc_file_from_path from .pyfiles import template_to_file as template_to_file -from .sqla_compat import has_computed as has_computed -from .sqla_compat import sqla_13 as sqla_13 -from .sqla_compat import sqla_14 as sqla_14 from .sqla_compat import sqla_2 as sqla_2 - - -if not sqla_13: - raise CommandError("SQLAlchemy 1.3.0 or greater is required.") diff --git a/libs/alembic/util/compat.py b/libs/alembic/util/compat.py index e185cc4172..527ffdc97d 100644 --- a/libs/alembic/util/compat.py +++ b/libs/alembic/util/compat.py @@ -3,15 +3,16 @@ from __future__ import annotations from configparser import ConfigParser +from importlib import metadata +from importlib.metadata import EntryPoint import io import os +from pathlib import Path import sys import typing from typing import Any -from typing import List -from typing import Optional +from typing import Iterator from typing import Sequence -from typing import Union if True: # zimports hack for too-long names @@ -24,9 +25,10 @@ is_posix = os.name == "posix" +py314 = sys.version_info >= (3, 14) +py313 = sys.version_info >= (3, 13) +py312 = sys.version_info >= (3, 12) py311 = sys.version_info >= (3, 11) -py310 = sys.version_info >= (3, 10) -py39 = sys.version_info >= (3, 9) # produce a wrapper that allows encoded text to stream @@ -37,30 +39,72 @@ def close(self) -> None: pass -if py39: - from importlib import resources as _resources +if py311: + import tomllib as tomllib +else: + import tomli as tomllib # type: ignore # noqa + + +if py312: + + def path_walk( + path: Path, *, top_down: bool = True + ) -> Iterator[tuple[Path, list[str], list[str]]]: + return Path.walk(path) - importlib_resources = _resources - from importlib import metadata as _metadata + def path_relative_to( + path: Path, other: Path, *, walk_up: bool = False + ) -> Path: + return 
path.relative_to(other, walk_up=walk_up) - importlib_metadata = _metadata - from importlib.metadata import EntryPoint as EntryPoint else: - import importlib_resources # type:ignore # noqa - import importlib_metadata # type:ignore # noqa - from importlib_metadata import EntryPoint # type:ignore # noqa + + def path_walk( + path: Path, *, top_down: bool = True + ) -> Iterator[tuple[Path, list[str], list[str]]]: + for root, dirs, files in os.walk(path, topdown=top_down): + yield Path(root), dirs, files + + def path_relative_to( + path: Path, other: Path, *, walk_up: bool = False + ) -> Path: + """ + Calculate the relative path of 'path' with respect to 'other', + optionally allowing 'path' to be outside the subtree of 'other'. + + OK I used AI for this, sorry + + """ + try: + return path.relative_to(other) + except ValueError: + if walk_up: + other_ancestors = list(other.parents) + [other] + for ancestor in other_ancestors: + try: + return path.relative_to(ancestor) + except ValueError: + continue + raise ValueError( + f"{path} is not in the same subtree as {other}" + ) + else: + raise def importlib_metadata_get(group: str) -> Sequence[EntryPoint]: - ep = importlib_metadata.entry_points() - if hasattr(ep, "select"): - return ep.select(group=group) - else: - return ep.get(group, ()) # type: ignore + """provide a facade for metadata.entry_points(). + + This is no longer a "compat" function as of Python 3.10, however + the function is widely referenced in the test suite and elsewhere so is + still in this module for compatibility reasons. 
+ + """ + return metadata.entry_points().select(group=group) def formatannotation_fwdref( - annotation: Any, base_module: Optional[Any] = None + annotation: Any, base_module: Any | None = None ) -> str: """vendored from python 3.7""" # copied over _formatannotation from sqlalchemy 2.0 @@ -81,9 +125,6 @@ def formatannotation_fwdref( def read_config_parser( file_config: ConfigParser, - file_argument: Sequence[Union[str, os.PathLike[str]]], -) -> List[str]: - if py310: - return file_config.read(file_argument, encoding="locale") - else: - return file_config.read(file_argument) + file_argument: list[str | os.PathLike[str]], +) -> list[str]: + return file_config.read(file_argument, encoding="locale") diff --git a/libs/alembic/util/exc.py b/libs/alembic/util/exc.py index 0d0496b1e2..4658f7823d 100644 --- a/libs/alembic/util/exc.py +++ b/libs/alembic/util/exc.py @@ -1,6 +1,43 @@ +from __future__ import annotations + +from typing import Any +from typing import List +from typing import Tuple +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from alembic.autogenerate import RevisionContext + + class CommandError(Exception): - pass + """Base command error for all exceptions""" + + +class DatabaseNotAtHead(CommandError): + """Indicates the database is not at current head revisions. + + Raised by the :func:`.command.current` command when the + :paramref:`.command.current.check_heads` parameter is used. + + .. versionadded:: 1.17.1 + + """ class AutogenerateDiffsDetected(CommandError): - pass + """Raised when diffs were detected by the :func:`.command.check` + command. + + .. 
versionadded:: 1.9.0 + + """ + + def __init__( + self, + message: str, + revision_context: RevisionContext, + diffs: List[Tuple[Any, ...]], + ) -> None: + super().__init__(message) + self.revision_context = revision_context + self.diffs = diffs diff --git a/libs/alembic/util/langhelpers.py b/libs/alembic/util/langhelpers.py index 80d88cbcec..cf0df2396a 100644 --- a/libs/alembic/util/langhelpers.py +++ b/libs/alembic/util/langhelpers.py @@ -2,6 +2,7 @@ import collections from collections.abc import Iterable +import enum import textwrap from typing import Any from typing import Callable @@ -17,9 +18,7 @@ from typing import Set from typing import Tuple from typing import Type -from typing import TYPE_CHECKING from typing import TypeVar -from typing import Union import uuid import warnings @@ -264,20 +263,63 @@ def dedupe_tuple(tup: Tuple[str, ...]) -> Tuple[str, ...]: return tuple(unique_list(tup)) +class PriorityDispatchResult(enum.Enum): + """indicate an action after running a function within a + :class:`.PriorityDispatcher` + + .. versionadded:: 1.18.0 + + """ + + CONTINUE = enum.auto() + """Continue running more functions. + + Any return value that is not PriorityDispatchResult.STOP is equivalent + to this. + + """ + + STOP = enum.auto() + """Stop running any additional functions within the subgroup""" + + +class DispatchPriority(enum.IntEnum): + """Indicate which of three sub-collections a function inside a + :class:`.PriorityDispatcher` should be placed. + + .. 
versionadded:: 1.18.0 + + """ + + FIRST = 50 + """Run the funciton in the first batch of functions (highest priority)""" + + MEDIUM = 25 + """Run the function at normal priority (this is the default)""" + + LAST = 10 + """Run the function in the last batch of functions""" + + class Dispatcher: - def __init__(self, uselist: bool = False) -> None: + def __init__(self) -> None: self._registry: Dict[Tuple[Any, ...], Any] = {} - self.uselist = uselist def dispatch_for( - self, target: Any, qualifier: str = "default" + self, + target: Any, + *, + qualifier: str = "default", + replace: bool = False, ) -> Callable[[_C], _C]: def decorate(fn: _C) -> _C: - if self.uselist: - self._registry.setdefault((target, qualifier), []).append(fn) - else: - assert (target, qualifier) not in self._registry - self._registry[(target, qualifier)] = fn + if (target, qualifier) in self._registry and not replace: + raise ValueError( + "Can not set dispatch function for object " + f"{target!r}: key already exists. To replace " + "existing function, use replace=True." 
+ ) + self._registry[(target, qualifier)] = fn return fn return decorate @@ -290,42 +332,113 @@ def dispatch(self, obj: Any, qualifier: str = "default") -> Any: else: targets = type(obj).__mro__ - for spcls in targets: - if qualifier != "default" and (spcls, qualifier) in self._registry: - return self._fn_or_list(self._registry[(spcls, qualifier)]) - elif (spcls, "default") in self._registry: - return self._fn_or_list(self._registry[(spcls, "default")]) + if qualifier != "default": + qualifiers = [qualifier, "default"] else: - raise ValueError("no dispatch function for object: %s" % obj) + qualifiers = ["default"] - def _fn_or_list( - self, fn_or_list: Union[List[Callable[..., Any]], Callable[..., Any]] - ) -> Callable[..., Any]: - if self.uselist: - - def go(*arg: Any, **kw: Any) -> None: - if TYPE_CHECKING: - assert isinstance(fn_or_list, Sequence) - for fn in fn_or_list: - fn(*arg, **kw) - - return go + for spcls in targets: + for qualifier in qualifiers: + if (spcls, qualifier) in self._registry: + return self._registry[(spcls, qualifier)] else: - return fn_or_list # type: ignore + raise ValueError("no dispatch function for object: %s" % obj) def branch(self) -> Dispatcher: """Return a copy of this dispatcher that is independently writable.""" d = Dispatcher() - if self.uselist: - d._registry.update( - (k, [fn for fn in self._registry[k]]) for k in self._registry + d._registry.update(self._registry) + return d + + +class PriorityDispatcher: + """registers lists of functions at multiple levels of priorty and provides + a target to invoke them in priority order. + + .. 
versionadded:: 1.18.0 - PriorityDispatcher replaces the job + of Dispatcher(uselist=True) + + """ + + def __init__(self) -> None: + self._registry: dict[tuple[Any, ...], Any] = collections.defaultdict( + list + ) + + def dispatch_for( + self, + target: str, + *, + priority: DispatchPriority = DispatchPriority.MEDIUM, + qualifier: str = "default", + subgroup: str | None = None, + ) -> Callable[[_C], _C]: + """return a decorator callable that registers a function at a + given priority, with a given qualifier, to fire off for a given + subgroup. + + It's important this remains as a decorator to support third party + plugins who are populating the dispatcher using that style. + + """ + + def decorate(fn: _C) -> _C: + self._registry[(target, qualifier, priority)].append( + (fn, subgroup) ) + return fn + + return decorate + + def dispatch( + self, target: str, *, qualifier: str = "default" + ) -> Callable[..., None]: + """Provide a callable for the given target and qualifier.""" + + if qualifier != "default": + qualifiers = [qualifier, "default"] else: - d._registry.update(self._registry) + qualifiers = ["default"] + + def go(*arg: Any, **kw: Any) -> Any: + results_by_subgroup: dict[str, PriorityDispatchResult] = {} + for priority in DispatchPriority: + for qualifier in qualifiers: + for fn, subgroup in self._registry.get( + (target, qualifier, priority), () + ): + if ( + results_by_subgroup.get( + subgroup, PriorityDispatchResult.CONTINUE + ) + is PriorityDispatchResult.STOP + ): + continue + + result = fn(*arg, **kw) + results_by_subgroup[subgroup] = result + + return go + + def branch(self) -> PriorityDispatcher: + """Return a copy of this dispatcher that is independently + writable.""" + + d = PriorityDispatcher() + d.populate_with(self) return d + def populate_with(self, other: PriorityDispatcher) -> None: + """Populate this PriorityDispatcher with the contents of another one. + + Additive, does not remove existing contents. 
+ """ + for k in other._registry: + new_list = other._registry[k] + self._registry[k].extend(new_list) + def not_none(value: Optional[_T]) -> _T: assert value is not None diff --git a/libs/alembic/util/messaging.py b/libs/alembic/util/messaging.py index 6618fa7faa..4c08f16e7e 100644 --- a/libs/alembic/util/messaging.py +++ b/libs/alembic/util/messaging.py @@ -13,8 +13,6 @@ from sqlalchemy.engine import url -from . import sqla_compat - log = logging.getLogger(__name__) # disable "no handler found" errors @@ -76,14 +74,17 @@ def err(message: str, quiet: bool = False) -> None: def obfuscate_url_pw(input_url: str) -> str: - u = url.make_url(input_url) - return sqla_compat.url_render_as_string(u, hide_password=True) # type: ignore # noqa: E501 + return url.make_url(input_url).render_as_string(hide_password=True) def warn(msg: str, stacklevel: int = 2) -> None: warnings.warn(msg, UserWarning, stacklevel=stacklevel) +def warn_deprecated(msg: str, stacklevel: int = 2) -> None: + warnings.warn(msg, DeprecationWarning, stacklevel=stacklevel) + + def msg( msg: str, newline: bool = True, flush: bool = False, quiet: bool = False ) -> None: diff --git a/libs/alembic/util/pyfiles.py b/libs/alembic/util/pyfiles.py index 973bd458e5..135a42dce2 100644 --- a/libs/alembic/util/pyfiles.py +++ b/libs/alembic/util/pyfiles.py @@ -3,26 +3,33 @@ import atexit from contextlib import ExitStack import importlib +from importlib import resources import importlib.machinery import importlib.util import os +import pathlib import re import tempfile from types import ModuleType from typing import Any from typing import Optional +from typing import Union from mako import exceptions from mako.template import Template -from . 
import compat from .exc import CommandError def template_to_file( - template_file: str, dest: str, output_encoding: str, **kw: Any + template_file: Union[str, os.PathLike[str]], + dest: Union[str, os.PathLike[str]], + output_encoding: str, + *, + append_with_newlines: bool = False, + **kw: Any, ) -> None: - template = Template(filename=template_file) + template = Template(filename=_preserving_path_as_str(template_file)) try: output = template.render_unicode(**kw).encode(output_encoding) except: @@ -38,11 +45,13 @@ def template_to_file( "template-oriented traceback." % fname ) else: - with open(dest, "wb") as f: + with open(dest, "ab" if append_with_newlines else "wb") as f: + if append_with_newlines: + f.write("\n\n".encode(output_encoding)) f.write(output) -def coerce_resource_to_filename(fname: str) -> str: +def coerce_resource_to_filename(fname_or_resource: str) -> pathlib.Path: """Interpret a filename as either a filesystem location or as a package resource. @@ -50,48 +59,60 @@ def coerce_resource_to_filename(fname: str) -> str: are interpreted as resources and coerced to a file location. 
""" - if not os.path.isabs(fname) and ":" in fname: - tokens = fname.split(":") + # TODO: there seem to be zero tests for the package resource codepath + if not os.path.isabs(fname_or_resource) and ":" in fname_or_resource: + tokens = fname_or_resource.split(":") # from https://importlib-resources.readthedocs.io/en/latest/migration.html#pkg-resources-resource-filename # noqa E501 file_manager = ExitStack() atexit.register(file_manager.close) - ref = compat.importlib_resources.files(tokens[0]) + ref = resources.files(tokens[0]) for tok in tokens[1:]: ref = ref / tok - fname = file_manager.enter_context( # type: ignore[assignment] - compat.importlib_resources.as_file(ref) + fname_or_resource = file_manager.enter_context( # type: ignore[assignment] # noqa: E501 + resources.as_file(ref) ) - return fname + return pathlib.Path(fname_or_resource) -def pyc_file_from_path(path: str) -> Optional[str]: +def pyc_file_from_path( + path: Union[str, os.PathLike[str]], +) -> Optional[pathlib.Path]: """Given a python source path, locate the .pyc.""" - candidate = importlib.util.cache_from_source(path) - if os.path.exists(candidate): + pathpath = pathlib.Path(path) + candidate = pathlib.Path( + importlib.util.cache_from_source(pathpath.as_posix()) + ) + if candidate.exists(): return candidate # even for pep3147, fall back to the old way of finding .pyc files, # to support sourceless operation - filepath, ext = os.path.splitext(path) + ext = pathpath.suffix for ext in importlib.machinery.BYTECODE_SUFFIXES: - if os.path.exists(filepath + ext): - return filepath + ext + if pathpath.with_suffix(ext).exists(): + return pathpath.with_suffix(ext) else: return None -def load_python_file(dir_: str, filename: str) -> ModuleType: +def load_python_file( + dir_: Union[str, os.PathLike[str]], filename: Union[str, os.PathLike[str]] +) -> ModuleType: """Load a file from the given path as a Python module.""" + dir_ = pathlib.Path(dir_) + filename_as_path = pathlib.Path(filename) + filename = 
filename_as_path.name + module_id = re.sub(r"\W", "_", filename) - path = os.path.join(dir_, filename) - _, ext = os.path.splitext(filename) + path = dir_ / filename + ext = path.suffix if ext == ".py": - if os.path.exists(path): + if path.exists(): module = load_module_py(module_id, path) else: pyc_path = pyc_file_from_path(path) @@ -106,9 +127,27 @@ def load_python_file(dir_: str, filename: str) -> ModuleType: return module -def load_module_py(module_id: str, path: str) -> ModuleType: +def load_module_py( + module_id: str, path: Union[str, os.PathLike[str]] +) -> ModuleType: spec = importlib.util.spec_from_file_location(module_id, path) assert spec module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) # type: ignore return module + + +def _preserving_path_as_str(path: Union[str, os.PathLike[str]]) -> str: + """receive str/pathlike and return a string. + + Does not convert an incoming string path to a Path first, to help with + unit tests that are doing string path round trips without OS-specific + processing if not necessary. 
+ + """ + if isinstance(path, str): + return path + elif isinstance(path, pathlib.PurePath): + return str(path) + else: + return str(pathlib.Path(path)) diff --git a/libs/alembic/util/sqla_compat.py b/libs/alembic/util/sqla_compat.py index d4ed0fdd59..ff2f2c9390 100644 --- a/libs/alembic/util/sqla_compat.py +++ b/libs/alembic/util/sqla_compat.py @@ -10,7 +10,6 @@ from typing import Dict from typing import Iterable from typing import Iterator -from typing import Mapping from typing import Optional from typing import Protocol from typing import Set @@ -20,11 +19,9 @@ from typing import Union from sqlalchemy import __version__ -from sqlalchemy import inspect from sqlalchemy import schema from sqlalchemy import sql from sqlalchemy import types as sqltypes -from sqlalchemy.engine import url from sqlalchemy.schema import CheckConstraint from sqlalchemy.schema import Column from sqlalchemy.schema import ForeignKeyConstraint @@ -32,28 +29,25 @@ from sqlalchemy.sql.base import DialectKWArgs from sqlalchemy.sql.elements import BindParameter from sqlalchemy.sql.elements import ColumnClause -from sqlalchemy.sql.elements import quoted_name from sqlalchemy.sql.elements import TextClause from sqlalchemy.sql.elements import UnaryExpression +from sqlalchemy.sql.naming import _NONE_NAME as _NONE_NAME # type: ignore[attr-defined] # noqa: E501 from sqlalchemy.sql.visitors import traverse from typing_extensions import TypeGuard if TYPE_CHECKING: from sqlalchemy import ClauseElement + from sqlalchemy import Identity from sqlalchemy import Index from sqlalchemy import Table from sqlalchemy.engine import Connection from sqlalchemy.engine import Dialect from sqlalchemy.engine import Transaction - from sqlalchemy.engine.reflection import Inspector from sqlalchemy.sql.base import ColumnCollection from sqlalchemy.sql.compiler import SQLCompiler - from sqlalchemy.sql.dml import Insert from sqlalchemy.sql.elements import ColumnElement from sqlalchemy.sql.schema import Constraint from 
sqlalchemy.sql.schema import SchemaItem - from sqlalchemy.sql.selectable import Select - from sqlalchemy.sql.selectable import TableClause _CE = TypeVar("_CE", bound=Union["ColumnElement[Any]", "SchemaItem"]) @@ -72,24 +66,14 @@ def _safe_int(value: str) -> Union[int, str]: _vers = tuple( [_safe_int(x) for x in re.findall(r"(\d+|[abc]\d)", __version__)] ) -sqla_13 = _vers >= (1, 3) -sqla_14 = _vers >= (1, 4) # https://docs.sqlalchemy.org/en/latest/changelog/changelog_14.html#change-0c6e0cc67dfe6fac5164720e57ef307d sqla_14_18 = _vers >= (1, 4, 18) sqla_14_26 = _vers >= (1, 4, 26) sqla_2 = _vers >= (2,) +sqla_2_0_25 = _vers >= (2, 25) +sqla_2_1 = _vers >= (2, 1) sqlalchemy_version = __version__ -try: - from sqlalchemy.sql.naming import _NONE_NAME as _NONE_NAME # type: ignore[attr-defined] # noqa: E501 -except ImportError: - from sqlalchemy.sql.elements import _NONE_NAME as _NONE_NAME # type: ignore # noqa: E501 - - -class _Unsupported: - "Placeholder for unsupported SQLAlchemy classes" - - if TYPE_CHECKING: def compiles( @@ -97,74 +81,52 @@ def compiles( ) -> Callable[[_CompilerProtocol], _CompilerProtocol]: ... 
else: - from sqlalchemy.ext.compiler import compiles - -try: - from sqlalchemy import Computed as Computed -except ImportError: - if not TYPE_CHECKING: + from sqlalchemy.ext.compiler import compiles # noqa: I100,I202 - class Computed(_Unsupported): - pass - has_computed = False - has_computed_reflection = False -else: - has_computed = True - has_computed_reflection = _vers >= (1, 3, 16) +identity_has_dialect_kwargs = issubclass(schema.Identity, DialectKWArgs) -try: - from sqlalchemy import Identity as Identity -except ImportError: - if not TYPE_CHECKING: - class Identity(_Unsupported): - pass +def _get_identity_options_dict( + identity: Union[Identity, schema.Sequence, None], + dialect_kwargs: bool = False, +) -> Dict[str, Any]: + if identity is None: + return {} + elif identity_has_dialect_kwargs: + assert hasattr(identity, "_as_dict") + as_dict = identity._as_dict() + if dialect_kwargs: + assert isinstance(identity, DialectKWArgs) + as_dict.update(identity.dialect_kwargs) + else: + as_dict = {} + if isinstance(identity, schema.Identity): + # always=None means something different than always=False + as_dict["always"] = identity.always + if identity.on_null is not None: + as_dict["on_null"] = identity.on_null + # attributes common to Identity and Sequence + attrs = ( + "start", + "increment", + "minvalue", + "maxvalue", + "nominvalue", + "nomaxvalue", + "cycle", + "cache", + "order", + ) + as_dict.update( + { + key: getattr(identity, key, None) + for key in attrs + if getattr(identity, key, None) is not None + } + ) + return as_dict - has_identity = False -else: - identity_has_dialect_kwargs = issubclass(Identity, DialectKWArgs) - - def _get_identity_options_dict( - identity: Union[Identity, schema.Sequence, None], - dialect_kwargs: bool = False, - ) -> Dict[str, Any]: - if identity is None: - return {} - elif identity_has_dialect_kwargs: - as_dict = identity._as_dict() # type: ignore - if dialect_kwargs: - assert isinstance(identity, DialectKWArgs) - 
as_dict.update(identity.dialect_kwargs) - else: - as_dict = {} - if isinstance(identity, Identity): - # always=None means something different than always=False - as_dict["always"] = identity.always - if identity.on_null is not None: - as_dict["on_null"] = identity.on_null - # attributes common to Identity and Sequence - attrs = ( - "start", - "increment", - "minvalue", - "maxvalue", - "nominvalue", - "nomaxvalue", - "cycle", - "cache", - "order", - ) - as_dict.update( - { - key: getattr(identity, key, None) - for key in attrs - if getattr(identity, key, None) is not None - } - ) - return as_dict - - has_identity = True if sqla_2: from sqlalchemy.sql.base import _NoneName @@ -173,7 +135,6 @@ def _get_identity_options_dict( _ConstraintName = Union[None, str, _NoneName] - _ConstraintNameDefined = Union[str, _NoneName] @@ -183,15 +144,11 @@ def constraint_name_defined( return name is _NONE_NAME or isinstance(name, (str, _NoneName)) -def constraint_name_string( - name: _ConstraintName, -) -> TypeGuard[str]: +def constraint_name_string(name: _ConstraintName) -> TypeGuard[str]: return isinstance(name, str) -def constraint_name_or_none( - name: _ConstraintName, -) -> Optional[str]: +def constraint_name_or_none(name: _ConstraintName) -> Optional[str]: return name if constraint_name_string(name) else None @@ -221,17 +178,10 @@ def _ensure_scope_for_ddl( yield -def url_render_as_string(url, hide_password=True): - if sqla_14: - return url.render_as_string(hide_password=hide_password) - else: - return url.__to_string__(hide_password=hide_password) - - def _safe_begin_connection_transaction( connection: Connection, ) -> Transaction: - transaction = _get_connection_transaction(connection) + transaction = connection.get_transaction() if transaction: return transaction else: @@ -241,7 +191,7 @@ def _safe_begin_connection_transaction( def _safe_commit_connection_transaction( connection: Connection, ) -> None: - transaction = _get_connection_transaction(connection) + transaction = 
connection.get_transaction() if transaction: transaction.commit() @@ -249,7 +199,7 @@ def _safe_commit_connection_transaction( def _safe_rollback_connection_transaction( connection: Connection, ) -> None: - transaction = _get_connection_transaction(connection) + transaction = connection.get_transaction() if transaction: transaction.rollback() @@ -275,65 +225,29 @@ def _copy(schema_item: _CE, **kw) -> _CE: return schema_item.copy(**kw) # type: ignore[union-attr] -def _get_connection_transaction( - connection: Connection, -) -> Optional[Transaction]: - if sqla_14: - return connection.get_transaction() - else: - r = connection._root # type: ignore[attr-defined] - return r._Connection__transaction - - -def _create_url(*arg, **kw) -> url.URL: - if hasattr(url.URL, "create"): - return url.URL.create(*arg, **kw) - else: - return url.URL(*arg, **kw) - - def _connectable_has_table( connectable: Connection, tablename: str, schemaname: Union[str, None] ) -> bool: - if sqla_14: - return inspect(connectable).has_table(tablename, schemaname) - else: - return connectable.dialect.has_table( - connectable, tablename, schemaname - ) + return connectable.dialect.has_table(connectable, tablename, schemaname) def _exec_on_inspector(inspector, statement, **params): - if sqla_14: - with inspector._operation_context() as conn: - return conn.execute(statement, params) - else: - return inspector.bind.execute(statement, params) + with inspector._operation_context() as conn: + return conn.execute(statement, params) def _nullability_might_be_unset(metadata_column): - if not sqla_14: - return metadata_column.nullable - else: - from sqlalchemy.sql import schema + from sqlalchemy.sql import schema - return ( - metadata_column._user_defined_nullable is schema.NULL_UNSPECIFIED - ) + return metadata_column._user_defined_nullable is schema.NULL_UNSPECIFIED def _server_default_is_computed(*server_default) -> bool: - if not has_computed: - return False - else: - return any(isinstance(sd, Computed) for 
sd in server_default) + return any(isinstance(sd, schema.Computed) for sd in server_default) def _server_default_is_identity(*server_default) -> bool: - if not sqla_14: - return False - else: - return any(isinstance(sd, Identity) for sd in server_default) + return any(isinstance(sd, schema.Identity) for sd in server_default) def _table_for_constraint(constraint: Constraint) -> Table: @@ -354,15 +268,6 @@ def _columns_for_constraint(constraint): return list(constraint.columns) -def _reflect_table(inspector: Inspector, table: Table) -> None: - if sqla_14: - return inspector.reflect_table(table, None) - else: - return inspector.reflecttable( # type: ignore[attr-defined] - table, None - ) - - def _resolve_for_variant(type_, dialect): if _type_has_variants(type_): base_type, mapping = _get_variant_mapping(type_) @@ -371,7 +276,7 @@ def _resolve_for_variant(type_, dialect): return type_ -if hasattr(sqltypes.TypeEngine, "_variant_mapping"): +if hasattr(sqltypes.TypeEngine, "_variant_mapping"): # 2.0 def _type_has_variants(type_): return bool(type_._variant_mapping) @@ -388,6 +293,13 @@ def _get_variant_mapping(type_): return type_.impl, type_.mapping +def _get_table_key(name: str, schema: Optional[str]) -> str: + if schema is None: + return name + else: + return schema + "." 
+ name + + def _fk_spec(constraint: ForeignKeyConstraint) -> Any: if TYPE_CHECKING: assert constraint.columns is not None @@ -549,103 +461,32 @@ def _render_literal_bindparam( return compiler.render_literal_bindparam(element, **kw) -def _column_kwargs(col: Column) -> Mapping: - if sqla_13: - return col.kwargs - else: - return {} - - def _get_constraint_final_name( constraint: Union[Index, Constraint], dialect: Optional[Dialect] ) -> Optional[str]: if constraint.name is None: return None assert dialect is not None - if sqla_14: - # for SQLAlchemy 1.4 we would like to have the option to expand - # the use of "deferred" names for constraints as well as to have - # some flexibility with "None" name and similar; make use of new - # SQLAlchemy API to return what would be the final compiled form of - # the name for this dialect. - return dialect.identifier_preparer.format_constraint( - constraint, _alembic_quote=False - ) - else: - # prior to SQLAlchemy 1.4, work around quoting logic to get at the - # final compiled name without quotes. - if hasattr(constraint.name, "quote"): - # might be quoted_name, might be truncated_name, keep it the - # same - quoted_name_cls: type = type(constraint.name) - else: - quoted_name_cls = quoted_name - - new_name = quoted_name_cls(str(constraint.name), quote=False) - constraint = constraint.__class__(name=new_name) - - if isinstance(constraint, schema.Index): - # name should not be quoted. - d = dialect.ddl_compiler(dialect, None) # type: ignore[arg-type] - return d._prepared_index_name(constraint) - else: - # name should not be quoted. - return dialect.identifier_preparer.format_constraint(constraint) + # for SQLAlchemy 1.4 we would like to have the option to expand + # the use of "deferred" names for constraints as well as to have + # some flexibility with "None" name and similar; make use of new + # SQLAlchemy API to return what would be the final compiled form of + # the name for this dialect. 
+ return dialect.identifier_preparer.format_constraint( + constraint, _alembic_quote=False + ) def _constraint_is_named( constraint: Union[Constraint, Index], dialect: Optional[Dialect] ) -> bool: - if sqla_14: - if constraint.name is None: - return False - assert dialect is not None - name = dialect.identifier_preparer.format_constraint( - constraint, _alembic_quote=False - ) - return name is not None - else: - return constraint.name is not None - - -def _is_mariadb(mysql_dialect: Dialect) -> bool: - if sqla_14: - return mysql_dialect.is_mariadb # type: ignore[attr-defined] - else: - return bool( - mysql_dialect.server_version_info - and mysql_dialect._is_mariadb # type: ignore[attr-defined] - ) - - -def _mariadb_normalized_version_info(mysql_dialect): - return mysql_dialect._mariadb_normalized_version_info - - -def _insert_inline(table: Union[TableClause, Table]) -> Insert: - if sqla_14: - return table.insert().inline() - else: - return table.insert(inline=True) # type: ignore[call-arg] - - -if sqla_14: - from sqlalchemy import create_mock_engine - - # weird mypy workaround - from sqlalchemy import select as _sa_select - - _select = _sa_select -else: - from sqlalchemy import create_engine - - def create_mock_engine(url, executor, **kw): # type: ignore[misc] - return create_engine( - "postgresql://", strategy="mock", executor=executor - ) - - def _select(*columns, **kw) -> Select: - return sql.select(list(columns), **kw) # type: ignore[call-overload] + if constraint.name is None: + return False + assert dialect is not None + name = dialect.identifier_preparer.format_constraint( + constraint, _alembic_quote=False + ) + return name is not None def is_expression_index(index: Index) -> bool: @@ -661,3 +502,9 @@ def is_expression(expr: Any) -> bool: if not isinstance(expr, ColumnClause) or expr.is_literal: return True return False + + +def _inherit_schema_deprecated() -> bool: + # at some point in 2.1 inherit_schema was replaced with a property + # so that's preset at 
the class level, while before it wasn't. + return sqla_2_1 and hasattr(sqltypes.Enum, "inherit_schema") diff --git a/libs/aniso8601-10.0.0.dist-info/METADATA b/libs/aniso8601-10.0.0.dist-info/METADATA deleted file mode 100644 index 01ae669726..0000000000 --- a/libs/aniso8601-10.0.0.dist-info/METADATA +++ /dev/null @@ -1,509 +0,0 @@ -Metadata-Version: 2.1 -Name: aniso8601 -Version: 10.0.0 -Summary: A library for parsing ISO 8601 strings. -Home-page: https://bitbucket.org/nielsenb/aniso8601 -Author: Brandon Nielsen -Author-email: nielsenb@jetfuse.net -Project-URL: Documentation, https://aniso8601.readthedocs.io/ -Project-URL: Source, https://bitbucket.org/nielsenb/aniso8601 -Project-URL: Tracker, https://bitbucket.org/nielsenb/aniso8601/issues -Keywords: iso8601 parser -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3.13 -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Description-Content-Type: text/x-rst -License-File: LICENSE -Provides-Extra: dev -Requires-Dist: black ; extra == 'dev' -Requires-Dist: coverage ; extra == 'dev' -Requires-Dist: isort ; extra == 'dev' -Requires-Dist: pre-commit ; extra == 'dev' -Requires-Dist: pyenchant ; extra == 'dev' -Requires-Dist: pylint ; extra == 'dev' - -aniso8601 -========= - -Another ISO 8601 parser for Python ----------------------------------- - -Features -======== -* Pure Python implementation -* Logical 
behavior - - - Parse a time, get a `datetime.time `_ - - Parse a date, get a `datetime.date `_ - - Parse a datetime, get a `datetime.datetime `_ - - Parse a duration, get a `datetime.timedelta `_ - - Parse an interval, get a tuple of dates or datetimes - - Parse a repeating interval, get a date or datetime `generator `_ - -* UTC offset represented as fixed-offset tzinfo -* Parser separate from representation, allowing parsing to different datetime representations (see `Builders`_) -* No regular expressions - -Installation -============ - -The recommended installation method is to use pip:: - - $ pip install aniso8601 - -Alternatively, you can download the source (git repository hosted at `Bitbucket `_) and install directly:: - - $ python setup.py install - -Use -=== - -Parsing datetimes ------------------ - -*Consider* `datetime.datetime.fromisoformat `_ *for basic ISO 8601 datetime parsing* - -To parse a typical ISO 8601 datetime string:: - - >>> import aniso8601 - >>> aniso8601.parse_datetime('1977-06-10T12:00:00Z') - datetime.datetime(1977, 6, 10, 12, 0, tzinfo=+0:00:00 UTC) - -Alternative delimiters can be specified, for example, a space:: - - >>> aniso8601.parse_datetime('1977-06-10 12:00:00Z', delimiter=' ') - datetime.datetime(1977, 6, 10, 12, 0, tzinfo=+0:00:00 UTC) - -UTC offsets are supported:: - - >>> aniso8601.parse_datetime('1979-06-05T08:00:00-08:00') - datetime.datetime(1979, 6, 5, 8, 0, tzinfo=-8:00:00 UTC) - -If a UTC offset is not specified, the returned datetime will be naive:: - - >>> aniso8601.parse_datetime('1983-01-22T08:00:00') - datetime.datetime(1983, 1, 22, 8, 0) - -Leap seconds are currently not supported and attempting to parse one raises a :code:`LeapSecondError`:: - - >>> aniso8601.parse_datetime('2018-03-06T23:59:60') - Traceback (most recent call last): - File "", line 1, in - File "/home/nielsenb/Jetfuse/aniso8601/aniso8601/aniso8601/time.py", line 196, in parse_datetime - return builder.build_datetime(datepart, timepart) - File 
"/home/nielsenb/Jetfuse/aniso8601/aniso8601/aniso8601/builders/python.py", line 237, in build_datetime - cls._build_object(time)) - File "/home/nielsenb/Jetfuse/aniso8601/aniso8601/aniso8601/builders/__init__.py", line 336, in _build_object - return cls.build_time(hh=parsetuple.hh, mm=parsetuple.mm, - File "/home/nielsenb/Jetfuse/aniso8601/aniso8601/aniso8601/builders/python.py", line 191, in build_time - hh, mm, ss, tz = cls.range_check_time(hh, mm, ss, tz) - File "/home/nielsenb/Jetfuse/aniso8601/aniso8601/aniso8601/builders/__init__.py", line 266, in range_check_time - raise LeapSecondError('Leap seconds are not supported.') - aniso8601.exceptions.LeapSecondError: Leap seconds are not supported. - -To get the resolution of an ISO 8601 datetime string:: - - >>> aniso8601.get_datetime_resolution('1977-06-10T12:00:00Z') == aniso8601.resolution.TimeResolution.Seconds - True - >>> aniso8601.get_datetime_resolution('1977-06-10T12:00') == aniso8601.resolution.TimeResolution.Minutes - True - >>> aniso8601.get_datetime_resolution('1977-06-10T12') == aniso8601.resolution.TimeResolution.Hours - True - -Note that datetime resolutions map to :code:`TimeResolution` as a valid datetime must have at least one time member so the resolution mapping is equivalent. 
- -Parsing dates -------------- - -*Consider* `datetime.date.fromisoformat `_ *for basic ISO 8601 date parsing* - -To parse a date represented in an ISO 8601 string:: - - >>> import aniso8601 - >>> aniso8601.parse_date('1984-04-23') - datetime.date(1984, 4, 23) - -Basic format is supported as well:: - - >>> aniso8601.parse_date('19840423') - datetime.date(1984, 4, 23) - -To parse a date using the ISO 8601 week date format:: - - >>> aniso8601.parse_date('1986-W38-1') - datetime.date(1986, 9, 15) - -To parse an ISO 8601 ordinal date:: - - >>> aniso8601.parse_date('1988-132') - datetime.date(1988, 5, 11) - -To get the resolution of an ISO 8601 date string:: - - >>> aniso8601.get_date_resolution('1981-04-05') == aniso8601.resolution.DateResolution.Day - True - >>> aniso8601.get_date_resolution('1981-04') == aniso8601.resolution.DateResolution.Month - True - >>> aniso8601.get_date_resolution('1981') == aniso8601.resolution.DateResolution.Year - True - -Parsing times -------------- - -*Consider* `datetime.time.fromisoformat `_ *for basic ISO 8601 time parsing* - -To parse a time formatted as an ISO 8601 string:: - - >>> import aniso8601 - >>> aniso8601.parse_time('11:31:14') - datetime.time(11, 31, 14) - -As with all of the above, basic format is supported:: - - >>> aniso8601.parse_time('113114') - datetime.time(11, 31, 14) - -A UTC offset can be specified for times:: - - >>> aniso8601.parse_time('17:18:19-02:30') - datetime.time(17, 18, 19, tzinfo=-2:30:00 UTC) - >>> aniso8601.parse_time('171819Z') - datetime.time(17, 18, 19, tzinfo=+0:00:00 UTC) - -Reduced accuracy is supported:: - - >>> aniso8601.parse_time('21:42') - datetime.time(21, 42) - >>> aniso8601.parse_time('22') - datetime.time(22, 0) - -A decimal fraction is always allowed on the lowest order element of an ISO 8601 formatted time:: - - >>> aniso8601.parse_time('22:33.5') - datetime.time(22, 33, 30) - >>> aniso8601.parse_time('23.75') - datetime.time(23, 45) - -The decimal fraction can be specified with a 
comma instead of a full-stop:: - - >>> aniso8601.parse_time('22:33,5') - datetime.time(22, 33, 30) - >>> aniso8601.parse_time('23,75') - datetime.time(23, 45) - -Leap seconds are currently not supported and attempting to parse one raises a :code:`LeapSecondError`:: - - >>> aniso8601.parse_time('23:59:60') - Traceback (most recent call last): - File "", line 1, in - File "/home/nielsenb/Jetfuse/aniso8601/aniso8601/aniso8601/time.py", line 174, in parse_time - return builder.build_time(hh=hourstr, mm=minutestr, ss=secondstr, tz=tz) - File "/home/nielsenb/Jetfuse/aniso8601/aniso8601/aniso8601/builders/python.py", line 191, in build_time - hh, mm, ss, tz = cls.range_check_time(hh, mm, ss, tz) - File "/home/nielsenb/Jetfuse/aniso8601/aniso8601/aniso8601/builders/__init__.py", line 266, in range_check_time - raise LeapSecondError('Leap seconds are not supported.') - aniso8601.exceptions.LeapSecondError: Leap seconds are not supported. - -To get the resolution of an ISO 8601 time string:: - - >>> aniso8601.get_time_resolution('11:31:14') == aniso8601.resolution.TimeResolution.Seconds - True - >>> aniso8601.get_time_resolution('11:31') == aniso8601.resolution.TimeResolution.Minutes - True - >>> aniso8601.get_time_resolution('11') == aniso8601.resolution.TimeResolution.Hours - True - -Parsing durations ------------------ - -To parse a duration formatted as an ISO 8601 string:: - - >>> import aniso8601 - >>> aniso8601.parse_duration('P1Y2M3DT4H54M6S') - datetime.timedelta(428, 17646) - -Reduced accuracy is supported:: - - >>> aniso8601.parse_duration('P1Y') - datetime.timedelta(365) - -A decimal fraction is allowed on the lowest order element:: - - >>> aniso8601.parse_duration('P1YT3.5M') - datetime.timedelta(365, 210) - -The decimal fraction can be specified with a comma instead of a full-stop:: - - >>> aniso8601.parse_duration('P1YT3,5M') - datetime.timedelta(365, 210) - -Parsing a duration from a combined date and time is supported as well:: - - >>> 
aniso8601.parse_duration('P0001-01-02T01:30:05') - datetime.timedelta(397, 5405) - -To get the resolution of an ISO 8601 duration string:: - - >>> aniso8601.get_duration_resolution('P1Y2M3DT4H54M6S') == aniso8601.resolution.DurationResolution.Seconds - True - >>> aniso8601.get_duration_resolution('P1Y2M3DT4H54M') == aniso8601.resolution.DurationResolution.Minutes - True - >>> aniso8601.get_duration_resolution('P1Y2M3DT4H') == aniso8601.resolution.DurationResolution.Hours - True - >>> aniso8601.get_duration_resolution('P1Y2M3D') == aniso8601.resolution.DurationResolution.Days - True - >>> aniso8601.get_duration_resolution('P1Y2M') == aniso8601.resolution.DurationResolution.Months - True - >>> aniso8601.get_duration_resolution('P1Y') == aniso8601.resolution.DurationResolution.Years - True - -The default :code:`PythonTimeBuilder` assumes years are 365 days, and months are 30 days. Where calendar level accuracy is required, a `RelativeTimeBuilder `_ can be used, see also `Builders`_. - -Parsing intervals ------------------ - -To parse an interval specified by a start and end:: - - >>> import aniso8601 - >>> aniso8601.parse_interval('2007-03-01T13:00:00/2008-05-11T15:30:00') - (datetime.datetime(2007, 3, 1, 13, 0), datetime.datetime(2008, 5, 11, 15, 30)) - -Intervals specified by a start time and a duration are supported:: - - >>> aniso8601.parse_interval('2007-03-01T13:00:00Z/P1Y2M10DT2H30M') - (datetime.datetime(2007, 3, 1, 13, 0, tzinfo=+0:00:00 UTC), datetime.datetime(2008, 5, 9, 15, 30, tzinfo=+0:00:00 UTC)) - -A duration can also be specified by a duration and end time:: - - >>> aniso8601.parse_interval('P1M/1981-04-05') - (datetime.date(1981, 4, 5), datetime.date(1981, 3, 6)) - -Notice that the result of the above parse is not in order from earliest to latest. 
If sorted intervals are required, simply use the :code:`sorted` keyword as shown below:: - - >>> sorted(aniso8601.parse_interval('P1M/1981-04-05')) - [datetime.date(1981, 3, 6), datetime.date(1981, 4, 5)] - -The end of an interval is returned as a datetime when required to maintain the resolution specified by a duration, even if the duration start is given as a date:: - - >>> aniso8601.parse_interval('2014-11-12/PT4H54M6.5S') - (datetime.date(2014, 11, 12), datetime.datetime(2014, 11, 12, 4, 54, 6, 500000)) - >>> aniso8601.parse_interval('2007-03-01/P1.5D') - (datetime.date(2007, 3, 1), datetime.datetime(2007, 3, 2, 12, 0)) - -Concise representations are supported:: - - >>> aniso8601.parse_interval('2020-01-01/02') - (datetime.date(2020, 1, 1), datetime.date(2020, 1, 2)) - >>> aniso8601.parse_interval('2007-12-14T13:30/15:30') - (datetime.datetime(2007, 12, 14, 13, 30), datetime.datetime(2007, 12, 14, 15, 30)) - >>> aniso8601.parse_interval('2008-02-15/03-14') - (datetime.date(2008, 2, 15), datetime.date(2008, 3, 14)) - >>> aniso8601.parse_interval('2007-11-13T09:00/15T17:00') - (datetime.datetime(2007, 11, 13, 9, 0), datetime.datetime(2007, 11, 15, 17, 0)) - -Repeating intervals are supported as well, and return a `generator `_:: - - >>> aniso8601.parse_repeating_interval('R3/1981-04-05/P1D') - - >>> list(aniso8601.parse_repeating_interval('R3/1981-04-05/P1D')) - [datetime.date(1981, 4, 5), datetime.date(1981, 4, 6), datetime.date(1981, 4, 7)] - -Repeating intervals are allowed to go in the reverse direction:: - - >>> list(aniso8601.parse_repeating_interval('R2/PT1H2M/1980-03-05T01:01:00')) - [datetime.datetime(1980, 3, 5, 1, 1), datetime.datetime(1980, 3, 4, 23, 59)] - -Unbounded intervals are also allowed (Python 2):: - - >>> result = aniso8601.parse_repeating_interval('R/PT1H2M/1980-03-05T01:01:00') - >>> result.next() - datetime.datetime(1980, 3, 5, 1, 1) - >>> result.next() - datetime.datetime(1980, 3, 4, 23, 59) - -or for Python 3:: - - >>> result = 
aniso8601.parse_repeating_interval('R/PT1H2M/1980-03-05T01:01:00') - >>> next(result) - datetime.datetime(1980, 3, 5, 1, 1) - >>> next(result) - datetime.datetime(1980, 3, 4, 23, 59) - -Note that you should never try to convert a generator produced by an unbounded interval to a list:: - - >>> list(aniso8601.parse_repeating_interval('R/PT1H2M/1980-03-05T01:01:00')) - Traceback (most recent call last): - File "", line 1, in - File "/home/nielsenb/Jetfuse/aniso8601/aniso8601/aniso8601/builders/python.py", line 560, in _date_generator_unbounded - currentdate += timedelta - OverflowError: date value out of range - -To get the resolution of an ISO 8601 interval string:: - - >>> aniso8601.get_interval_resolution('2007-03-01T13:00:00/2008-05-11T15:30:00') == aniso8601.resolution.IntervalResolution.Seconds - True - >>> aniso8601.get_interval_resolution('2007-03-01T13:00/2008-05-11T15:30') == aniso8601.resolution.IntervalResolution.Minutes - True - >>> aniso8601.get_interval_resolution('2007-03-01T13/2008-05-11T15') == aniso8601.resolution.IntervalResolution.Hours - True - >>> aniso8601.get_interval_resolution('2007-03-01/2008-05-11') == aniso8601.resolution.IntervalResolution.Day - True - >>> aniso8601.get_interval_resolution('2007-03/P1Y') == aniso8601.resolution.IntervalResolution.Month - True - >>> aniso8601.get_interval_resolution('2007/P1Y') == aniso8601.resolution.IntervalResolution.Year - True - -And for repeating ISO 8601 interval strings:: - - >>> aniso8601.get_repeating_interval_resolution('R3/1981-04-05/P1D') == aniso8601.resolution.IntervalResolution.Day - True - >>> aniso8601.get_repeating_interval_resolution('R/PT1H2M/1980-03-05T01:01:00') == aniso8601.resolution.IntervalResolution.Seconds - True - -Builders -======== - -Builders can be used to change the output format of a parse operation. All parse functions have a :code:`builder` keyword argument which accepts a builder class. - -Two builders are included. 
The :code:`PythonTimeBuilder` (the default) in the :code:`aniso8601.builders.python` module, and the :code:`TupleBuilder` which returns the parse result as a corresponding named tuple and is located in the :code:`aniso8601.builders` module. - -Information on writing a builder can be found in `BUILDERS `_. - -The following builders are available as separate projects: - -* `RelativeTimeBuilder `_ supports parsing to `datetutil relativedelta types `_ for calendar level accuracy -* `AttoTimeBuilder `_ supports parsing directly to `attotime attodatetime and attotimedelta types `_ which support sub-nanosecond precision -* `NumPyTimeBuilder `_ supports parsing directly to `NumPy datetime64 and timedelta64 types `_ - -TupleBuilder ------------- - -The :code:`TupleBuilder` returns parse results as `named tuples `_. It is located in the :code:`aniso8601.builders` module. - -Datetimes -^^^^^^^^^ - -Parsing a datetime returns a :code:`DatetimeTuple` containing :code:`Date` and :code:`Time` tuples . The date tuple contains the following parse components: :code:`YYYY`, :code:`MM`, :code:`DD`, :code:`Www`, :code:`D`, :code:`DDD`. 
The time tuple contains the following parse components :code:`hh`, :code:`mm`, :code:`ss`, :code:`tz`, where :code:`tz` itself is a tuple with the following components :code:`negative`, :code:`Z`, :code:`hh`, :code:`mm`, :code:`name` with :code:`negative` and :code:`Z` being booleans:: - - >>> import aniso8601 - >>> from aniso8601.builders import TupleBuilder - >>> aniso8601.parse_datetime('1977-06-10T12:00:00', builder=TupleBuilder) - Datetime(date=Date(YYYY='1977', MM='06', DD='10', Www=None, D=None, DDD=None), time=Time(hh='12', mm='00', ss='00', tz=None)) - >>> aniso8601.parse_datetime('1979-06-05T08:00:00-08:00', builder=TupleBuilder) - Datetime(date=Date(YYYY='1979', MM='06', DD='05', Www=None, D=None, DDD=None), time=Time(hh='08', mm='00', ss='00', tz=Timezone(negative=True, Z=None, hh='08', mm='00', name='-08:00'))) - -Dates -^^^^^ - -Parsing a date returns a :code:`DateTuple` containing the following parse components: :code:`YYYY`, :code:`MM`, :code:`DD`, :code:`Www`, :code:`D`, :code:`DDD`:: - - >>> import aniso8601 - >>> from aniso8601.builders import TupleBuilder - >>> aniso8601.parse_date('1984-04-23', builder=TupleBuilder) - Date(YYYY='1984', MM='04', DD='23', Www=None, D=None, DDD=None) - >>> aniso8601.parse_date('1986-W38-1', builder=TupleBuilder) - Date(YYYY='1986', MM=None, DD=None, Www='38', D='1', DDD=None) - >>> aniso8601.parse_date('1988-132', builder=TupleBuilder) - Date(YYYY='1988', MM=None, DD=None, Www=None, D=None, DDD='132') - -Times -^^^^^ - -Parsing a time returns a :code:`TimeTuple` containing following parse components: :code:`hh`, :code:`mm`, :code:`ss`, :code:`tz`, where :code:`tz` is a :code:`TimezoneTuple` with the following components :code:`negative`, :code:`Z`, :code:`hh`, :code:`mm`, :code:`name`, with :code:`negative` and :code:`Z` being booleans:: - - >>> import aniso8601 - >>> from aniso8601.builders import TupleBuilder - >>> aniso8601.parse_time('11:31:14', builder=TupleBuilder) - Time(hh='11', mm='31', ss='14', tz=None) 
- >>> aniso8601.parse_time('171819Z', builder=TupleBuilder) - Time(hh='17', mm='18', ss='19', tz=Timezone(negative=False, Z=True, hh=None, mm=None, name='Z')) - >>> aniso8601.parse_time('17:18:19-02:30', builder=TupleBuilder) - Time(hh='17', mm='18', ss='19', tz=Timezone(negative=True, Z=None, hh='02', mm='30', name='-02:30')) - -Durations -^^^^^^^^^ - -Parsing a duration returns a :code:`DurationTuple` containing the following parse components: :code:`PnY`, :code:`PnM`, :code:`PnW`, :code:`PnD`, :code:`TnH`, :code:`TnM`, :code:`TnS`:: - - >>> import aniso8601 - >>> from aniso8601.builders import TupleBuilder - >>> aniso8601.parse_duration('P1Y2M3DT4H54M6S', builder=TupleBuilder) - Duration(PnY='1', PnM='2', PnW=None, PnD='3', TnH='4', TnM='54', TnS='6') - >>> aniso8601.parse_duration('P7W', builder=TupleBuilder) - Duration(PnY=None, PnM=None, PnW='7', PnD=None, TnH=None, TnM=None, TnS=None) - -Intervals -^^^^^^^^^ - -Parsing an interval returns an :code:`IntervalTuple` containing the following parse components: :code:`start`, :code:`end`, :code:`duration`, :code:`start` and :code:`end` may both be datetime or date tuples, :code:`duration` is a duration tuple:: - - >>> import aniso8601 - >>> from aniso8601.builders import TupleBuilder - >>> aniso8601.parse_interval('2007-03-01T13:00:00/2008-05-11T15:30:00', builder=TupleBuilder) - Interval(start=Datetime(date=Date(YYYY='2007', MM='03', DD='01', Www=None, D=None, DDD=None), time=Time(hh='13', mm='00', ss='00', tz=None)), end=Datetime(date=Date(YYYY='2008', MM='05', DD='11', Www=None, D=None, DDD=None), time=Time(hh='15', mm='30', ss='00', tz=None)), duration=None) - >>> aniso8601.parse_interval('2007-03-01T13:00:00Z/P1Y2M10DT2H30M', builder=TupleBuilder) - Interval(start=Datetime(date=Date(YYYY='2007', MM='03', DD='01', Www=None, D=None, DDD=None), time=Time(hh='13', mm='00', ss='00', tz=Timezone(negative=False, Z=True, hh=None, mm=None, name='Z'))), end=None, duration=Duration(PnY='1', PnM='2', PnW=None, PnD='10', 
TnH='2', TnM='30', TnS=None)) - >>> aniso8601.parse_interval('P1M/1981-04-05', builder=TupleBuilder) - Interval(start=None, end=Date(YYYY='1981', MM='04', DD='05', Www=None, D=None, DDD=None), duration=Duration(PnY=None, PnM='1', PnW=None, PnD=None, TnH=None, TnM=None, TnS=None)) - -A repeating interval returns a :code:`RepeatingIntervalTuple` containing the following parse components: :code:`R`, :code:`Rnn`, :code:`interval`, where :code:`R` is a boolean, :code:`True` for an unbounded interval, :code:`False` otherwise.:: - - >>> aniso8601.parse_repeating_interval('R3/1981-04-05/P1D', builder=TupleBuilder) - RepeatingInterval(R=False, Rnn='3', interval=Interval(start=Date(YYYY='1981', MM='04', DD='05', Www=None, D=None, DDD=None), end=None, duration=Duration(PnY=None, PnM=None, PnW=None, PnD='1', TnH=None, TnM=None, TnS=None))) - >>> aniso8601.parse_repeating_interval('R/PT1H2M/1980-03-05T01:01:00', builder=TupleBuilder) - RepeatingInterval(R=True, Rnn=None, interval=Interval(start=None, end=Datetime(date=Date(YYYY='1980', MM='03', DD='05', Www=None, D=None, DDD=None), time=Time(hh='01', mm='01', ss='00', tz=None)), duration=Duration(PnY=None, PnM=None, PnW=None, PnD=None, TnH='1', TnM='2', TnS=None))) - -Development -=========== - -Setup ------ - -It is recommended to develop using a `virtualenv `_. - -Inside a virtualenv, development dependencies can be installed automatically:: - - $ pip install -e .[dev] - -`pre-commit `_ is used for managing pre-commit hooks:: - - $ pre-commit install - -To run the pre-commit hooks manually:: - - $ pre-commit run --all-files - -Tests ------ - -Tests can be run using the `unittest testing framework `_:: - - $ python -m unittest discover aniso8601 - -Contributing -============ - -aniso8601 is an open source project hosted on `Bitbucket `_. - -Any and all bugs are welcome on our `issue tracker `_. -Of particular interest are valid ISO 8601 strings that don't parse, or invalid ones that do. 
At a minimum, -bug reports should include an example of the misbehaving string, as well as the expected result. Of course -patches containing unit tests (or fixed bugs) are welcome! - -References -========== - -* `ISO 8601:2004(E) `_ (Caution, PDF link) -* `Wikipedia article on ISO 8601 `_ -* `Discussion on alternative ISO 8601 parsers for Python `_ diff --git a/libs/aniso8601-10.0.0.dist-info/RECORD b/libs/aniso8601-10.0.0.dist-info/RECORD deleted file mode 100644 index 0133ce0c27..0000000000 --- a/libs/aniso8601-10.0.0.dist-info/RECORD +++ /dev/null @@ -1,34 +0,0 @@ -aniso8601-10.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -aniso8601-10.0.0.dist-info/LICENSE,sha256=w8yguadP0pZovZm13PAnTVO-kE3md4kW3IUnCPQHsPA,1501 -aniso8601-10.0.0.dist-info/METADATA,sha256=X3OCmIqObDsY5xQ0FnwtcbDWZ6ryJ70ADDbImMVzdck,23371 -aniso8601-10.0.0.dist-info/RECORD,, -aniso8601-10.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -aniso8601-10.0.0.dist-info/WHEEL,sha256=P2T-6epvtXQ2cBOE_U1K4_noqlJFN3tj15djMgEu4NM,110 -aniso8601-10.0.0.dist-info/top_level.txt,sha256=MVQomyeED8nGIH7PUQdMzxgLppIB48oYHtcmL17ETB0,10 -aniso8601/__init__.py,sha256=LwoL2Wj0kdYdHS3YqvGPH6SJ1HQWpsN9jKsAMHlAdwE,705 -aniso8601/builders/__init__.py,sha256=jXJ75D-QRhB8huW2GyShjh_I4Z83ua4Qgzn1DQiX-1M,17975 -aniso8601/builders/python.py,sha256=U0Tvqt4vPVcH9epfkbz_o4550yr_4_Q8SvoUdj9JAvs,22072 -aniso8601/builders/tests/__init__.py,sha256=qjC0jrTWf2UUlZtXE3AKcMFSLC2kQPOvAI36t5gc8q0,209 -aniso8601/builders/tests/test_init.py,sha256=pyES5pMJUWy16KK4MLsfzRmPRcQvj6_vxcM8yzeZxOc,29997 -aniso8601/builders/tests/test_python.py,sha256=dhkaGiE0ToMPBhUmhOAeM32aC2SDu6Rj6d4YniV2M7A,62032 -aniso8601/compat.py,sha256=CvNkC-tCr3hzv1i9VOzgiPaS2EUxCfxtfo8SkJ2Juyc,571 -aniso8601/date.py,sha256=D3Kffr6Ln_z0fNTP5KAQa0R0T00VdmZgIS_O-pVmxnI,4496 -aniso8601/decimalfraction.py,sha256=NBUies6Gp1NrVkSU_9MwJNu28OhUIeWXNpkTRcClGYA,333 
-aniso8601/duration.py,sha256=XEMm8t3Vipw3YsCgXf17XA10dHmUCQuJo4-PX4z8U7I,9550 -aniso8601/exceptions.py,sha256=uCQIrrIpCJV-n3WDUuhB3wMdgmHuu1UqOXrE9ct3xPY,1313 -aniso8601/interval.py,sha256=ruz49D2BoyoyYggM2oLrz0HSB8CIPxdl4LZCvwq3kCg,10752 -aniso8601/resolution.py,sha256=RJGfir0k6IiR3L1ZCrPvjhsQliVT_ZOcaeKqJqH6JHM,684 -aniso8601/tests/__init__.py,sha256=qjC0jrTWf2UUlZtXE3AKcMFSLC2kQPOvAI36t5gc8q0,209 -aniso8601/tests/compat.py,sha256=J4Ocl6zpo7WmA_pItOc2KUH8xZU156L9fTbCBBFX9xY,346 -aniso8601/tests/test_compat.py,sha256=55qZ_Uu7XRQo8hXNDUjy-IB3WaTqATnVtMlAJP4TGr4,763 -aniso8601/tests/test_date.py,sha256=bJy2ve-iFmWlBCWDidfK2lB-7OBMjeATfK3SBFSet4U,9266 -aniso8601/tests/test_decimalfraction.py,sha256=bvPPSuWnS2XfD10wBUjg5nFFbI1kfBshR2Wkmfggu6I,578 -aniso8601/tests/test_duration.py,sha256=QRLpd_bdgEzHjcLkLMFMWnGF4bN_YxPDdtwACtBekc4,44952 -aniso8601/tests/test_init.py,sha256=1GF0Yms8adNM3Ax13w2ncSz6toHhK3pRkPYoNaRcPVk,1689 -aniso8601/tests/test_interval.py,sha256=GjeU8HrIT-klpselgsgMcF1KfuV7sDKLlyVU5S4XMCQ,60457 -aniso8601/tests/test_time.py,sha256=SS0jXkEl5bYFSe1qFPpjZ0GNXycgBMH16Uc885ujR7I,19147 -aniso8601/tests/test_timezone.py,sha256=EJk2cTsHddhe2Pqtzl250gKF8XoXynEAngNctk23b48,4649 -aniso8601/tests/test_utcoffset.py,sha256=wQ7ivBqax2KP340tlC0DBxM7DTK9SNy_Zq_13FqeaKM,1926 -aniso8601/time.py,sha256=CZRisJz6u7fBfMthZvNcUuVFSL_GCYi9WEjXsbrnDF8,5687 -aniso8601/timezone.py,sha256=xpukG_AuvyMNGs57y4bf40eHRcpe3b1fKC8x-H8Epo8,2124 -aniso8601/utcoffset.py,sha256=dm7-eFl6WQFPpDamcTVl46aEjmGObpWJdSZJ-QzblfU,2421 diff --git a/libs/aniso8601-10.0.0.dist-info/WHEEL b/libs/aniso8601-10.0.0.dist-info/WHEEL deleted file mode 100644 index f31e450fda..0000000000 --- a/libs/aniso8601-10.0.0.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.41.3) -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any - diff --git a/libs/Flask_SocketIO-5.5.1.dist-info/INSTALLER b/libs/aniso8601-10.0.1.dist-info/INSTALLER similarity index 100% rename from 
libs/Flask_SocketIO-5.5.1.dist-info/INSTALLER rename to libs/aniso8601-10.0.1.dist-info/INSTALLER diff --git a/libs/aniso8601-10.0.1.dist-info/METADATA b/libs/aniso8601-10.0.1.dist-info/METADATA new file mode 100644 index 0000000000..02d6daf939 --- /dev/null +++ b/libs/aniso8601-10.0.1.dist-info/METADATA @@ -0,0 +1,521 @@ +Metadata-Version: 2.4 +Name: aniso8601 +Version: 10.0.1 +Summary: A library for parsing ISO 8601 strings. +Home-page: https://codeberg.org/nielsenb-jf/aniso8601 +Author: Brandon Nielsen +Author-email: nielsenb@jetfuse.net +Project-URL: Changelog, https://codeberg.org/nielsenb-jf/aniso8601/src/branch/main/CHANGELOG.rst +Project-URL: Documentation, https://aniso8601.readthedocs.io/ +Project-URL: Source, https://codeberg.org/nielsenb-jf/aniso8601 +Project-URL: Tracker, https://codeberg.org/nielsenb-jf/aniso8601/issues +Keywords: iso8601 parser +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Description-Content-Type: text/x-rst +License-File: LICENSE +Provides-Extra: dev +Requires-Dist: black; extra == "dev" +Requires-Dist: coverage; extra == "dev" +Requires-Dist: isort; extra == "dev" +Requires-Dist: pre-commit; extra == "dev" +Requires-Dist: pyenchant; extra == "dev" +Requires-Dist: pylint; extra == "dev" +Dynamic: author +Dynamic: author-email +Dynamic: classifier +Dynamic: 
description +Dynamic: description-content-type +Dynamic: home-page +Dynamic: keywords +Dynamic: license-file +Dynamic: project-url +Dynamic: provides-extra +Dynamic: summary + +aniso8601 +========= + +Another ISO 8601 parser for Python +---------------------------------- + +Features +======== +* Pure Python implementation +* Logical behavior + + - Parse a time, get a `datetime.time `_ + - Parse a date, get a `datetime.date `_ + - Parse a datetime, get a `datetime.datetime `_ + - Parse a duration, get a `datetime.timedelta `_ + - Parse an interval, get a tuple of dates or datetimes + - Parse a repeating interval, get a date or datetime `generator `_ + +* UTC offset represented as fixed-offset tzinfo +* Parser separate from representation, allowing parsing to different datetime representations (see `Builders`_) +* No regular expressions + +Installation +============ + +The recommended installation method is to use pip:: + + $ pip install aniso8601 + +Alternatively, you can download the source (git repository hosted at `Codeberg `_) and install directly:: + + $ python setup.py install + +Use +=== + +Parsing datetimes +----------------- + +*Consider* `datetime.datetime.fromisoformat `_ *for basic ISO 8601 datetime parsing* + +To parse a typical ISO 8601 datetime string:: + + >>> import aniso8601 + >>> aniso8601.parse_datetime('1977-06-10T12:00:00Z') + datetime.datetime(1977, 6, 10, 12, 0, tzinfo=+0:00:00 UTC) + +Alternative delimiters can be specified, for example, a space:: + + >>> aniso8601.parse_datetime('1977-06-10 12:00:00Z', delimiter=' ') + datetime.datetime(1977, 6, 10, 12, 0, tzinfo=+0:00:00 UTC) + +UTC offsets are supported:: + + >>> aniso8601.parse_datetime('1979-06-05T08:00:00-08:00') + datetime.datetime(1979, 6, 5, 8, 0, tzinfo=-8:00:00 UTC) + +If a UTC offset is not specified, the returned datetime will be naive:: + + >>> aniso8601.parse_datetime('1983-01-22T08:00:00') + datetime.datetime(1983, 1, 22, 8, 0) + +Leap seconds are currently not supported and 
attempting to parse one raises a :code:`LeapSecondError`:: + + >>> aniso8601.parse_datetime('2018-03-06T23:59:60') + Traceback (most recent call last): + File "", line 1, in + File "/home/nielsenb/Jetfuse/aniso8601/aniso8601/aniso8601/time.py", line 196, in parse_datetime + return builder.build_datetime(datepart, timepart) + File "/home/nielsenb/Jetfuse/aniso8601/aniso8601/aniso8601/builders/python.py", line 237, in build_datetime + cls._build_object(time)) + File "/home/nielsenb/Jetfuse/aniso8601/aniso8601/aniso8601/builders/__init__.py", line 336, in _build_object + return cls.build_time(hh=parsetuple.hh, mm=parsetuple.mm, + File "/home/nielsenb/Jetfuse/aniso8601/aniso8601/aniso8601/builders/python.py", line 191, in build_time + hh, mm, ss, tz = cls.range_check_time(hh, mm, ss, tz) + File "/home/nielsenb/Jetfuse/aniso8601/aniso8601/aniso8601/builders/__init__.py", line 266, in range_check_time + raise LeapSecondError('Leap seconds are not supported.') + aniso8601.exceptions.LeapSecondError: Leap seconds are not supported. + +To get the resolution of an ISO 8601 datetime string:: + + >>> aniso8601.get_datetime_resolution('1977-06-10T12:00:00Z') == aniso8601.resolution.TimeResolution.Seconds + True + >>> aniso8601.get_datetime_resolution('1977-06-10T12:00') == aniso8601.resolution.TimeResolution.Minutes + True + >>> aniso8601.get_datetime_resolution('1977-06-10T12') == aniso8601.resolution.TimeResolution.Hours + True + +Note that datetime resolutions map to :code:`TimeResolution` as a valid datetime must have at least one time member so the resolution mapping is equivalent. 
+ +Parsing dates +------------- + +*Consider* `datetime.date.fromisoformat `_ *for basic ISO 8601 date parsing* + +To parse a date represented in an ISO 8601 string:: + + >>> import aniso8601 + >>> aniso8601.parse_date('1984-04-23') + datetime.date(1984, 4, 23) + +Basic format is supported as well:: + + >>> aniso8601.parse_date('19840423') + datetime.date(1984, 4, 23) + +To parse a date using the ISO 8601 week date format:: + + >>> aniso8601.parse_date('1986-W38-1') + datetime.date(1986, 9, 15) + +To parse an ISO 8601 ordinal date:: + + >>> aniso8601.parse_date('1988-132') + datetime.date(1988, 5, 11) + +To get the resolution of an ISO 8601 date string:: + + >>> aniso8601.get_date_resolution('1981-04-05') == aniso8601.resolution.DateResolution.Day + True + >>> aniso8601.get_date_resolution('1981-04') == aniso8601.resolution.DateResolution.Month + True + >>> aniso8601.get_date_resolution('1981') == aniso8601.resolution.DateResolution.Year + True + +Parsing times +------------- + +*Consider* `datetime.time.fromisoformat `_ *for basic ISO 8601 time parsing* + +To parse a time formatted as an ISO 8601 string:: + + >>> import aniso8601 + >>> aniso8601.parse_time('11:31:14') + datetime.time(11, 31, 14) + +As with all of the above, basic format is supported:: + + >>> aniso8601.parse_time('113114') + datetime.time(11, 31, 14) + +A UTC offset can be specified for times:: + + >>> aniso8601.parse_time('17:18:19-02:30') + datetime.time(17, 18, 19, tzinfo=-2:30:00 UTC) + >>> aniso8601.parse_time('171819Z') + datetime.time(17, 18, 19, tzinfo=+0:00:00 UTC) + +Reduced accuracy is supported:: + + >>> aniso8601.parse_time('21:42') + datetime.time(21, 42) + >>> aniso8601.parse_time('22') + datetime.time(22, 0) + +A decimal fraction is always allowed on the lowest order element of an ISO 8601 formatted time:: + + >>> aniso8601.parse_time('22:33.5') + datetime.time(22, 33, 30) + >>> aniso8601.parse_time('23.75') + datetime.time(23, 45) + +The decimal fraction can be specified with a 
comma instead of a full-stop:: + + >>> aniso8601.parse_time('22:33,5') + datetime.time(22, 33, 30) + >>> aniso8601.parse_time('23,75') + datetime.time(23, 45) + +Leap seconds are currently not supported and attempting to parse one raises a :code:`LeapSecondError`:: + + >>> aniso8601.parse_time('23:59:60') + Traceback (most recent call last): + File "", line 1, in + File "/home/nielsenb/Jetfuse/aniso8601/aniso8601/aniso8601/time.py", line 174, in parse_time + return builder.build_time(hh=hourstr, mm=minutestr, ss=secondstr, tz=tz) + File "/home/nielsenb/Jetfuse/aniso8601/aniso8601/aniso8601/builders/python.py", line 191, in build_time + hh, mm, ss, tz = cls.range_check_time(hh, mm, ss, tz) + File "/home/nielsenb/Jetfuse/aniso8601/aniso8601/aniso8601/builders/__init__.py", line 266, in range_check_time + raise LeapSecondError('Leap seconds are not supported.') + aniso8601.exceptions.LeapSecondError: Leap seconds are not supported. + +To get the resolution of an ISO 8601 time string:: + + >>> aniso8601.get_time_resolution('11:31:14') == aniso8601.resolution.TimeResolution.Seconds + True + >>> aniso8601.get_time_resolution('11:31') == aniso8601.resolution.TimeResolution.Minutes + True + >>> aniso8601.get_time_resolution('11') == aniso8601.resolution.TimeResolution.Hours + True + +Parsing durations +----------------- + +To parse a duration formatted as an ISO 8601 string:: + + >>> import aniso8601 + >>> aniso8601.parse_duration('P1Y2M3DT4H54M6S') + datetime.timedelta(428, 17646) + +Reduced accuracy is supported:: + + >>> aniso8601.parse_duration('P1Y') + datetime.timedelta(365) + +A decimal fraction is allowed on the lowest order element:: + + >>> aniso8601.parse_duration('P1YT3.5M') + datetime.timedelta(365, 210) + +The decimal fraction can be specified with a comma instead of a full-stop:: + + >>> aniso8601.parse_duration('P1YT3,5M') + datetime.timedelta(365, 210) + +Parsing a duration from a combined date and time is supported as well:: + + >>> 
aniso8601.parse_duration('P0001-01-02T01:30:05') + datetime.timedelta(397, 5405) + +To get the resolution of an ISO 8601 duration string:: + + >>> aniso8601.get_duration_resolution('P1Y2M3DT4H54M6S') == aniso8601.resolution.DurationResolution.Seconds + True + >>> aniso8601.get_duration_resolution('P1Y2M3DT4H54M') == aniso8601.resolution.DurationResolution.Minutes + True + >>> aniso8601.get_duration_resolution('P1Y2M3DT4H') == aniso8601.resolution.DurationResolution.Hours + True + >>> aniso8601.get_duration_resolution('P1Y2M3D') == aniso8601.resolution.DurationResolution.Days + True + >>> aniso8601.get_duration_resolution('P1Y2M') == aniso8601.resolution.DurationResolution.Months + True + >>> aniso8601.get_duration_resolution('P1Y') == aniso8601.resolution.DurationResolution.Years + True + +The default :code:`PythonTimeBuilder` assumes years are 365 days, and months are 30 days. Where calendar level accuracy is required, a `RelativeTimeBuilder `_ can be used, see also `Builders`_. + +Parsing intervals +----------------- + +To parse an interval specified by a start and end:: + + >>> import aniso8601 + >>> aniso8601.parse_interval('2007-03-01T13:00:00/2008-05-11T15:30:00') + (datetime.datetime(2007, 3, 1, 13, 0), datetime.datetime(2008, 5, 11, 15, 30)) + +Intervals specified by a start time and a duration are supported:: + + >>> aniso8601.parse_interval('2007-03-01T13:00:00Z/P1Y2M10DT2H30M') + (datetime.datetime(2007, 3, 1, 13, 0, tzinfo=+0:00:00 UTC), datetime.datetime(2008, 5, 9, 15, 30, tzinfo=+0:00:00 UTC)) + +A duration can also be specified by a duration and end time:: + + >>> aniso8601.parse_interval('P1M/1981-04-05') + (datetime.date(1981, 4, 5), datetime.date(1981, 3, 6)) + +Notice that the result of the above parse is not in order from earliest to latest. 
If sorted intervals are required, simply use the :code:`sorted` keyword as shown below:: + + >>> sorted(aniso8601.parse_interval('P1M/1981-04-05')) + [datetime.date(1981, 3, 6), datetime.date(1981, 4, 5)] + +The end of an interval is returned as a datetime when required to maintain the resolution specified by a duration, even if the duration start is given as a date:: + + >>> aniso8601.parse_interval('2014-11-12/PT4H54M6.5S') + (datetime.date(2014, 11, 12), datetime.datetime(2014, 11, 12, 4, 54, 6, 500000)) + >>> aniso8601.parse_interval('2007-03-01/P1.5D') + (datetime.date(2007, 3, 1), datetime.datetime(2007, 3, 2, 12, 0)) + +Concise representations are supported:: + + >>> aniso8601.parse_interval('2020-01-01/02') + (datetime.date(2020, 1, 1), datetime.date(2020, 1, 2)) + >>> aniso8601.parse_interval('2007-12-14T13:30/15:30') + (datetime.datetime(2007, 12, 14, 13, 30), datetime.datetime(2007, 12, 14, 15, 30)) + >>> aniso8601.parse_interval('2008-02-15/03-14') + (datetime.date(2008, 2, 15), datetime.date(2008, 3, 14)) + >>> aniso8601.parse_interval('2007-11-13T09:00/15T17:00') + (datetime.datetime(2007, 11, 13, 9, 0), datetime.datetime(2007, 11, 15, 17, 0)) + +Repeating intervals are supported as well, and return a `generator `_:: + + >>> aniso8601.parse_repeating_interval('R3/1981-04-05/P1D') + + >>> list(aniso8601.parse_repeating_interval('R3/1981-04-05/P1D')) + [datetime.date(1981, 4, 5), datetime.date(1981, 4, 6), datetime.date(1981, 4, 7)] + +Repeating intervals are allowed to go in the reverse direction:: + + >>> list(aniso8601.parse_repeating_interval('R2/PT1H2M/1980-03-05T01:01:00')) + [datetime.datetime(1980, 3, 5, 1, 1), datetime.datetime(1980, 3, 4, 23, 59)] + +Unbounded intervals are also allowed (Python 2):: + + >>> result = aniso8601.parse_repeating_interval('R/PT1H2M/1980-03-05T01:01:00') + >>> result.next() + datetime.datetime(1980, 3, 5, 1, 1) + >>> result.next() + datetime.datetime(1980, 3, 4, 23, 59) + +or for Python 3:: + + >>> result = 
aniso8601.parse_repeating_interval('R/PT1H2M/1980-03-05T01:01:00') + >>> next(result) + datetime.datetime(1980, 3, 5, 1, 1) + >>> next(result) + datetime.datetime(1980, 3, 4, 23, 59) + +Note that you should never try to convert a generator produced by an unbounded interval to a list:: + + >>> list(aniso8601.parse_repeating_interval('R/PT1H2M/1980-03-05T01:01:00')) + Traceback (most recent call last): + File "", line 1, in + File "/home/nielsenb/Jetfuse/aniso8601/aniso8601/aniso8601/builders/python.py", line 560, in _date_generator_unbounded + currentdate += timedelta + OverflowError: date value out of range + +To get the resolution of an ISO 8601 interval string:: + + >>> aniso8601.get_interval_resolution('2007-03-01T13:00:00/2008-05-11T15:30:00') == aniso8601.resolution.IntervalResolution.Seconds + True + >>> aniso8601.get_interval_resolution('2007-03-01T13:00/2008-05-11T15:30') == aniso8601.resolution.IntervalResolution.Minutes + True + >>> aniso8601.get_interval_resolution('2007-03-01T13/2008-05-11T15') == aniso8601.resolution.IntervalResolution.Hours + True + >>> aniso8601.get_interval_resolution('2007-03-01/2008-05-11') == aniso8601.resolution.IntervalResolution.Day + True + >>> aniso8601.get_interval_resolution('2007-03/P1Y') == aniso8601.resolution.IntervalResolution.Month + True + >>> aniso8601.get_interval_resolution('2007/P1Y') == aniso8601.resolution.IntervalResolution.Year + True + +And for repeating ISO 8601 interval strings:: + + >>> aniso8601.get_repeating_interval_resolution('R3/1981-04-05/P1D') == aniso8601.resolution.IntervalResolution.Day + True + >>> aniso8601.get_repeating_interval_resolution('R/PT1H2M/1980-03-05T01:01:00') == aniso8601.resolution.IntervalResolution.Seconds + True + +Builders +======== + +Builders can be used to change the output format of a parse operation. All parse functions have a :code:`builder` keyword argument which accepts a builder class. + +Two builders are included. 
The :code:`PythonTimeBuilder` (the default) in the :code:`aniso8601.builders.python` module, and the :code:`TupleBuilder` which returns the parse result as a corresponding named tuple and is located in the :code:`aniso8601.builders` module. + +Information on writing a builder can be found in `BUILDERS `_. + +The following builders are available as separate projects: + +* `RelativeTimeBuilder `_ supports parsing to `datetutil relativedelta types `_ for calendar level accuracy +* `AttoTimeBuilder `_ supports parsing directly to `attotime attodatetime and attotimedelta types `_ which support sub-nanosecond precision +* `NumPyTimeBuilder `_ supports parsing directly to `NumPy datetime64 and timedelta64 types `_ + +TupleBuilder +------------ + +The :code:`TupleBuilder` returns parse results as `named tuples `_. It is located in the :code:`aniso8601.builders` module. + +Datetimes +^^^^^^^^^ + +Parsing a datetime returns a :code:`DatetimeTuple` containing :code:`Date` and :code:`Time` tuples . The date tuple contains the following parse components: :code:`YYYY`, :code:`MM`, :code:`DD`, :code:`Www`, :code:`D`, :code:`DDD`. 
The time tuple contains the following parse components :code:`hh`, :code:`mm`, :code:`ss`, :code:`tz`, where :code:`tz` itself is a tuple with the following components :code:`negative`, :code:`Z`, :code:`hh`, :code:`mm`, :code:`name` with :code:`negative` and :code:`Z` being booleans:: + + >>> import aniso8601 + >>> from aniso8601.builders import TupleBuilder + >>> aniso8601.parse_datetime('1977-06-10T12:00:00', builder=TupleBuilder) + Datetime(date=Date(YYYY='1977', MM='06', DD='10', Www=None, D=None, DDD=None), time=Time(hh='12', mm='00', ss='00', tz=None)) + >>> aniso8601.parse_datetime('1979-06-05T08:00:00-08:00', builder=TupleBuilder) + Datetime(date=Date(YYYY='1979', MM='06', DD='05', Www=None, D=None, DDD=None), time=Time(hh='08', mm='00', ss='00', tz=Timezone(negative=True, Z=None, hh='08', mm='00', name='-08:00'))) + +Dates +^^^^^ + +Parsing a date returns a :code:`DateTuple` containing the following parse components: :code:`YYYY`, :code:`MM`, :code:`DD`, :code:`Www`, :code:`D`, :code:`DDD`:: + + >>> import aniso8601 + >>> from aniso8601.builders import TupleBuilder + >>> aniso8601.parse_date('1984-04-23', builder=TupleBuilder) + Date(YYYY='1984', MM='04', DD='23', Www=None, D=None, DDD=None) + >>> aniso8601.parse_date('1986-W38-1', builder=TupleBuilder) + Date(YYYY='1986', MM=None, DD=None, Www='38', D='1', DDD=None) + >>> aniso8601.parse_date('1988-132', builder=TupleBuilder) + Date(YYYY='1988', MM=None, DD=None, Www=None, D=None, DDD='132') + +Times +^^^^^ + +Parsing a time returns a :code:`TimeTuple` containing following parse components: :code:`hh`, :code:`mm`, :code:`ss`, :code:`tz`, where :code:`tz` is a :code:`TimezoneTuple` with the following components :code:`negative`, :code:`Z`, :code:`hh`, :code:`mm`, :code:`name`, with :code:`negative` and :code:`Z` being booleans:: + + >>> import aniso8601 + >>> from aniso8601.builders import TupleBuilder + >>> aniso8601.parse_time('11:31:14', builder=TupleBuilder) + Time(hh='11', mm='31', ss='14', tz=None) 
+ >>> aniso8601.parse_time('171819Z', builder=TupleBuilder) + Time(hh='17', mm='18', ss='19', tz=Timezone(negative=False, Z=True, hh=None, mm=None, name='Z')) + >>> aniso8601.parse_time('17:18:19-02:30', builder=TupleBuilder) + Time(hh='17', mm='18', ss='19', tz=Timezone(negative=True, Z=None, hh='02', mm='30', name='-02:30')) + +Durations +^^^^^^^^^ + +Parsing a duration returns a :code:`DurationTuple` containing the following parse components: :code:`PnY`, :code:`PnM`, :code:`PnW`, :code:`PnD`, :code:`TnH`, :code:`TnM`, :code:`TnS`:: + + >>> import aniso8601 + >>> from aniso8601.builders import TupleBuilder + >>> aniso8601.parse_duration('P1Y2M3DT4H54M6S', builder=TupleBuilder) + Duration(PnY='1', PnM='2', PnW=None, PnD='3', TnH='4', TnM='54', TnS='6') + >>> aniso8601.parse_duration('P7W', builder=TupleBuilder) + Duration(PnY=None, PnM=None, PnW='7', PnD=None, TnH=None, TnM=None, TnS=None) + +Intervals +^^^^^^^^^ + +Parsing an interval returns an :code:`IntervalTuple` containing the following parse components: :code:`start`, :code:`end`, :code:`duration`, :code:`start` and :code:`end` may both be datetime or date tuples, :code:`duration` is a duration tuple:: + + >>> import aniso8601 + >>> from aniso8601.builders import TupleBuilder + >>> aniso8601.parse_interval('2007-03-01T13:00:00/2008-05-11T15:30:00', builder=TupleBuilder) + Interval(start=Datetime(date=Date(YYYY='2007', MM='03', DD='01', Www=None, D=None, DDD=None), time=Time(hh='13', mm='00', ss='00', tz=None)), end=Datetime(date=Date(YYYY='2008', MM='05', DD='11', Www=None, D=None, DDD=None), time=Time(hh='15', mm='30', ss='00', tz=None)), duration=None) + >>> aniso8601.parse_interval('2007-03-01T13:00:00Z/P1Y2M10DT2H30M', builder=TupleBuilder) + Interval(start=Datetime(date=Date(YYYY='2007', MM='03', DD='01', Www=None, D=None, DDD=None), time=Time(hh='13', mm='00', ss='00', tz=Timezone(negative=False, Z=True, hh=None, mm=None, name='Z'))), end=None, duration=Duration(PnY='1', PnM='2', PnW=None, PnD='10', 
TnH='2', TnM='30', TnS=None)) + >>> aniso8601.parse_interval('P1M/1981-04-05', builder=TupleBuilder) + Interval(start=None, end=Date(YYYY='1981', MM='04', DD='05', Www=None, D=None, DDD=None), duration=Duration(PnY=None, PnM='1', PnW=None, PnD=None, TnH=None, TnM=None, TnS=None)) + +A repeating interval returns a :code:`RepeatingIntervalTuple` containing the following parse components: :code:`R`, :code:`Rnn`, :code:`interval`, where :code:`R` is a boolean, :code:`True` for an unbounded interval, :code:`False` otherwise.:: + + >>> aniso8601.parse_repeating_interval('R3/1981-04-05/P1D', builder=TupleBuilder) + RepeatingInterval(R=False, Rnn='3', interval=Interval(start=Date(YYYY='1981', MM='04', DD='05', Www=None, D=None, DDD=None), end=None, duration=Duration(PnY=None, PnM=None, PnW=None, PnD='1', TnH=None, TnM=None, TnS=None))) + >>> aniso8601.parse_repeating_interval('R/PT1H2M/1980-03-05T01:01:00', builder=TupleBuilder) + RepeatingInterval(R=True, Rnn=None, interval=Interval(start=None, end=Datetime(date=Date(YYYY='1980', MM='03', DD='05', Www=None, D=None, DDD=None), time=Time(hh='01', mm='01', ss='00', tz=None)), duration=Duration(PnY=None, PnM=None, PnW=None, PnD=None, TnH='1', TnM='2', TnS=None))) + +Development +=========== + +Setup +----- + +It is recommended to develop using a `virtualenv `_. + +Inside a virtualenv, development dependencies can be installed automatically:: + + $ pip install -e .[dev] + +`pre-commit `_ is used for managing pre-commit hooks:: + + $ pre-commit install + +To run the pre-commit hooks manually:: + + $ pre-commit run --all-files + +Tests +----- + +Tests can be run using the `unittest testing framework `_:: + + $ python -m unittest discover aniso8601 + +Contributing +============ + +aniso8601 is an open source project hosted on `Codeberg `_. + +Any and all bugs are welcome on our `issue tracker `_. +Of particular interest are valid ISO 8601 strings that don't parse, or invalid ones that do. 
At a minimum, +bug reports should include an example of the misbehaving string, as well as the expected result. Of course +patches containing unit tests (or fixed bugs) are welcome! + +References +========== + +* `ISO 8601:2004(E) `_ (Caution, PDF link) +* `Wikipedia article on ISO 8601 `_ +* `Discussion on alternative ISO 8601 parsers for Python `_ diff --git a/libs/aniso8601-10.0.1.dist-info/RECORD b/libs/aniso8601-10.0.1.dist-info/RECORD new file mode 100644 index 0000000000..33f31ea27c --- /dev/null +++ b/libs/aniso8601-10.0.1.dist-info/RECORD @@ -0,0 +1,34 @@ +aniso8601-10.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +aniso8601-10.0.1.dist-info/METADATA,sha256=bhCfpaJ_wz-09tdv4oh4b51DnxocRpDJukuyrI4ZYzE,23716 +aniso8601-10.0.1.dist-info/RECORD,, +aniso8601-10.0.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +aniso8601-10.0.1.dist-info/WHEEL,sha256=Mk1ST5gDzEO5il5kYREiBnzzM469m5sI8ESPl7TRhJY,110 +aniso8601-10.0.1.dist-info/licenses/LICENSE,sha256=w8yguadP0pZovZm13PAnTVO-kE3md4kW3IUnCPQHsPA,1501 +aniso8601-10.0.1.dist-info/top_level.txt,sha256=MVQomyeED8nGIH7PUQdMzxgLppIB48oYHtcmL17ETB0,10 +aniso8601/__init__.py,sha256=_EfRSVz4e-QRXKqc9r9WkeEI-vfWL7NCUnvTyEJ6X3U,705 +aniso8601/builders/__init__.py,sha256=jXJ75D-QRhB8huW2GyShjh_I4Z83ua4Qgzn1DQiX-1M,17975 +aniso8601/builders/python.py,sha256=qhyXMeXMsRQsJHJNee-0LofIUj33mpHU4BLZ0fijLME,22110 +aniso8601/builders/tests/__init__.py,sha256=qjC0jrTWf2UUlZtXE3AKcMFSLC2kQPOvAI36t5gc8q0,209 +aniso8601/builders/tests/test_init.py,sha256=pyES5pMJUWy16KK4MLsfzRmPRcQvj6_vxcM8yzeZxOc,29997 +aniso8601/builders/tests/test_python.py,sha256=pd8Bz6dc4c_Fvm8YWMcKJfojoRImQHp2RLqMEN9FX0E,61424 +aniso8601/compat.py,sha256=CvNkC-tCr3hzv1i9VOzgiPaS2EUxCfxtfo8SkJ2Juyc,571 +aniso8601/date.py,sha256=D3Kffr6Ln_z0fNTP5KAQa0R0T00VdmZgIS_O-pVmxnI,4496 +aniso8601/decimalfraction.py,sha256=NBUies6Gp1NrVkSU_9MwJNu28OhUIeWXNpkTRcClGYA,333 
+aniso8601/duration.py,sha256=XEMm8t3Vipw3YsCgXf17XA10dHmUCQuJo4-PX4z8U7I,9550 +aniso8601/exceptions.py,sha256=uCQIrrIpCJV-n3WDUuhB3wMdgmHuu1UqOXrE9ct3xPY,1313 +aniso8601/interval.py,sha256=ruz49D2BoyoyYggM2oLrz0HSB8CIPxdl4LZCvwq3kCg,10752 +aniso8601/resolution.py,sha256=RJGfir0k6IiR3L1ZCrPvjhsQliVT_ZOcaeKqJqH6JHM,684 +aniso8601/tests/__init__.py,sha256=qjC0jrTWf2UUlZtXE3AKcMFSLC2kQPOvAI36t5gc8q0,209 +aniso8601/tests/compat.py,sha256=J4Ocl6zpo7WmA_pItOc2KUH8xZU156L9fTbCBBFX9xY,346 +aniso8601/tests/test_compat.py,sha256=55qZ_Uu7XRQo8hXNDUjy-IB3WaTqATnVtMlAJP4TGr4,763 +aniso8601/tests/test_date.py,sha256=bJy2ve-iFmWlBCWDidfK2lB-7OBMjeATfK3SBFSet4U,9266 +aniso8601/tests/test_decimalfraction.py,sha256=bvPPSuWnS2XfD10wBUjg5nFFbI1kfBshR2Wkmfggu6I,578 +aniso8601/tests/test_duration.py,sha256=QRLpd_bdgEzHjcLkLMFMWnGF4bN_YxPDdtwACtBekc4,44952 +aniso8601/tests/test_init.py,sha256=1GF0Yms8adNM3Ax13w2ncSz6toHhK3pRkPYoNaRcPVk,1689 +aniso8601/tests/test_interval.py,sha256=GjeU8HrIT-klpselgsgMcF1KfuV7sDKLlyVU5S4XMCQ,60457 +aniso8601/tests/test_time.py,sha256=SS0jXkEl5bYFSe1qFPpjZ0GNXycgBMH16Uc885ujR7I,19147 +aniso8601/tests/test_timezone.py,sha256=EJk2cTsHddhe2Pqtzl250gKF8XoXynEAngNctk23b48,4649 +aniso8601/tests/test_utcoffset.py,sha256=wQ7ivBqax2KP340tlC0DBxM7DTK9SNy_Zq_13FqeaKM,1926 +aniso8601/time.py,sha256=CZRisJz6u7fBfMthZvNcUuVFSL_GCYi9WEjXsbrnDF8,5687 +aniso8601/timezone.py,sha256=xpukG_AuvyMNGs57y4bf40eHRcpe3b1fKC8x-H8Epo8,2124 +aniso8601/utcoffset.py,sha256=dm7-eFl6WQFPpDamcTVl46aEjmGObpWJdSZJ-QzblfU,2421 diff --git a/libs/Flask_SocketIO-5.5.1.dist-info/REQUESTED b/libs/aniso8601-10.0.1.dist-info/REQUESTED similarity index 100% rename from libs/Flask_SocketIO-5.5.1.dist-info/REQUESTED rename to libs/aniso8601-10.0.1.dist-info/REQUESTED diff --git a/libs/aniso8601-10.0.1.dist-info/WHEEL b/libs/aniso8601-10.0.1.dist-info/WHEEL new file mode 100644 index 0000000000..798dd6195b --- /dev/null +++ b/libs/aniso8601-10.0.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 
+Generator: setuptools (80.10.2) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/libs/aniso8601-10.0.0.dist-info/LICENSE b/libs/aniso8601-10.0.1.dist-info/licenses/LICENSE similarity index 100% rename from libs/aniso8601-10.0.0.dist-info/LICENSE rename to libs/aniso8601-10.0.1.dist-info/licenses/LICENSE diff --git a/libs/aniso8601-10.0.0.dist-info/top_level.txt b/libs/aniso8601-10.0.1.dist-info/top_level.txt similarity index 100% rename from libs/aniso8601-10.0.0.dist-info/top_level.txt rename to libs/aniso8601-10.0.1.dist-info/top_level.txt diff --git a/libs/aniso8601/__init__.py b/libs/aniso8601/__init__.py index c85d218b31..a3754fd6da 100644 --- a/libs/aniso8601/__init__.py +++ b/libs/aniso8601/__init__.py @@ -23,4 +23,4 @@ parse_time, ) -__version__ = "10.0.0" +__version__ = "10.0.1" diff --git a/libs/aniso8601/builders/python.py b/libs/aniso8601/builders/python.py index b60382e1bb..a471d33f4a 100644 --- a/libs/aniso8601/builders/python.py +++ b/libs/aniso8601/builders/python.py @@ -61,8 +61,8 @@ def year_range_check(valuestr, limit): YYYYstr = valuestr # Truncated dates, like '19', refer to 1900-1999 inclusive, - # we simply parse to 1900 - if len(valuestr) < 4: + # we simply parse to 1900, Y and YYY strings are not supported + if len(valuestr) == 2: # Shift 0s in from the left to form complete year YYYYstr = valuestr.ljust(4, "0") diff --git a/libs/aniso8601/builders/tests/test_python.py b/libs/aniso8601/builders/tests/test_python.py index ccbed27d17..4cbbcbb433 100644 --- a/libs/aniso8601/builders/tests/test_python.py +++ b/libs/aniso8601/builders/tests/test_python.py @@ -195,17 +195,6 @@ def test_build_date(self): datetime.date(1980, 12, 31), ), # Make sure we shift in zeros - ( - { - "YYYY": "1", - "MM": None, - "DD": None, - "Www": None, - "D": None, - "DDD": None, - }, - datetime.date(1000, 1, 1), - ), ( { "YYYY": "12", @@ -217,17 +206,6 @@ def test_build_date(self): }, datetime.date(1200, 1, 1), ), - ( - { - "YYYY": "123", - 
"MM": None, - "DD": None, - "Www": None, - "D": None, - "DDD": None, - }, - datetime.date(1230, 1, 1), - ), ) for testtuple in testtuples: diff --git a/libs/appdirs-1.4.4.dist-info/METADATA b/libs/appdirs-1.4.4.dist-info/METADATA index 26a62703f5..201ab584d6 100644 --- a/libs/appdirs-1.4.4.dist-info/METADATA +++ b/libs/appdirs-1.4.4.dist-info/METADATA @@ -1,4 +1,4 @@ -Metadata-Version: 2.1 +Metadata-Version: 2.4 Name: appdirs Version: 1.4.4 Summary: A small Python module for determining appropriate platform-specific dirs, e.g. a "user data dir". @@ -25,6 +25,17 @@ Classifier: Programming Language :: Python :: Implementation :: PyPy Classifier: Programming Language :: Python :: Implementation :: CPython Classifier: Topic :: Software Development :: Libraries :: Python Modules License-File: LICENSE.txt +Dynamic: author +Dynamic: author-email +Dynamic: classifier +Dynamic: description +Dynamic: home-page +Dynamic: keywords +Dynamic: license +Dynamic: license-file +Dynamic: maintainer +Dynamic: maintainer-email +Dynamic: summary .. 
image:: https://secure.travis-ci.org/ActiveState/appdirs.png diff --git a/libs/appdirs-1.4.4.dist-info/RECORD b/libs/appdirs-1.4.4.dist-info/RECORD index 439f34a9a9..e6bb283afc 100644 --- a/libs/appdirs-1.4.4.dist-info/RECORD +++ b/libs/appdirs-1.4.4.dist-info/RECORD @@ -1,8 +1,8 @@ appdirs-1.4.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -appdirs-1.4.4.dist-info/LICENSE.txt,sha256=Nt200KdFqTqyAyA9cZCBSxuJcn0lTK_0jHp6-71HAAs,1097 -appdirs-1.4.4.dist-info/METADATA,sha256=2kntSNh0a5RTW7_nUoezsk50bHfrXp7iQSNNEWM8HGk,8991 +appdirs-1.4.4.dist-info/METADATA,sha256=egp_cRL5I7SyfWvG8oQN4vpOUcZ4As8V-s5yKa2l7R0,9209 appdirs-1.4.4.dist-info/RECORD,, appdirs-1.4.4.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -appdirs-1.4.4.dist-info/WHEEL,sha256=P2T-6epvtXQ2cBOE_U1K4_noqlJFN3tj15djMgEu4NM,110 +appdirs-1.4.4.dist-info/WHEEL,sha256=Mk1ST5gDzEO5il5kYREiBnzzM469m5sI8ESPl7TRhJY,110 +appdirs-1.4.4.dist-info/licenses/LICENSE.txt,sha256=Nt200KdFqTqyAyA9cZCBSxuJcn0lTK_0jHp6-71HAAs,1097 appdirs-1.4.4.dist-info/top_level.txt,sha256=nKncE8CUqZERJ6VuQWL4_bkunSPDNfn7KZqb4Tr5YEM,8 appdirs.py,sha256=g99s2sXhnvTEm79oj4bWI0Toapc-_SmKKNXvOXHkVic,24720 diff --git a/libs/appdirs-1.4.4.dist-info/WHEEL b/libs/appdirs-1.4.4.dist-info/WHEEL index f31e450fda..798dd6195b 100644 --- a/libs/appdirs-1.4.4.dist-info/WHEEL +++ b/libs/appdirs-1.4.4.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: bdist_wheel (0.41.3) +Generator: setuptools (80.10.2) Root-Is-Purelib: true Tag: py2-none-any Tag: py3-none-any diff --git a/libs/appdirs-1.4.4.dist-info/LICENSE.txt b/libs/appdirs-1.4.4.dist-info/licenses/LICENSE.txt similarity index 100% rename from libs/appdirs-1.4.4.dist-info/LICENSE.txt rename to libs/appdirs-1.4.4.dist-info/licenses/LICENSE.txt diff --git a/libs/apprise-1.9.2.dist-info/LICENSE b/libs/apprise-1.9.2.dist-info/LICENSE deleted file mode 100644 index 1bb88d1df4..0000000000 --- a/libs/apprise-1.9.2.dist-info/LICENSE +++ /dev/null 
@@ -1,25 +0,0 @@ -BSD 2-Clause License - -Copyright (c) 2025, Chris Caron -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -1. Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/libs/apprise-1.9.2.dist-info/METADATA b/libs/apprise-1.9.2.dist-info/METADATA deleted file mode 100644 index d6ebb290d9..0000000000 --- a/libs/apprise-1.9.2.dist-info/METADATA +++ /dev/null @@ -1,723 +0,0 @@ -Metadata-Version: 2.1 -Name: apprise -Version: 1.9.2 -Summary: Push Notifications that work with just about every platform! 
-Home-page: https://github.com/caronc/apprise -Author: Chris Caron -Author-email: lead2gold@gmail.com -License: BSD 2-Clause -Keywords: Africas Talking Alerts Apprise API Automated Packet Reporting System AWS BulkSMS BulkVS Burst SMS Chanify Chat CLI ClickSend D7Networks Dapnet DBus DingTalk Discord Email Emby Enigma2 FCM Feishu Flock Form Free Mobile Gnome Google Chat Gotify Growl Guilded Home Assistant httpSMS IFTTT Join JSON Kavenegar KODI Kumulos LaMetric Line LunaSea MacOSX Mailgun Mastodon Matrix Mattermost MessageBird Microsoft Misskey MQTT MSG91 MSTeams Nextcloud NextcloudTalk Notica Notifiarr Notifico Ntfy Office365 OneSignal Opsgenie PagerDuty PagerTree ParsePlatform Plivo PopcornNotify Power Automate Prowl PushBullet Pushed Pushjet PushMe Push Notifications Pushover PushSafer Pushy PushDeer Reddit Revolt Rocket.Chat RSyslog Ryver SendGrid ServerChan Seven SES SFR Signal SimplePush Sinch Slack SMSEagle SMS Manager SMTP2Go SNS SparkPost Splunk Streamlabs Stride Synology Chat Syslog Techulus Telegram Threema Gateway Twilio Twist Twitter VictorOps Voipms Vonage Webex WeCom Bot WhatsApp Windows Workflows WxPusher XBMC XML Zulip -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: Intended Audience :: System Administrators -Classifier: Operating System :: OS Independent -Classifier: Natural Language :: English -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy 
-Classifier: License :: OSI Approved :: BSD License -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Software Development :: Libraries :: Application Frameworks -Requires-Python: >=3.6 -Description-Content-Type: text/markdown -License-File: LICENSE -Requires-Dist: certifi -Requires-Dist: requests -Requires-Dist: requests-oauthlib -Requires-Dist: click >=5.0 -Requires-Dist: markdown -Requires-Dist: PyYAML - -![Apprise Logo](https://raw.githubusercontent.com/caronc/apprise/master/apprise/assets/themes/default/apprise-logo.png) - -
- -**ap·prise** / *verb*
-To inform or tell (someone). To make one aware of something. -
- -*Apprise* allows you to send a notification to *almost* all of the most popular *notification* services available to us today such as: Telegram, Discord, Slack, Amazon SNS, Gotify, etc. - -* One notification library to rule them all. -* A common and intuitive notification syntax. -* Supports the handling of images and attachments (_to the notification services that will accept them_). -* It's incredibly lightweight. -* Amazing response times because all messages sent asynchronously. - -Developers who wish to provide a notification service no longer need to research each and every one out there. They no longer need to try to adapt to the new ones that comeout thereafter. They just need to include this one library and then they can immediately gain access to almost all of the notifications services available to us today. - -System Administrators and DevOps who wish to send a notification now no longer need to find the right tool for the job. Everything is already wrapped and supported within the `apprise` command line tool (CLI) that ships with this product. - -[![Paypal](https://img.shields.io/badge/paypal-donate-green.svg)](https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=MHANV39UZNQ5E) -[![Follow](https://img.shields.io/twitter/follow/l2gnux)](https://twitter.com/l2gnux/)
-[![Discord](https://img.shields.io/discord/558793703356104724.svg?colorB=7289DA&label=Discord&logo=Discord&logoColor=7289DA&style=flat-square)](https://discord.gg/MMPeN2D) -[![Python](https://img.shields.io/pypi/pyversions/apprise.svg?style=flat-square)](https://pypi.org/project/apprise/) -[![Build Status](https://github.com/caronc/apprise/actions/workflows/tests.yml/badge.svg)](https://github.com/caronc/apprise/actions/workflows/tests.yml) -[![CodeCov Status](https://codecov.io/github/caronc/apprise/branch/master/graph/badge.svg)](https://codecov.io/github/caronc/apprise) -[![PyPi](https://img.shields.io/pypi/dm/apprise.svg?style=flat-square)](https://pypi.org/project/apprise/) - -# Table of Contents - -* [Supported Notifications](#supported-notifications) - * [Productivity Based Notifications](#productivity-based-notifications) - * [SMS Notifications](#sms-notifications) - * [Desktop Notifications](#desktop-notifications) - * [Email Notifications](#email-notifications) - * [Custom Notifications](#custom-notifications) -* [Installation](#installation) -* [Command Line Usage](#command-line-usage) - * [Configuration Files](#cli-configuration-files) - * [File Attachments](#cli-file-attachments) - * [Loading Custom Notifications/Hooks](#cli-loading-custom-notificationshooks) - * [Environment Variables](#cli-environment-variables) -* [Developer API Usage](#developer-api-usage) - * [Configuration Files](#api-configuration-files) - * [File Attachments](#api-file-attachments) - * [Loading Custom Notifications/Hooks](#api-loading-custom-notificationshooks) -* [Persistent Storage](#persistent-storage) -* [More Supported Links and Documentation](#want-to-learn-more) - - -# Supported Notifications - -The section identifies all of the services supported by this library. [Check out the wiki for more information on the supported modules here](https://github.com/caronc/apprise/wiki). 
- -## Productivity Based Notifications - -The table below identifies the services this tool supports and some example service urls you need to use in order to take advantage of it. Click on any of the services listed below to get more details on how you can configure Apprise to access them. - -| Notification Service | Service ID | Default Port | Example Syntax | -| -------------------- | ---------- | ------------ | -------------- | -| [Apprise API](https://github.com/caronc/apprise/wiki/Notify_apprise_api) | apprise:// or apprises:// | (TCP) 80 or 443 | apprise://hostname/Token -| [AWS SES](https://github.com/caronc/apprise/wiki/Notify_ses) | ses:// | (TCP) 443 | ses://user@domain/AccessKeyID/AccessSecretKey/RegionName
ses://user@domain/AccessKeyID/AccessSecretKey/RegionName/email1/email2/emailN -| [Bark](https://github.com/caronc/apprise/wiki/Notify_bark) | bark:// | (TCP) 80 or 443 | bark://hostname
bark://hostname/device_key
bark://hostname/device_key1/device_key2/device_keyN
barks://hostname
barks://hostname/device_key
barks://hostname/device_key1/device_key2/device_keyN -| [Chanify](https://github.com/caronc/apprise/wiki/Notify_chanify) | chantify:// | (TCP) 443 | chantify://token -| [Discord](https://github.com/caronc/apprise/wiki/Notify_discord) | discord:// | (TCP) 443 | discord://webhook_id/webhook_token
discord://avatar@webhook_id/webhook_token -| [Emby](https://github.com/caronc/apprise/wiki/Notify_emby) | emby:// or embys:// | (TCP) 8096 | emby://user@hostname/
emby://user:password@hostname -| [Enigma2](https://github.com/caronc/apprise/wiki/Notify_enigma2) | enigma2:// or enigma2s:// | (TCP) 80 or 443 | enigma2://hostname -| [FCM](https://github.com/caronc/apprise/wiki/Notify_fcm) | fcm:// | (TCP) 443 | fcm://project@apikey/DEVICE_ID
fcm://project@apikey/#TOPIC
fcm://project@apikey/DEVICE_ID1/#topic1/#topic2/DEVICE_ID2/ -| [Feishu](https://github.com/caronc/apprise/wiki/Notify_feishu) | feishu:// | (TCP) 443 | feishu://token -| [Flock](https://github.com/caronc/apprise/wiki/Notify_flock) | flock:// | (TCP) 443 | flock://token
flock://botname@token
flock://app_token/u:userid
flock://app_token/g:channel_id
flock://app_token/u:userid/g:channel_id -| [Google Chat](https://github.com/caronc/apprise/wiki/Notify_googlechat) | gchat:// | (TCP) 443 | gchat://workspace/key/token -| [Gotify](https://github.com/caronc/apprise/wiki/Notify_gotify) | gotify:// or gotifys:// | (TCP) 80 or 443 | gotify://hostname/token
gotifys://hostname/token?priority=high -| [Growl](https://github.com/caronc/apprise/wiki/Notify_growl) | growl:// | (UDP) 23053 | growl://hostname
growl://hostname:portno
growl://password@hostname
growl://password@hostname:port
**Note**: you can also use the get parameter _version_ which can allow the growl request to behave using the older v1.x protocol. An example would look like: growl://hostname?version=1 -| [Guilded](https://github.com/caronc/apprise/wiki/Notify_guilded) | guilded:// | (TCP) 443 | guilded://webhook_id/webhook_token
guilded://avatar@webhook_id/webhook_token -| [Home Assistant](https://github.com/caronc/apprise/wiki/Notify_homeassistant) | hassio:// or hassios:// | (TCP) 8123 or 443 | hassio://hostname/accesstoken
hassio://user@hostname/accesstoken
hassio://user:password@hostname:port/accesstoken
hassio://hostname/optional/path/accesstoken -| [IFTTT](https://github.com/caronc/apprise/wiki/Notify_ifttt) | ifttt:// | (TCP) 443 | ifttt://webhooksID/Event
ifttt://webhooksID/Event1/Event2/EventN
ifttt://webhooksID/Event1/?+Key=Value
ifttt://webhooksID/Event1/?-Key=value1 -| [Join](https://github.com/caronc/apprise/wiki/Notify_join) | join:// | (TCP) 443 | join://apikey/device
join://apikey/device1/device2/deviceN/
join://apikey/group
join://apikey/groupA/groupB/groupN
join://apikey/DeviceA/groupA/groupN/DeviceN/ -| [KODI](https://github.com/caronc/apprise/wiki/Notify_kodi) | kodi:// or kodis:// | (TCP) 8080 or 443 | kodi://hostname
kodi://user@hostname
kodi://user:password@hostname:port -| [Kumulos](https://github.com/caronc/apprise/wiki/Notify_kumulos) | kumulos:// | (TCP) 443 | kumulos://apikey/serverkey -| [LaMetric Time](https://github.com/caronc/apprise/wiki/Notify_lametric) | lametric:// | (TCP) 443 | lametric://apikey@device_ipaddr
lametric://apikey@hostname:port
lametric://client_id@client_secret -| [Line](https://github.com/caronc/apprise/wiki/Notify_line) | line:// | (TCP) 443 | line://Token@User
line://Token/User1/User2/UserN -| [LunaSea](https://github.com/caronc/apprise/wiki/Notify_lunasea) | lunasea:// | (TCP) 80 or 443 | lunasea://user:pass@+FireBaseDevice/
lunasea://user:pass@FireBaseUser/
lunasea://user:pass@hostname/+FireBaseDevice/
lunasea://user:pass@hostname/@FireBaseUser/ -| [Mailgun](https://github.com/caronc/apprise/wiki/Notify_mailgun) | mailgun:// | (TCP) 443 | mailgun://user@hostname/apikey
mailgun://user@hostname/apikey/email
mailgun://user@hostname/apikey/email1/email2/emailN
mailgun://user@hostname/apikey/?name="From%20User" -| [Mastodon](https://github.com/caronc/apprise/wiki/Notify_mastodon) | mastodon:// or mastodons://| (TCP) 80 or 443 | mastodon://access_key@hostname
mastodon://access_key@hostname/@user
mastodon://access_key@hostname/@user1/@user2/@userN -| [Matrix](https://github.com/caronc/apprise/wiki/Notify_matrix) | matrix:// or matrixs:// | (TCP) 80 or 443 | matrix://hostname
matrix://user@hostname
matrixs://user:pass@hostname:port/#room_alias
matrixs://user:pass@hostname:port/!room_id
matrixs://user:pass@hostname:port/#room_alias/!room_id/#room2
matrixs://token@hostname:port/?webhook=matrix
matrix://user:token@hostname/?webhook=slack&format=markdown -| [Mattermost](https://github.com/caronc/apprise/wiki/Notify_mattermost) | mmost:// or mmosts:// | (TCP) 8065 | mmost://hostname/authkey
mmost://hostname:80/authkey
mmost://user@hostname:80/authkey
mmost://hostname/authkey?channel=channel
mmosts://hostname/authkey
mmosts://user@hostname/authkey
-| [Microsoft Power Automate / Workflows (MSTeams)](https://github.com/caronc/apprise/wiki/Notify_workflows) | workflows:// | (TCP) 443 | workflows://WorkflowID/Signature/ -| [Microsoft Teams](https://github.com/caronc/apprise/wiki/Notify_msteams) | msteams:// | (TCP) 443 | msteams://TokenA/TokenB/TokenC/ -| [Misskey](https://github.com/caronc/apprise/wiki/Notify_misskey) | misskey:// or misskeys://| (TCP) 80 or 443 | misskey://access_token@hostname -| [MQTT](https://github.com/caronc/apprise/wiki/Notify_mqtt) | mqtt:// or mqtts:// | (TCP) 1883 or 8883 | mqtt://hostname/topic
mqtt://user@hostname/topic
mqtts://user:pass@hostname:9883/topic -| [Nextcloud](https://github.com/caronc/apprise/wiki/Notify_nextcloud) | ncloud:// or nclouds:// | (TCP) 80 or 443 | ncloud://adminuser:pass@host/User
nclouds://adminuser:pass@host/User1/User2/UserN -| [NextcloudTalk](https://github.com/caronc/apprise/wiki/Notify_nextcloudtalk) | nctalk:// or nctalks:// | (TCP) 80 or 443 | nctalk://user:pass@host/RoomId
nctalks://user:pass@host/RoomId1/RoomId2/RoomIdN -| [Notica](https://github.com/caronc/apprise/wiki/Notify_notica) | notica:// | (TCP) 443 | notica://Token/ -| [Notifiarr](https://github.com/caronc/apprise/wiki/Notify_notifiarr) | notifiarr:// | (TCP) 443 | notifiarr://apikey/#channel
notifiarr://apikey/#channel1/#channel2/#channeln -| [Notifico](https://github.com/caronc/apprise/wiki/Notify_notifico) | notifico:// | (TCP) 443 | notifico://ProjectID/MessageHook/ -| [ntfy](https://github.com/caronc/apprise/wiki/Notify_ntfy) | ntfy:// | (TCP) 80 or 443 | ntfy://topic/
ntfys://topic/ -| [Office 365](https://github.com/caronc/apprise/wiki/Notify_office365) | o365:// | (TCP) 443 | o365://TenantID:AccountEmail/ClientID/ClientSecret
o365://TenantID:AccountEmail/ClientID/ClientSecret/TargetEmail
o365://TenantID:AccountEmail/ClientID/ClientSecret/TargetEmail1/TargetEmail2/TargetEmailN -| [OneSignal](https://github.com/caronc/apprise/wiki/Notify_onesignal) | onesignal:// | (TCP) 443 | onesignal://AppID@APIKey/PlayerID
onesignal://TemplateID:AppID@APIKey/UserID
onesignal://AppID@APIKey/#IncludeSegment
onesignal://AppID@APIKey/Email -| [Opsgenie](https://github.com/caronc/apprise/wiki/Notify_opsgenie) | opsgenie:// | (TCP) 443 | opsgenie://APIKey
opsgenie://APIKey/UserID
opsgenie://APIKey/#Team
opsgenie://APIKey/\*Schedule
opsgenie://APIKey/^Escalation -| [PagerDuty](https://github.com/caronc/apprise/wiki/Notify_pagerduty) | pagerduty:// | (TCP) 443 | pagerduty://IntegrationKey@ApiKey
pagerduty://IntegrationKey@ApiKey/Source/Component -| [PagerTree](https://github.com/caronc/apprise/wiki/Notify_pagertree) | pagertree:// | (TCP) 443 | pagertree://integration_id -| [ParsePlatform](https://github.com/caronc/apprise/wiki/Notify_parseplatform) | parsep:// or parseps:// | (TCP) 80 or 443 | parsep://AppID:MasterKey@Hostname
parseps://AppID:MasterKey@Hostname -| [PopcornNotify](https://github.com/caronc/apprise/wiki/Notify_popcornnotify) | popcorn:// | (TCP) 443 | popcorn://ApiKey/ToPhoneNo
popcorn://ApiKey/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/
popcorn://ApiKey/ToEmail
popcorn://ApiKey/ToEmail1/ToEmail2/ToEmailN/
popcorn://ApiKey/ToPhoneNo1/ToEmail1/ToPhoneNoN/ToEmailN -| [Prowl](https://github.com/caronc/apprise/wiki/Notify_prowl) | prowl:// | (TCP) 443 | prowl://apikey
prowl://apikey/providerkey -| [PushBullet](https://github.com/caronc/apprise/wiki/Notify_pushbullet) | pbul:// | (TCP) 443 | pbul://accesstoken
pbul://accesstoken/#channel
pbul://accesstoken/A_DEVICE_ID
pbul://accesstoken/email@address.com
pbul://accesstoken/#channel/#channel2/email@address.net/DEVICE -| [Pushjet](https://github.com/caronc/apprise/wiki/Notify_pushjet) | pjet:// or pjets:// | (TCP) 80 or 443 | pjet://hostname/secret
pjet://hostname:port/secret
pjets://secret@hostname/secret
pjets://hostname:port/secret -| [Push (Techulus)](https://github.com/caronc/apprise/wiki/Notify_techulus) | push:// | (TCP) 443 | push://apikey/ -| [Pushed](https://github.com/caronc/apprise/wiki/Notify_pushed) | pushed:// | (TCP) 443 | pushed://appkey/appsecret/
pushed://appkey/appsecret/#ChannelAlias
pushed://appkey/appsecret/#ChannelAlias1/#ChannelAlias2/#ChannelAliasN
pushed://appkey/appsecret/@UserPushedID
pushed://appkey/appsecret/@UserPushedID1/@UserPushedID2/@UserPushedIDN -| [PushMe](https://github.com/caronc/apprise/wiki/Notify_pushme) | pushme:// | (TCP) 443 | pushme://Token/ -| [Pushover](https://github.com/caronc/apprise/wiki/Notify_pushover) | pover:// | (TCP) 443 | pover://user@token
pover://user@token/DEVICE
pover://user@token/DEVICE1/DEVICE2/DEVICEN
**Note**: you must specify both your user_id and token -| [PushSafer](https://github.com/caronc/apprise/wiki/Notify_pushsafer) | psafer:// or psafers:// | (TCP) 80 or 443 | psafer://privatekey
psafers://privatekey/DEVICE
psafer://privatekey/DEVICE1/DEVICE2/DEVICEN -| [Pushy](https://github.com/caronc/apprise/wiki/Notify_pushy) | pushy:// | (TCP) 443 | pushy://apikey/DEVICE
pushy://apikey/DEVICE1/DEVICE2/DEVICEN
pushy://apikey/TOPIC
pushy://apikey/TOPIC1/TOPIC2/TOPICN -| [PushDeer](https://github.com/caronc/apprise/wiki/Notify_pushdeer) | pushdeer:// or pushdeers:// | (TCP) 80 or 443 | pushdeer://pushKey
pushdeer://hostname/pushKey
pushdeer://hostname:port/pushKey -| [Reddit](https://github.com/caronc/apprise/wiki/Notify_reddit) | reddit:// | (TCP) 443 | reddit://user:password@app_id/app_secret/subreddit
reddit://user:password@app_id/app_secret/sub1/sub2/subN -| [Revolt](https://github.com/caronc/apprise/wiki/Notify_Revolt) | revolt:// | (TCP) 443 | revolt://bottoken/ChannelID
revolt://bottoken/ChannelID1/ChannelID2/ChannelIDN | -| [Rocket.Chat](https://github.com/caronc/apprise/wiki/Notify_rocketchat) | rocket:// or rockets:// | (TCP) 80 or 443 | rocket://user:password@hostname/RoomID/Channel
rockets://user:password@hostname:443/#Channel1/#Channel1/RoomID
rocket://user:password@hostname/#Channel
rocket://webhook@hostname
rockets://webhook@hostname/@User/#Channel -| [RSyslog](https://github.com/caronc/apprise/wiki/Notify_rsyslog) | rsyslog:// | (UDP) 514 | rsyslog://hostname
rsyslog://hostname/Facility -| [Ryver](https://github.com/caronc/apprise/wiki/Notify_ryver) | ryver:// | (TCP) 443 | ryver://Organization/Token
ryver://botname@Organization/Token -| [SendGrid](https://github.com/caronc/apprise/wiki/Notify_sendgrid) | sendgrid:// | (TCP) 443 | sendgrid://APIToken:FromEmail/
sendgrid://APIToken:FromEmail/ToEmail
sendgrid://APIToken:FromEmail/ToEmail1/ToEmail2/ToEmailN/ -| [ServerChan](https://github.com/caronc/apprise/wiki/Notify_serverchan) | schan:// | (TCP) 443 | schan://sendkey/ -| [Signal API](https://github.com/caronc/apprise/wiki/Notify_signal) | signal:// or signals:// | (TCP) 80 or 443 | signal://hostname:port/FromPhoneNo
signal://hostname:port/FromPhoneNo/ToPhoneNo
signal://hostname:port/FromPhoneNo/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ -| [SimplePush](https://github.com/caronc/apprise/wiki/Notify_simplepush) | spush:// | (TCP) 443 | spush://apikey
spush://salt:password@apikey
spush://apikey?event=Apprise -| [Slack](https://github.com/caronc/apprise/wiki/Notify_slack) | slack:// | (TCP) 443 | slack://TokenA/TokenB/TokenC/
slack://TokenA/TokenB/TokenC/Channel
slack://botname@TokenA/TokenB/TokenC/Channel
slack://user@TokenA/TokenB/TokenC/Channel1/Channel2/ChannelN -| [SMTP2Go](https://github.com/caronc/apprise/wiki/Notify_smtp2go) | smtp2go:// | (TCP) 443 | smtp2go://user@hostname/apikey
smtp2go://user@hostname/apikey/email
smtp2go://user@hostname/apikey/email1/email2/emailN
smtp2go://user@hostname/apikey/?name="From%20User" -| [SparkPost](https://github.com/caronc/apprise/wiki/Notify_sparkpost) | sparkpost:// | (TCP) 443 | sparkpost://user@hostname/apikey
sparkpost://user@hostname/apikey/email
sparkpost://user@hostname/apikey/email1/email2/emailN
sparkpost://user@hostname/apikey/?name="From%20User" -| [Splunk](https://github.com/caronc/apprise/wiki/Notify_splunk) | splunk:// or victorops:// | (TCP) 443 | splunk://route_key@apikey
splunk://route_key@apikey/entity_id -| [Streamlabs](https://github.com/caronc/apprise/wiki/Notify_streamlabs) | strmlabs:// | (TCP) 443 | strmlabs://AccessToken/
strmlabs://AccessToken/?name=name&identifier=identifier&amount=0&currency=USD -| [Synology Chat](https://github.com/caronc/apprise/wiki/Notify_synology_chat) | synology:// or synologys:// | (TCP) 80 or 443 | synology://hostname/token
synology://hostname:port/token -| [Syslog](https://github.com/caronc/apprise/wiki/Notify_syslog) | syslog:// | n/a | syslog://
syslog://Facility -| [Telegram](https://github.com/caronc/apprise/wiki/Notify_telegram) | tgram:// | (TCP) 443 | tgram://bottoken/ChatID
tgram://bottoken/ChatID1/ChatID2/ChatIDN -| [Twitter](https://github.com/caronc/apprise/wiki/Notify_twitter) | twitter:// | (TCP) 443 | twitter://CKey/CSecret/AKey/ASecret
twitter://user@CKey/CSecret/AKey/ASecret
twitter://CKey/CSecret/AKey/ASecret/User1/User2/UserN
twitter://CKey/CSecret/AKey/ASecret?mode=tweet -| [Twist](https://github.com/caronc/apprise/wiki/Notify_twist) | twist:// | (TCP) 443 | twist://password:login
twist://password:login/#channel
twist://password:login/#team:channel
twist://password:login/#team:channel1/channel2/#team3:channel -| [Webex Teams (Cisco)](https://github.com/caronc/apprise/wiki/Notify_wxteams) | wxteams:// | (TCP) 443 | wxteams://Token -| [WeCom Bot](https://github.com/caronc/apprise/wiki/Notify_wecombot) | wecombot:// | (TCP) 443 | wecombot://BotKey -| [WhatsApp](https://github.com/caronc/apprise/wiki/Notify_whatsapp) | whatsapp:// | (TCP) 443 | whatsapp://AccessToken@FromPhoneID/ToPhoneNo
whatsapp://Template:AccessToken@FromPhoneID/ToPhoneNo -| [WxPusher](https://github.com/caronc/apprise/wiki/Notify_wxpusher) | wxpusher:// | (TCP) 443 | wxpusher://AppToken@UserID1/UserID2/UserIDN
wxpusher://AppToken@Topic1/Topic2/Topic3
wxpusher://AppToken@UserID1/Topic1/ -| [XBMC](https://github.com/caronc/apprise/wiki/Notify_xbmc) | xbmc:// or xbmcs:// | (TCP) 8080 or 443 | xbmc://hostname
xbmc://user@hostname
xbmc://user:password@hostname:port -| [Zulip Chat](https://github.com/caronc/apprise/wiki/Notify_zulip) | zulip:// | (TCP) 443 | zulip://botname@Organization/Token
zulip://botname@Organization/Token/Stream
zulip://botname@Organization/Token/Email - -## SMS Notifications - -| Notification Service | Service ID | Default Port | Example Syntax | -| -------------------- | ---------- | ------------ | -------------- | -| [Africas Talking](https://github.com/caronc/apprise/wiki/Notify_africas_talking) | atalk:// | (TCP) 443 | atalk://AppUser@ApiKey/ToPhoneNo
atalk://AppUser@ApiKey/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ -| [Automated Packet Reporting System (ARPS)](https://github.com/caronc/apprise/wiki/Notify_aprs) | aprs:// | (TCP) 10152 | aprs://user:pass@callsign
aprs://user:pass@callsign1/callsign2/callsignN -| [AWS SNS](https://github.com/caronc/apprise/wiki/Notify_sns) | sns:// | (TCP) 443 | sns://AccessKeyID/AccessSecretKey/RegionName/+PhoneNo
sns://AccessKeyID/AccessSecretKey/RegionName/+PhoneNo1/+PhoneNo2/+PhoneNoN
sns://AccessKeyID/AccessSecretKey/RegionName/Topic
sns://AccessKeyID/AccessSecretKey/RegionName/Topic1/Topic2/TopicN -| [BulkSMS](https://github.com/caronc/apprise/wiki/Notify_bulksms) | bulksms:// | (TCP) 443 | bulksms://user:password@ToPhoneNo
bulksms://User:Password@ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ -| [BulkVS](https://github.com/caronc/apprise/wiki/Notify_bulkvs) | bulkvs:// | (TCP) 443 | bulkvs://user:password@FromPhoneNo
bulkvs://user:password@FromPhoneNo/ToPhoneNo
bulkvs://user:password@FromPhoneNo/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ -| [Burst SMS](https://github.com/caronc/apprise/wiki/Notify_burst_sms) | burstsms:// | (TCP) 443 | burstsms://ApiKey:ApiSecret@FromPhoneNo/ToPhoneNo
burstsms://ApiKey:ApiSecret@FromPhoneNo/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ -| [ClickSend](https://github.com/caronc/apprise/wiki/Notify_clicksend) | clicksend:// | (TCP) 443 | clicksend://user:pass@PhoneNo
clicksend://user:pass@ToPhoneNo1/ToPhoneNo2/ToPhoneNoN -| [DAPNET](https://github.com/caronc/apprise/wiki/Notify_dapnet) | dapnet:// | (TCP) 80 | dapnet://user:pass@callsign
dapnet://user:pass@callsign1/callsign2/callsignN -| [D7 Networks](https://github.com/caronc/apprise/wiki/Notify_d7networks) | d7sms:// | (TCP) 443 | d7sms://token@PhoneNo
d7sms://token@ToPhoneNo1/ToPhoneNo2/ToPhoneNoN -| [DingTalk](https://github.com/caronc/apprise/wiki/Notify_dingtalk) | dingtalk:// | (TCP) 443 | dingtalk://token/
dingtalk://token/ToPhoneNo
dingtalk://token/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ -| [Free-Mobile](https://github.com/caronc/apprise/wiki/Notify_freemobile) | freemobile:// | (TCP) 443 | freemobile://user@password/ -| [httpSMS](https://github.com/caronc/apprise/wiki/Notify_httpsms) | httpsms:// | (TCP) 443 | httpsms://ApiKey@FromPhoneNo
httpsms://ApiKey@FromPhoneNo/ToPhoneNo
httpsms://ApiKey@FromPhoneNo/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ -| [Kavenegar](https://github.com/caronc/apprise/wiki/Notify_kavenegar) | kavenegar:// | (TCP) 443 | kavenegar://ApiKey/ToPhoneNo
kavenegar://FromPhoneNo@ApiKey/ToPhoneNo
kavenegar://ApiKey/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN -| [MessageBird](https://github.com/caronc/apprise/wiki/Notify_messagebird) | msgbird:// | (TCP) 443 | msgbird://ApiKey/FromPhoneNo
msgbird://ApiKey/FromPhoneNo/ToPhoneNo
msgbird://ApiKey/FromPhoneNo/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ -| [MSG91](https://github.com/caronc/apprise/wiki/Notify_msg91) | msg91:// | (TCP) 443 | msg91://TemplateID@AuthKey/ToPhoneNo
msg91://TemplateID@AuthKey/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ -| [Plivo](https://github.com/caronc/apprise/wiki/Notify_plivo) | plivo:// | (TCP) 443 | plivo://AuthID@Token/FromPhoneNo
plivo://AuthID@Token/FromPhoneNo/ToPhoneNo
plivo://AuthID@Token/FromPhoneNo/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ -| [Seven](https://github.com/caronc/apprise/wiki/Notify_seven) | seven:// | (TCP) 443 | seven://ApiKey/FromPhoneNo
seven://ApiKey/FromPhoneNo/ToPhoneNo
seven://ApiKey/FromPhoneNo/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ -| [Société Française du Radiotéléphone (SFR)](https://github.com/caronc/apprise/wiki/Notify_sfr) | sfr:// | (TCP) 443 | sfr://user:password@spaceId/ToPhoneNo
sfr://user:password@spaceId/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ -| [Signal API](https://github.com/caronc/apprise/wiki/Notify_signal) | signal:// or signals:// | (TCP) 80 or 443 | signal://hostname:port/FromPhoneNo
signal://hostname:port/FromPhoneNo/ToPhoneNo
signal://hostname:port/FromPhoneNo/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ -| [Sinch](https://github.com/caronc/apprise/wiki/Notify_sinch) | sinch:// | (TCP) 443 | sinch://ServicePlanId:ApiToken@FromPhoneNo
sinch://ServicePlanId:ApiToken@FromPhoneNo/ToPhoneNo
sinch://ServicePlanId:ApiToken@FromPhoneNo/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/
sinch://ServicePlanId:ApiToken@ShortCode/ToPhoneNo
sinch://ServicePlanId:ApiToken@ShortCode/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ -| [SMSEagle](https://github.com/caronc/apprise/wiki/Notify_smseagle) | smseagle:// or smseagles:// | (TCP) 80 or 443 | smseagles://hostname:port/ToPhoneNo
smseagles://hostname:port/@ToContact
smseagles://hostname:port/#ToGroup
smseagles://hostname:port/ToPhoneNo1/#ToGroup/@ToContact/ -| [SMS Manager](https://github.com/caronc/apprise/wiki/Notify_sms_manager) | smsmgr:// | (TCP) 443 | smsmgr://ApiKey@ToPhoneNo
smsmgr://ApiKey@ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ -| [Threema Gateway](https://github.com/caronc/apprise/wiki/Notify_threema) | threema:// | (TCP) 443 | threema://GatewayID@secret/ToPhoneNo
threema://GatewayID@secret/ToEmail
threema://GatewayID@secret/ToThreemaID/
threema://GatewayID@secret/ToEmail/ToThreemaID/ToPhoneNo/... -| [Twilio](https://github.com/caronc/apprise/wiki/Notify_twilio) | twilio:// | (TCP) 443 | twilio://AccountSid:AuthToken@FromPhoneNo
twilio://AccountSid:AuthToken@FromPhoneNo/ToPhoneNo
twilio://AccountSid:AuthToken@FromPhoneNo/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/
twilio://AccountSid:AuthToken@FromPhoneNo/ToPhoneNo?apikey=Key
twilio://AccountSid:AuthToken@ShortCode/ToPhoneNo
twilio://AccountSid:AuthToken@ShortCode/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ -| [Voipms](https://github.com/caronc/apprise/wiki/Notify_voipms) | voipms:// | (TCP) 443 | voipms://password:email/FromPhoneNo
voipms://password:email/FromPhoneNo/ToPhoneNo
voipms://password:email/FromPhoneNo/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ -| [Vonage](https://github.com/caronc/apprise/wiki/Notify_nexmo) (formerly Nexmo) | nexmo:// | (TCP) 443 | nexmo://ApiKey:ApiSecret@FromPhoneNo
nexmo://ApiKey:ApiSecret@FromPhoneNo/ToPhoneNo
nexmo://ApiKey:ApiSecret@FromPhoneNo/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ - -## Desktop Notifications - -| Notification Service | Service ID | Default Port | Example Syntax | -| -------------------- | ---------- | ------------ | -------------- | -| [Linux DBus Notifications](https://github.com/caronc/apprise/wiki/Notify_dbus) | dbus://
qt://
glib://
kde:// | n/a | dbus://
qt://
glib://
kde:// -| [Linux Gnome Notifications](https://github.com/caronc/apprise/wiki/Notify_gnome) | gnome:// | n/a | gnome:// -| [MacOS X Notifications](https://github.com/caronc/apprise/wiki/Notify_macosx) | macosx:// | n/a | macosx:// -| [Windows Notifications](https://github.com/caronc/apprise/wiki/Notify_windows) | windows:// | n/a | windows:// - -## Email Notifications - -| Service ID | Default Port | Example Syntax | -| ---------- | ------------ | -------------- | -| [mailto://](https://github.com/caronc/apprise/wiki/Notify_email) | (TCP) 25 | mailto://userid:pass@domain.com
mailto://domain.com?user=userid&pass=password
mailto://domain.com:2525?user=userid&pass=password
mailto://user@gmail.com&pass=password
mailto://mySendingUsername:mySendingPassword@example.com?to=receivingAddress@example.com
mailto://userid:password@example.com?smtp=mail.example.com&from=noreply@example.com&name=no%20reply -| [mailtos://](https://github.com/caronc/apprise/wiki/Notify_email) | (TCP) 587 | mailtos://userid:pass@domain.com
mailtos://domain.com?user=userid&pass=password
mailtos://domain.com:465?user=userid&pass=password
mailtos://user@hotmail.com&pass=password
mailtos://mySendingUsername:mySendingPassword@example.com?to=receivingAddress@example.com
mailtos://userid:password@example.com?smtp=mail.example.com&from=noreply@example.com&name=no%20reply - -Apprise have some email services built right into it (such as yahoo, fastmail, hotmail, gmail, etc) that greatly simplify the mailto:// service. See more details [here](https://github.com/caronc/apprise/wiki/Notify_email). - -## Custom Notifications - -| Post Method | Service ID | Default Port | Example Syntax | -| -------------------- | ---------- | ------------ | -------------- | -| [Form](https://github.com/caronc/apprise/wiki/Notify_Custom_Form) | form:// or forms:// | (TCP) 80 or 443 | form://hostname
form://user@hostname
form://user:password@hostname:port
form://hostname/a/path/to/post/to -| [JSON](https://github.com/caronc/apprise/wiki/Notify_Custom_JSON) | json:// or jsons:// | (TCP) 80 or 443 | json://hostname
json://user@hostname
json://user:password@hostname:port
json://hostname/a/path/to/post/to -| [XML](https://github.com/caronc/apprise/wiki/Notify_Custom_XML) | xml:// or xmls:// | (TCP) 80 or 443 | xml://hostname
xml://user@hostname
xml://user:password@hostname:port
xml://hostname/a/path/to/post/to - -# Installation - -The easiest way is to install this package is from pypi: -```bash -pip install apprise -``` - -Apprise is also packaged as an RPM and available through [EPEL](https://docs.fedoraproject.org/en-US/epel/) supporting CentOS, Redhat, Rocky, Oracle Linux, etc. -```bash -# Follow instructions on https://docs.fedoraproject.org/en-US/epel -# to get your system connected up to EPEL and then: -# Redhat/CentOS 7.x users -yum install apprise - -# Redhat/CentOS 8.x+ and/or Fedora Users -dnf install apprise -``` - -You can also check out the [Graphical version of Apprise](https://github.com/caronc/apprise-api) to centralize your configuration and notifications through a managable webpage. - -# Command Line Usage - -A small command line interface (CLI) tool is also provided with this package called *apprise*. If you know the server urls you wish to notify, you can simply provide them all on the command line and send your notifications that way: -```bash -# Send a notification to as many servers as you want -# as you can easily chain one after another (the -vv provides some -# additional verbosity to help let you know what is going on): -apprise -vv -t 'my title' -b 'my notification body' \ - 'mailto://myemail:mypass@gmail.com' \ - 'pbul://o.gn5kj6nfhv736I7jC3cj3QLRiyhgl98b' - -# If you don't specify a --body (-b) then stdin is used allowing -# you to use the tool as part of your every day administration: -cat /proc/cpuinfo | apprise -vv -t 'cpu info' \ - 'mailto://myemail:mypass@gmail.com' - -# The title field is totally optional -uptime | apprise -vv \ - 'discord:///4174216298/JHMHI8qBe7bk2ZwO5U711o3dV_js' -``` - -## CLI Configuration Files - -No one wants to put their credentials out for everyone to see on the command line. No problem *apprise* also supports configuration files. 
It can handle both a specific [YAML format](https://github.com/caronc/apprise/wiki/config_yaml) or a very simple [TEXT format](https://github.com/caronc/apprise/wiki/config_text). You can also pull these configuration files via an HTTP query too! You can read more about the expected structure of the configuration files [here](https://github.com/caronc/apprise/wiki/config). -```bash -# By default if no url or configuration is specified apprise will attempt to load -# configuration files (if present) from: -# ~/.apprise -# ~/.apprise.yaml -# ~/.config/apprise.conf -# ~/.config/apprise.yaml -# /etc/apprise.conf -# /etc/apprise.yaml - -# Also a subdirectory handling allows you to leverage plugins -# ~/.apprise/apprise -# ~/.apprise/apprise.yaml -# ~/.config/apprise/apprise.conf -# ~/.config/apprise/apprise.yaml -# /etc/apprise/apprise.yaml -# /etc/apprise/apprise.conf - -# Windows users can store their default configuration files here: -# %APPDATA%/Apprise/apprise.conf -# %APPDATA%/Apprise/apprise.yaml -# %LOCALAPPDATA%/Apprise/apprise.conf -# %LOCALAPPDATA%/Apprise/apprise.yaml -# %ALLUSERSPROFILE%\Apprise\apprise.conf -# %ALLUSERSPROFILE%\Apprise\apprise.yaml -# %PROGRAMFILES%\Apprise\apprise.conf -# %PROGRAMFILES%\Apprise\apprise.yaml -# %COMMONPROGRAMFILES%\Apprise\apprise.conf -# %COMMONPROGRAMFILES%\Apprise\apprise.yaml - -# The configuration files specified above can also be identified with a `.yml` -# extension or even just entirely removing the `.conf` extension altogether. - -# If you loaded one of those files, your command line gets really easy: -apprise -vv -t 'my title' -b 'my notification body' - -# If you want to deviate from the default paths or specify more than one, -# just specify them using the --config switch: -apprise -vv -t 'my title' -b 'my notification body' \ - --config=/path/to/my/config.yml - -# Got lots of configuration locations? No problem, you can specify them all: -# Apprise can even fetch the configuration from over a network! 
-apprise -vv -t 'my title' -b 'my notification body' \ - --config=/path/to/my/config.yml \ - --config=https://localhost/my/apprise/config -``` - -## CLI File Attachments - -Apprise also supports file attachments too! Specify as many attachments to a notification as you want. -```bash -# Send a funny image you found on the internet to a colleague: -apprise -vv --title 'Agile Joke' \ - --body 'Did you see this one yet?' \ - --attach https://i.redd.it/my2t4d2fx0u31.jpg \ - 'mailto://myemail:mypass@gmail.com' - -# Easily send an update from a critical server to your dev team -apprise -vv --title 'system crash' \ - --body 'I do not think Jim fixed the bug; see attached...' \ - --attach /var/log/myprogram.log \ - --attach /var/debug/core.2345 \ - --tag devteam -``` - -## CLI Loading Custom Notifications/Hooks - -To create your own custom `schema://` hook so that you can trigger your own custom code, -simply include the `@notify` decorator to wrap your function. -```python -from apprise.decorators import notify -# -# The below assumes you want to catch foobar:// calls: -# -@notify(on="foobar", name="My Custom Foobar Plugin") -def my_custom_notification_wrapper(body, title, notify_type, *args, **kwargs): - """My custom notification function that triggers on all foobar:// calls - """ - # Write all of your code here... as an example... - print("{}: {} - {}".format(notify_type.upper(), title, body)) - - # Returning True/False is a way to relay your status back to Apprise. - # Returning nothing (None by default) is always interpreted as a Success -``` - -Once you've defined your custom hook, you just need to tell Apprise where it is at runtime. 
-```bash -# By default if no plugin path is specified apprise will attempt to load -# all plugin files (if present) from the following directory paths: -# ~/.apprise/plugins -# ~/.config/apprise/plugins -# /var/lib/apprise/plugins - -# Windows users can store their default plugin files in these directories: -# %APPDATA%/Apprise/plugins -# %LOCALAPPDATA%/Apprise/plugins -# %ALLUSERSPROFILE%\Apprise\plugins -# %PROGRAMFILES%\Apprise\plugins -# %COMMONPROGRAMFILES%\Apprise\plugins - -# If you placed your plugin file within one of the directories already defined -# above, then your call simply needs to look like: -apprise -vv --title 'custom override' \ - --body 'the body of my message' \ - foobar:\\ - -# However you can over-ride the path like so -apprise -vv --title 'custom override' \ - --body 'the body of my message' \ - --plugin-path /path/to/my/plugin.py \ - foobar:\\ -``` - -You can read more about creating your own custom notifications and/or hooks [here](https://github.com/caronc/apprise/wiki/decorator_notify). - -## CLI Environment Variables - -Those using the Command Line Interface (CLI) can also leverage environment variables to pre-set the default settings: - -| Variable | Description | -|------------------------ | ----------------- | -| `APPRISE_URLS` | Specify the default URLs to notify IF none are otherwise specified on the command line explicitly. If the `--config` (`-c`) is specified, then this will over-rides any reference to this variable. Use white space and/or a comma (`,`) to delimit multiple entries. -| `APPRISE_CONFIG_PATH` | Explicitly specify the config search path to use (over-riding the default). The path(s) defined here must point to the absolute filename to open/reference. Use a semi-colon (`;`), line-feed (`\n`), and/or carriage return (`\r`) to delimit multiple entries. -| `APPRISE_PLUGIN_PATH` | Explicitly specify the custom plugin search path to use (over-riding the default). 
Use a semi-colon (`;`), line-feed (`\n`), and/or carriage return (`\r`) to delimit multiple entries. -| `APPRISE_STORAGE_PATH` | Explicitly specify the persistent storage path to use (over-riding the default). - -# Developer API Usage - -To send a notification from within your python application, just do the following: -```python -import apprise - -# Create an Apprise instance -apobj = apprise.Apprise() - -# Add all of the notification services by their server url. -# A sample email notification: -apobj.add('mailto://myuserid:mypass@gmail.com') - -# A sample pushbullet notification -apobj.add('pbul://o.gn5kj6nfhv736I7jC3cj3QLRiyhgl98b') - -# Then notify these services any time you desire. The below would -# notify all of the services loaded into our Apprise object. -apobj.notify( - body='what a great notification service!', - title='my notification title', -) -``` - -## API Configuration Files - -Developers need access to configuration files too. The good news is their use just involves declaring another object (called *AppriseConfig*) that the *Apprise* object can ingest. You can also freely mix and match config and notification entries as often as you wish! You can read more about the expected structure of the configuration files [here](https://github.com/caronc/apprise/wiki/config). -```python -import apprise - -# Create an Apprise instance -apobj = apprise.Apprise() - -# Create an Config instance -config = apprise.AppriseConfig() - -# Add a configuration source: -config.add('/path/to/my/config.yml') - -# Add another... -config.add('https://myserver:8080/path/to/config') - -# Make sure to add our config into our apprise object -apobj.add(config) - -# You can mix and match; add an entry directly if you want too -# In this entry we associate the 'admin' tag with our notification -apobj.add('mailto://myuser:mypass@hotmail.com', tag='admin') - -# Then notify these services any time you desire. 
The below would -# notify all of the services that have not been bound to any specific -# tag. -apobj.notify( - body='what a great notification service!', - title='my notification title', -) - -# Tagging allows you to specifically target only specific notification -# services you've loaded: -apobj.notify( - body='send a notification to our admin group', - title='Attention Admins', - # notify any services tagged with the 'admin' tag - tag='admin', -) - -# If you want to notify absolutely everything (regardless of whether -# it's been tagged or not), just use the reserved tag of 'all': -apobj.notify( - body='send a notification to our admin group', - title='Attention Admins', - # notify absolutely everything loaded, regardless on wether - # it has a tag associated with it or not: - tag='all', -) -``` - -## API File Attachments - -Attachments are very easy to send using the Apprise API: -```python -import apprise - -# Create an Apprise instance -apobj = apprise.Apprise() - -# Add at least one service you want to notify -apobj.add('mailto://myuser:mypass@hotmail.com') - -# Then send your attachment. -apobj.notify( - title='A great photo of our family', - body='The flash caused Jane to close her eyes! hah! :)', - attach='/local/path/to/my/DSC_003.jpg', -) - -# Send a web based attachment too! In the below example, we connect to a home -# security camera and send a live image to an email. By default remote web -# content is cached, but for a security camera we might want to call notify -# again later in our code, so we want our last image retrieved to expire(in -# this case after 3 seconds). 
-apobj.notify( - title='Latest security image', - attach='http://admin:password@hikvision-cam01/ISAPI/Streaming/channels/101/picture?cache=3' -) -``` - -To send more than one attachment, just use a list, set, or tuple instead: -```python -import apprise - -# Create an Apprise instance -apobj = apprise.Apprise() - -# Add at least one service you want to notify -apobj.add('mailto://myuser:mypass@hotmail.com') - -# Now add all of the entries we're interested in: -attach = ( - # ?name= allows us to rename the actual jpeg as found on the site - # to be another name when sent to our receipient(s) - 'https://i.redd.it/my2t4d2fx0u31.jpg?name=FlyingToMars.jpg', - - # Now add another: - '/path/to/funny/joke.gif', -) - -# Send your multiple attachments with a single notify call: -apobj.notify( - title='Some good jokes.', - body='Hey guys, check out these!', - attach=attach, -) -``` - -## API Loading Custom Notifications/Hooks - -By default, no custom plugins are loaded at all for those building from within the Apprise API. -It's at the developers discretion to load custom modules. But should you choose to do so, it's as easy -as including the path reference in the `AppriseAsset()` object prior to the initialization of your `Apprise()` -instance. - -For example: -```python -from apprise import Apprise -from apprise import AppriseAsset - -# Prepare your Asset object so that you can enable the custom plugins to -# be loaded for your instance of Apprise... -asset = AppriseAsset(plugin_paths="/path/to/scan") - -# OR You can also generate scan more then one file too: -asset = AppriseAsset( - plugin_paths=[ - # Iterate over all python libraries found in the root of the - # specified path. This is NOT a recursive (directory) scan; only - # the first level is parsed. HOWEVER, if a directory containing - # an __init__.py is found, it will be included in the load. 
- "/dir/containing/many/python/libraries", - - # An absolute path to a plugin.py to exclusively load - "/path/to/plugin.py", - - # if you point to a directory that has an __init__.py file found in - # it, then only that file is loaded (it's similar to point to a - # absolute .py file. Hence, there is no (level 1) scanning at all - # within the directory specified. - "/path/to/dir/library" - ] -) - -# Now that we've got our asset, we just work with our Apprise object as we -# normally do -aobj = Apprise(asset=asset) - -# If our new custom `foobar://` library was loaded (presuming we prepared -# one like in the examples above). then you would be able to safely add it -# into Apprise at this point -aobj.add('foobar://') - -# Send our notification out through our foobar:// -aobj.notify("test") -``` - -You can read more about creating your own custom notifications and/or hooks [here](https://github.com/caronc/apprise/wiki/decorator_notify). - -# Persistent Storage - -Persistent storage allows Apprise to cache re-occurring actions optionaly to disk. This can greatly reduce the overhead used to send a notification. - -There are 3 Persistent Storage operational states Apprise can operate using: -1. `auto`: Flush gathered cache information to the filesystem on demand. This option is incredibly light weight. This is the default behavior for all CLI usage. - * Developers who choose to use this operational mode can also force cached information manually if they choose. - * The CLI will use this operational mode by default. -1. `flush`: Flushes any cache information to the filesystem during every transaction. -1. `memory`: Effectively disable Persistent Storage. Any caching of data required by each plugin used is done in memory. Apprise effectively operates as it always did before peristent storage was available. This setting ensures no content is every written to disk. 
- * By default this is the mode Apprise will operate under for those developing with it unless they configure it to otherwise operate as `auto` or `flush`. This is done through the `AppriseAsset()` object and is explained further on in this documentation. - -## CLI Persistent Storage Commands - -You can provide the keyword `storage` on your CLI call to see the persistent storage options available to you. -```bash -# List all of the occupied space used by Apprise's Persistent Storage: -apprise storage list - -# list is the default option, so the following does the same thing: -apprise storage - -# You can prune all of your storage older then 30 days -# and not accessed for this period like so: -apprise storage prune - -# You can do a hard reset (and wipe all persistent storage) with: -apprise storage clean - -``` - -You can also filter your results by adding tags and/or URL Identifiers. When you get a listing (`apprise storage list`), you may see: -``` - # example output of 'apprise storage list': - 1. f7077a65 0.00B unused - - matrixs://abcdef:****@synapse.example12.com/%23general?image=no&mode=off&version=3&msgtype... - tags: team - - 2. 0e873a46 81.10B active - - tgram://W...U//?image=False&detect=yes&silent=no&preview=no&content=before&mdv=v1&format=m... - tags: personal - - 3. abcd123 12.00B stale - -``` -The (persistent storage) cache states are: - - `unused`: This plugin has not commited anything to disk for reuse/cache purposes - - `active`: This plugin has written content to disk. Or at the very least, it has prepared a persistent storage location it can write into. - - `stale`: The system detected a location where a URL may have possibly written to in the past, but there is nothing linking to it using the URLs provided. It is likely wasting space or is no longer of any use. - -You can use this information to filter your results by specifying _URL ID_ (UID) values after your command. 
For example: -```bash -# The below commands continue with the example already identified above -# the following would match abcd123 (even though just ab was provided) -# The output would only list the 'stale' entry above -apprise storage list ab - -# knowing our filter is safe, we could remove it -# the below command would not obstruct our other to URLs and would only -# remove our stale one: -apprise storage clean ab - -# Entries can be filtered by tag as well: -apprise storage list --tag=team - -# You can match on multiple URL ID's as well: -# The followin would actually match the URL ID's of 1. and .2 above -apprise storage list f 0 -``` -When using the CLI, Persistent storage is set to the operational mode of `auto` by default, you can change this by providing `--storage-mode=` (`-SM`) during your calls. If you want to ensure it's always set to a value of your choice. - -For more information on persistent storage, [visit here](https://github.com/caronc/apprise/wiki/persistent_storage). - -## API Persistent Storage Commands -For developers, persistent storage is set in the operational mode of `memory` by default. - -It's at the developers discretion to enable it (by switching it to either `auto` or `flush`). Should you choose to do so: it's as easy as including the information in the `AppriseAsset()` object prior to the initialization of your `Apprise()` instance. - -For example: -```python -from apprise import Apprise -from apprise import AppriseAsset -from apprise import PersistentStoreMode - -# Prepare a location the persistent storage can write it's cached content to. 
-# By setting this path, this immediately assumes you wish to operate the -# persistent storage in the operational 'auto' mode -asset = AppriseAsset(storage_path="/path/to/save/data") - -# If you want to be more explicit and set more options, then you may do the -# following -asset = AppriseAsset( - # Set our storage path directory (minimum requirement to enable it) - storage_path="/path/to/save/data", - - # Set the mode... the options are: - # 1. PersistentStoreMode.MEMORY - # - disable persistent storage from writing to disk - # 2. PersistentStoreMode.AUTO - # - write to disk on demand - # 3. PersistentStoreMode.FLUSH - # - write to disk always and often - storage_mode=PersistentStoreMode.FLUSH - - # The URL IDs are by default 8 characters in length. You can increase and - # decrease it's value here. The value must be > 2. The default value is 8 - # if not otherwise specified - storage_idlen=8, -) - -# Now that we've got our asset, we just work with our Apprise object as we -# normally do -aobj = Apprise(asset=asset) -``` - -For more information on persistent storage, [visit here](https://github.com/caronc/apprise/wiki/persistent_storage). - -# Want To Learn More? 
- -If you're interested in reading more about this and other methods on how to customize your own notifications, please check out the following links: -* 📣 [Using the CLI](https://github.com/caronc/apprise/wiki/CLI_Usage) -* ðŸ› ï¸ [Development API](https://github.com/caronc/apprise/wiki/Development_API) -* 🔧 [Troubleshooting](https://github.com/caronc/apprise/wiki/Troubleshooting) -* âš™ï¸ [Configuration File Help](https://github.com/caronc/apprise/wiki/config) -* âš¡ [Create Your Own Custom Notifications](https://github.com/caronc/apprise/wiki/decorator_notify) -* 💾 [Persistent Storage](https://github.com/caronc/apprise/wiki/persistent_storage) -* 🌎 [Apprise API/Web Interface](https://github.com/caronc/apprise-api) -* 🎉 [Showcase](https://github.com/caronc/apprise/wiki/showcase) - -Want to help make Apprise better? -* 💡 [Contribute to the Apprise Code Base](https://github.com/caronc/apprise/wiki/Development_Contribution) -* â¤ï¸ [Sponsorship and Donations](https://github.com/caronc/apprise/wiki/Sponsors) diff --git a/libs/apprise-1.9.2.dist-info/RECORD b/libs/apprise-1.9.2.dist-info/RECORD deleted file mode 100644 index e4cc8e8ca0..0000000000 --- a/libs/apprise-1.9.2.dist-info/RECORD +++ /dev/null @@ -1,204 +0,0 @@ -../../bin/apprise,sha256=ZJ-e4qqxNLtdW_DAvpuPPX5iROIiQd8I6nvg7vtAv-g,233 -apprise-1.9.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -apprise-1.9.2.dist-info/LICENSE,sha256=14Xm5_sr7ckw9aeWP1S8s2BdDw0zmZ-84BYbRG_25qk,1343 -apprise-1.9.2.dist-info/METADATA,sha256=x_o_QTrKa9ZvriOzP0u2ATsgIbK7WAQU_TEwI2Q58g4,52956 -apprise-1.9.2.dist-info/RECORD,, -apprise-1.9.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -apprise-1.9.2.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPEjma1nZyJ0,92 -apprise-1.9.2.dist-info/entry_points.txt,sha256=71YypBuNdjAKiaLsiMG40HEfLHxkU4Mi7o_S0s0d8wI,45 -apprise-1.9.2.dist-info/top_level.txt,sha256=JrCRn-_rXw5LMKXkIgMSE4E0t1Ks9TYrBH54Pflwjkk,8 
-apprise/__init__.py,sha256=fNkCsrDHTksln2E4fuFuLCiUgT1fQiikqXecAVAJgts,3896 -apprise/apprise.py,sha256=zAvoPxYgXU5anlFUxvcSyc24W1YXxfNMVx7_nJWGSFM,32838 -apprise/apprise.pyi,sha256=_4TBKvT-QVj3s6PuTh3YX-BbQMeJTdBGdVpubLMY4_k,2203 -apprise/apprise_attachment.py,sha256=8alD6gXQoF1QLyTdJlzMXJ7m5gDA50vLBGMfJXQjVOM,12363 -apprise/apprise_attachment.pyi,sha256=R9-0dVqWpeaFrVpcREwPhGy3qHWztG5jEjYIOsbE5dM,1145 -apprise/apprise_config.py,sha256=8bXKAeWMMJULeXd9APz9QeSc4sBvdfiw4xE18-3AtW0,16890 -apprise/apprise_config.pyi,sha256=_mUlCnncqAq8sL01WxQTgZjnb2ic9kZXvtqZmVl-fc8,1568 -apprise/asset.py,sha256=ZFJW4IbiBGxdqvasCT9GVXyo1GyI0DGOuRU7znXF8Ag,15103 -apprise/asset.pyi,sha256=NYLXXYbScgRkspP27XGpRRM_uliPu1OCdWdZBPPvLng,979 -apprise/assets/NotifyXML-1.0.xsd,sha256=292qQ_IUl5EWDhPyzm9UTT0C2rVvJkyGar8jiODkJs8,986 -apprise/assets/NotifyXML-1.1.xsd,sha256=bjR3CGG4AEXoJjYkGCbDttKHSkPP1FlIWO02E7G59g4,1758 -apprise/assets/themes/default/apprise-failure-128x128.ico,sha256=Mt0ptfHJaN3Wsv5UCNDn9_3lyEDHxVDv1JdaDEI_xCA,67646 -apprise/assets/themes/default/apprise-failure-128x128.png,sha256=66ps8TDPxVH3g9PlObJqF-0x952CjnqQyN3zvpRcOT8,16135 -apprise/assets/themes/default/apprise-failure-256x256.png,sha256=bQBsKKCsKfR9EqgYOZrcVcVa5y8qG58PN2mEqO5eNRI,41931 -apprise/assets/themes/default/apprise-failure-32x32.png,sha256=vH0pZffIDCvkejpr3fJHGXW__8Yc3R_p0bacX6t6l18,2437 -apprise/assets/themes/default/apprise-failure-72x72.png,sha256=EP5A8DHRDr9srgupFSwOoyQ308bNJ8aL192J_L4K-ec,7600 -apprise/assets/themes/default/apprise-info-128x128.ico,sha256=F5_CirmXueRCRI5Z_Crf6TS6jVIXTJlRD83zw1oJ66g,67646 -apprise/assets/themes/default/apprise-info-128x128.png,sha256=bBqRZAgQey-gkmJrnFhPbzjILSrljE59mRkgj3raMQo,16671 -apprise/assets/themes/default/apprise-info-256x256.png,sha256=B5r_O4d9MHCmSWZwfbqQgZSp-ZetTdiBSwKcMTF1aFA,43331 -apprise/assets/themes/default/apprise-info-32x32.png,sha256=lt3NZ95TzkiCNVNlurrB2fE2nriMa1wftl7nrNXmb6c,2485 
-apprise/assets/themes/default/apprise-info-72x72.png,sha256=kDnsZpqNUZGqs9t1ECUup7FOfXUIL-rupnQCYJp9So4,7875 -apprise/assets/themes/default/apprise-logo.png,sha256=85ttALudKkLmiqilJT7mUQLUXRFmM1AK89rnwLm313s,160907 -apprise/assets/themes/default/apprise-success-128x128.ico,sha256=uCopPwdQjxgfohKazHaDzYs9y4oiaOpL048PYC6WRlg,67646 -apprise/assets/themes/default/apprise-success-128x128.png,sha256=nvDuU_QqhGlw6cMtdj7Mv-gPgqCEx-0DaaXn1KBLVYg,17446 -apprise/assets/themes/default/apprise-success-256x256.png,sha256=vXfKuxY3n0eeXHKdb9hTxICxOEn7HjAQ4IZpX0HSLzc,48729 -apprise/assets/themes/default/apprise-success-32x32.png,sha256=Jg9pFJh3YPI-LiPBebyJ7Z4Vt7BRecaE8AsRjQVIkME,2471 -apprise/assets/themes/default/apprise-success-72x72.png,sha256=FQbgvIhqKOhEK0yvrhaSpai0R7hrkTt_-GaC2KUgCCk,7858 -apprise/assets/themes/default/apprise-warning-128x128.ico,sha256=6XaQPOx0oWK_xbhr4Yhb7qNazCWwSs9lk2SYR2MHTrQ,67646 -apprise/assets/themes/default/apprise-warning-128x128.png,sha256=pf5c4Ph7jWH7gf39dJoieSj8TzAsY3TXI-sGISGVIW4,16784 -apprise/assets/themes/default/apprise-warning-256x256.png,sha256=SY-xlaiXaj420iEYKC2_fJxU-yj2SuaQg6xfPNi83bw,43708 -apprise/assets/themes/default/apprise-warning-32x32.png,sha256=97R2ywNvcwczhBoWEIgajVtWjgT8fLs4FCCz4wu0dwc,2472 -apprise/assets/themes/default/apprise-warning-72x72.png,sha256=L8moEInkO_OLxoOcuvN7rmrGZo64iJeH20o-24MQghE,7913 -apprise/attachment/__init__.py,sha256=5g3VxLUO59xulyvRaTwztWWzold525uH3tEbEnb18go,1678 -apprise/attachment/base.py,sha256=ZDOcpORGcMEBlWuto98tIjPqJU74frlKiWQXB1GHJTI,16509 -apprise/attachment/base.pyi,sha256=w0XG_QKauiMLJ7eQ4S57IiLIURZHm_Snw7l6-ih9GP8,961 -apprise/attachment/file.py,sha256=dxgCKdIqW3qf0Ioe2rNcDEZxx7MGduTt0qDLIpORZ4A,4976 -apprise/attachment/http.py,sha256=MHjzgfyH-LSxuGennIh3H2vj7te_yqJfSg4-7jmpnqQ,13758 -apprise/attachment/memory.py,sha256=6z8ZqLlpQOm7hyZjNHv_ydAAGIdPhvJ9fnC3JQYS8Gw,6999 -apprise/cli.py,sha256=Hd5pndYCfGC0UzFqLQp6yO9UZdd2l2ehe2LZxqMjZng,35327 
-apprise/common.py,sha256=Xdd6qyGQhnpXN_JunCTMRzV1iOQbXOFE7XhIVuasfCc,6524 -apprise/common.pyi,sha256=luF3QRiClDCk8Z23rI6FCGYsVmodOt_JYfYyzGogdNM,447 -apprise/config/__init__.py,sha256=oFL3nzr4dD4z7L_3n7_WTrc5avmaKfCsWq2zqCR8f2g,1679 -apprise/config/base.py,sha256=inMITk2xSHRzduIY63G9qfgwFAUHUXkbW1DfKaLEp_g,53129 -apprise/config/base.pyi,sha256=cngfobwH6v2vxYbQrObDi5Z-t5wcquWF-wR0kBCr3Eg,54 -apprise/config/file.py,sha256=kanHNPbCuOgEVnXn-Hl4MOA4356o7oDlF35VUl5zUsc,6245 -apprise/config/http.py,sha256=pgJyVkWA3y19eGGWRuWdYszF5iFEc8ighw4HJ8I6ohA,9440 -apprise/config/memory.py,sha256=x9Orap8oe1lp2LWqvHPQnEzmdkC7teYEXI6XfO896lQ,2816 -apprise/conversion.py,sha256=jL08RvFkWXJ_9zMR_tpMSJYWaiGQbZA7ybMp5ElLwZw,6350 -apprise/decorators/__init__.py,sha256=1S2yIQDvI6WWG7VQiqEQbWKfu-olntsFAjPYhHkqAqk,1487 -apprise/decorators/base.py,sha256=qYw-QbvxwOKoOuutdn-b_jfy4uONuCFwUywbkhY2NEs,8084 -apprise/decorators/notify.py,sha256=UIZOPjcPpIP3A1PVylyj_HdPAY6AuNW-UgXjvpJ2Mg8,5097 -apprise/emojis.py,sha256=i3Tkj_FQKRgStZKBpt8xMkitTPwJBYjeGY9OXqH2an8,87738 -apprise/exception.py,sha256=A41ILE-cy6hhF5V1LoxKvSwu-fV2H3CuBO1T1JWe4so,2337 -apprise/i18n/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -apprise/i18n/en/LC_MESSAGES/apprise.mo,sha256=aOLjg1I4K-9KQEqyFhs7wW13jUPjxY4X7EN2XvYR8Qw,3959 -apprise/locale.py,sha256=cDboaRDd011-7Wtd1D3y8wEWNCcz5IvFr1FSe-FHGAE,8994 -apprise/logger.py,sha256=aQM5_IABNvLieni87vzmpzbSU0MHcZUh9dbk0yURjY4,6921 -apprise/manager.py,sha256=HiKToGceJZWGwidUrgYjFSehhb-JpBkHLvpuTZfMxNA,27352 -apprise/manager_attachment.py,sha256=9VlNSuxT9dl773Sa8ESx-9owY4R7EI7hYRICfzvdkhA,2200 -apprise/manager_config.py,sha256=GOl781nYG3P6AgcEDAbge3mR4KFz06z-1OKyyglL5Og,2205 -apprise/manager_plugins.py,sha256=yh1CEm8KkdBjFMqzfJbEL9BkN3vSb14iNZHCvRf8LLw,2211 -apprise/persistent_store.py,sha256=nYRa1ajYVvwTpoPKHYhvxEog6iB4gOZgaBQVLjy3H_I,56446 -apprise/plugins/__init__.py,sha256=TMYQsbofZ5S4rRWYNfE86uMNiGIzpWw-jC7e6JqECMg,18695 
-apprise/plugins/africas_talking.py,sha256=jTlKxm9YQhzvd0PigQv6Qs9_aAvZ3W-ECKPf_w7X2Vc,16028 -apprise/plugins/apprise_api.py,sha256=QFjg0_Gev3OMXfhoZJdPKyrlBzeyWjlEvezpL4ZdY44,16802 -apprise/plugins/aprs.py,sha256=G7_1gaUW9SKDID1Vv2Y4b5BUu1lhWh-18m8AhzNeKL4,25779 -apprise/plugins/bark.py,sha256=4V4qBjJRFczDJTuG2R1phGjnnUNfbO2cWx2YpZX5eiI,17079 -apprise/plugins/base.py,sha256=zdRrzKIfpe0zTDVv5rw6Q4KBnIhkidnjhacu86Ntgp8,32784 -apprise/plugins/base.pyi,sha256=aKlZXRYUgG8lz_ZgGkYYJ_GKhuf18youTmMU-FlG7z8,21 -apprise/plugins/bulksms.py,sha256=ydVHseuxMcsylatrM0zYJQDmEmaO4zK3mJyKm5gHOcM,16346 -apprise/plugins/bulkvs.py,sha256=kc6AWcc8MTeFmXZQtYkL4qlL8sfsoYQLev1SqHhRYys,13549 -apprise/plugins/burstsms.py,sha256=VjD0VFNpO6DXqs8yJQ7Ft3TC7ucXageHFEYCqTMszGY,15797 -apprise/plugins/chanify.py,sha256=MHhBTh5X0gwsdbEhQX-rp1A-jPrpeGjt5t-0d6cxm8I,6930 -apprise/plugins/clicksend.py,sha256=5oi1WMGfzhDm823heb9M-0kWr_4fZkp37s6YJVfWHdU,11671 -apprise/plugins/custom_form.py,sha256=V4sKjLZKGzCda9lC48GXO3eIFy1XQebKa5Xulq6txT0,18443 -apprise/plugins/custom_json.py,sha256=kQ5e2tDDGBUC_3sI2lW93pavK5c0alh89PEw6cg9KSU,14312 -apprise/plugins/custom_xml.py,sha256=1HXKLtTX4DrOvcV3AIKiQixYoWatysfo4KXZMKK6YjY,17569 -apprise/plugins/d7networks.py,sha256=ys7JZBdcqbNfkHJwGY_On3CyMHaMh3PsC6G15wd3ELo,15267 -apprise/plugins/dapnet.py,sha256=VLwR0XtR9Sj8kX0C9e4lBKpKrR7O4dSCTXtWJe4QnFw,13858 -apprise/plugins/dbus.py,sha256=6XIVCzfbVbakQNXwXFi--meDviTtNB9Vm5Ly7B03DV0,14491 -apprise/plugins/dingtalk.py,sha256=0OPh1aa5vdmQEIjON85BDpItavR2eLu6ugR0eeNzH38,12273 -apprise/plugins/discord.py,sha256=gi5COi8tE4ao4JZEwjodTnL-f1FlYQvR4fSLdJPKLic,26702 -apprise/plugins/email/__init__.py,sha256=Iajljg0rqGJMiWWHHVXJVm91glLeQEtBXQniE4xGcP0,2101 -apprise/plugins/email/base.py,sha256=44eo1Te7-Li4VgcNhEyzMLXGHITrYU2xBuZ3NEf23bw,38373 -apprise/plugins/email/common.py,sha256=UIg9ce2-CMj9SrI2HBBIbrWyWZNMUzmzsfOd1QclT-E,2569 -apprise/plugins/email/templates.py,sha256=DE_XonwemouEl-1qoUlXn4vCBTdZ2M5K9xnndMsw9JE,9376 
-apprise/plugins/emby.py,sha256=iq2EGPoaxZv6OTHiRe8ATlrSmJTTzrIAFS_G5fM_nQc,24427 -apprise/plugins/enigma2.py,sha256=8lhjMJpP692nguMPxcxqwVE9bVJsOQOoE9IrWGd7JeE,11931 -apprise/plugins/fcm/__init__.py,sha256=yvJyuNa7p9q476cwR5GZwz0M5OO2pgmdSRoadUoUoOI,21936 -apprise/plugins/fcm/color.py,sha256=ALQCrxBo9LxvDboS6uOqmM6KyBHOqnHs8qGK__aEYK4,4591 -apprise/plugins/fcm/common.py,sha256=Dj0pMxMrmO0LUz8uHefDthJdYAIqO2hTRc6fefPwqiI,1718 -apprise/plugins/fcm/oauth.py,sha256=t21Dfbzi6bx1J-F4oqkXRrfVS-fW3iHrqwAN9sEPNN8,11197 -apprise/plugins/fcm/priority.py,sha256=eJa-3DSjfLuKJ7N0gJZ34QVRLB9hT_EqOE9yaplPcOA,8163 -apprise/plugins/feishu.py,sha256=c6u4jnJSkNckZzx0yngobpb36viLFAvjUZAW7RTXkcY,7540 -apprise/plugins/flock.py,sha256=fnXvAaEGo-bO3kdZJaEjeMJ0b-ROZH8IpYl4hXfbZBo,12984 -apprise/plugins/freemobile.py,sha256=wOwYVihSfK0OxuuEFsS2VaRM-CFytU7W42-7nM1LyRI,6974 -apprise/plugins/gnome.py,sha256=pey5eXFtFKzXtIE90jWT9SzYZtYNLAXYYXmJKZZnmbc,9227 -apprise/plugins/google_chat.py,sha256=-S1AyzjoOJNdqsu_KiLKOTnatT5nA8KOE530WLGEPg0,12973 -apprise/plugins/gotify.py,sha256=Rit01QDkyldDpGQEMlP-WmsK6xUbcLFxvW9uQi1rw04,11028 -apprise/plugins/growl.py,sha256=HzqSnp8z-n36uNiMnhKf4Ej91DLfJGyXNtOWYzfJH-Q,14446 -apprise/plugins/guilded.py,sha256=M_tUu56FniUx87SnaT4nU7UqbcwofTwy7p5n2r6FRnU,3707 -apprise/plugins/home_assistant.py,sha256=4v53BkIqh389bJEGh_3eMz_tK64HkzmBPiCwqgohnKA,11276 -apprise/plugins/httpsms.py,sha256=ILqURAj_nMIkvP359U25M1u5lhJ2iqnrDKjC_FvcB6U,11386 -apprise/plugins/ifttt.py,sha256=WPiiOGLPMwW8O88A9y2s7dnk06enC9_Z7HMIrOc0hZI,13685 -apprise/plugins/join.py,sha256=Tt7nNysedcEjNXZ7tX9XInoo0aVW0qY_e9JZtAtnWBs,13818 -apprise/plugins/kavenegar.py,sha256=mK9NXlrO1MzzKb3gNSWyPregtb1BXdx5RVapMgmlr2w,12870 -apprise/plugins/kumulos.py,sha256=PWLq75hYnoRHLIEZSDsVpgHv5PJmtVy3flJ599EYubI,8505 -apprise/plugins/lametric.py,sha256=exYazJnlzRbdy1SXceHKRUfF1PJno1EOBgkAC9vfhYY,38449 -apprise/plugins/line.py,sha256=js-B-tlXNacF_jfrZT3k8Sr6nWom-GldGjymFTB2oJ8,10908 
-apprise/plugins/lunasea.py,sha256=vpD6x5Hc_0Ix5q8mIZtimZWKlONpgg1aAJYrQOoy-dk,15089 -apprise/plugins/macosx.py,sha256=vkpdYUehyqfBn2kWm0qnYzhfDEMgh19SyYof8A6Y8AU,8439 -apprise/plugins/mailgun.py,sha256=4FqH2_Y6VluXkwss3EHgtBBOmVjX0drGhN6g1reU6Os,25868 -apprise/plugins/mastodon.py,sha256=iXtZ3UdPFuMKy3O-sxWRfDqTetdacAhGnz4RAUgbJj4,35610 -apprise/plugins/matrix.py,sha256=OcS0r-PQNlWd8bR5uWeb-9COJAxf0rKqeoZfrTvFM50,63276 -apprise/plugins/mattermost.py,sha256=fHfSxf0qyFZY5M4TUcFL2n-NDs_FbW63jNrSSGK4uk0,14813 -apprise/plugins/messagebird.py,sha256=d6KseASKCkQesMtrSZDp9RnMpPwFjQTq3_woDpcsIUM,12498 -apprise/plugins/misskey.py,sha256=_loWj8kKIPsf9X52Zb7rYx7DDt-rZ1ii5DCQyPfPrLY,9909 -apprise/plugins/mqtt.py,sha256=AaV700N3rHHQ7uYbz_Veao7qxgXsuOMGEDu7AaiXZ6E,20630 -apprise/plugins/msg91.py,sha256=G6ne6G5Rlw-Q7rUQf80nQ0wbdgJ-hdU1-p5SL_XHijg,12937 -apprise/plugins/msteams.py,sha256=sTeLbj2q7OdHTJJTKZFQtu436NeuAlKLJ8zQwW43K5I,26653 -apprise/plugins/nextcloud.py,sha256=lrk1zhfcf-y7VllhNbBvSEmuyoKJ6wh3Q4bzfKRoF3w,13134 -apprise/plugins/nextcloudtalk.py,sha256=xlqvWvYtPgbRQsabkKhuM_xNhfbyvkYXySiD8El0JNk,11386 -apprise/plugins/notica.py,sha256=f4tN9KbErS1S1g5kGBQ2JocliP3qqmbyNYD3vG2keC8,13400 -apprise/plugins/notifiarr.py,sha256=6Y0lhM5sLLDCS7O3Wok6nTNbBDuCduRIDmhqVG8gCRI,15516 -apprise/plugins/notifico.py,sha256=4YKZJE5_JL0prsOHRdLkICUOQ62SSuq-jmYEpSy8WKk,12309 -apprise/plugins/ntfy.py,sha256=hbWADksgJG0GkU8zeIuTbELKX-KwhrdgwLBc97svfOY,30163 -apprise/plugins/office365.py,sha256=zlwaVBGetYvhwRCqtAUOe1qB1EoGxfTet3wFk6oYGQs,36877 -apprise/plugins/one_signal.py,sha256=CQ99QuB35UQ1yBFpGw2IcBljxh8mO0f-ZPmk-X3eMqE,22555 -apprise/plugins/opsgenie.py,sha256=zHRVWV2diEQRyH-2rDi6RSo1DjeyQns27xrRdU1fIVk,28428 -apprise/plugins/pagerduty.py,sha256=oAHVdBlZJ_dyCvwjjOIBuDLBWDFTkRM_8osnVslkA3c,18196 -apprise/plugins/pagertree.py,sha256=MJS6BeJaV3k-9w9BdZKFlcKXUHRRUQHfPvo4CubMySE,14110 -apprise/plugins/parseplatform.py,sha256=G_lL3M0dZpsjXpQ8DmIN2KktulSYYSm99Ijwk88rdFs,10709 
-apprise/plugins/plivo.py,sha256=wYDlwG6XstiDO82N8uRLdAUGiq_hj0_q3Vle4g1NjyY,13840 -apprise/plugins/popcorn_notify.py,sha256=aI7ad-tPogvfxE8fvs9hU7aDEsLo7pPHzlg-GDu6vG4,10793 -apprise/plugins/prowl.py,sha256=5XH3Usgm-Uv-0yGmd4_J4h7LanhFZdRfHs-6c7pya_s,10075 -apprise/plugins/pushbullet.py,sha256=tEEQCBmuDYSw3fdqb6jcie-Du8hS5PnggehKO_Cb70U,15695 -apprise/plugins/pushdeer.py,sha256=xgmOIYcSbSfh1GKnFtZQW9f0XiurUg7X_gUwZLSV4zo,7290 -apprise/plugins/pushed.py,sha256=5snmqDvqoqUW5tSmbgEJMbrlcsafnaz1bYYGC8jscLU,12540 -apprise/plugins/pushjet.py,sha256=-p0i-f3zqoTVeluuu78DldzocCRl2F2DM-4ht5dHuCQ,9344 -apprise/plugins/pushme.py,sha256=1BF_RVPruaGAnQuEEEVYLke8sgMGljCwdWEspm7yE8M,7389 -apprise/plugins/pushover.py,sha256=9jzq2p0juF_GLg8vRS5bquoegqxhRbDLsotRh9fIwDM,21477 -apprise/plugins/pushsafer.py,sha256=IHu2JUpxTkpaOgvk3JhmODR-WB7nXOx7iL3t9doGRPw,27166 -apprise/plugins/pushy.py,sha256=HYTQRhvholpy6B0o9qY4F-AwDlCZ-UGN9A-z2HZMQco,12752 -apprise/plugins/reddit.py,sha256=agf0UZNPygPh5PsF6PvCwwYXu2KjFajRnomiGEa1Xuk,26102 -apprise/plugins/revolt.py,sha256=89tjfWBz27KVs63xbxmfSeJ7zc3fNUp-FROTC1PVX-4,14759 -apprise/plugins/rocketchat.py,sha256=jDgyRuaOb0QV9r8XLZK1i8jzjGIxLs--ygK7fXy4yDo,26293 -apprise/plugins/rsyslog.py,sha256=_dx033ZPJYsfbDfArDAiQwDpH78Wy7iUA2_y0JBXNeQ,12364 -apprise/plugins/ryver.py,sha256=z1-NpFE4EHKrBwdpe65dSOrI3WUSWgL4rwqu8sfAPDw,12097 -apprise/plugins/sendgrid.py,sha256=AVzVthR6OW9Qf-476Xtj59b2KESrxRdCsEamgaLOvKQ,18087 -apprise/plugins/serverchan.py,sha256=pWcw1ChbsLDHRjaDRGguSSMwFGj5-WnNWqRJVGcLUQU,6054 -apprise/plugins/ses.py,sha256=xDDN0vcgMlQ_XPVMcRMC_by6bzPY3TOj3Yz0ajHyK9I,34032 -apprise/plugins/seven.py,sha256=VxSHLS1LITYfblm2qDEyjPothgIzrgVvs18JDUSI5XE,10422 -apprise/plugins/sfr.py,sha256=pYi4UkIeXjAGPoW8Tevv5IspO7hSmRP0te9O_1y4nXE,15192 -apprise/plugins/signal_api.py,sha256=LgkwVA07_juOk-b6XPWxPOCwZjEovGFgcLbfZ-rdowM,17061 -apprise/plugins/simplepush.py,sha256=rsl9B-dfVIZ4sOLHJTbSoja6u1etRuiwRqrVvqFR4EE,12049 
-apprise/plugins/sinch.py,sha256=ogAlYlig5NGsiLPVrwBwCg3ZOf_ATa3B0aDiNxayBS0,17153 -apprise/plugins/slack.py,sha256=_7x3Djr5T7P76HSEjhuDZR6FChfMo1FKgts95f9eMTY,44304 -apprise/plugins/smseagle.py,sha256=f7nPIIBZA6DA9eYWAVZXy6-KAQMMkPfG7XP0vY3jqoY,24483 -apprise/plugins/smsmanager.py,sha256=vDL1H7rGbWWTap-7klWjcDesAf7J2CkmI2H4CiErD6Q,14353 -apprise/plugins/smtp2go.py,sha256=G4V6DscnL2ePefgRNnDZvCYBAbnE_8TTeOPP8NDV7bc,19984 -apprise/plugins/sns.py,sha256=OL-NA_Edx_0_dq78jV_29P_iHvKb2sTZ4X7jTxw730Q,24488 -apprise/plugins/sparkpost.py,sha256=CQoMAslrPJVa6Bmx_boWKAbGpH1MfsKzcZA6duRhN_U,28113 -apprise/plugins/splunk.py,sha256=HbbW0TXp7Gne9JGkylZE7Sw15nAk0mR3rLEhnNJikMc,16607 -apprise/plugins/streamlabs.py,sha256=c9YoB9t_gzKW4UfP3aSZsCvsEvxF7j9ybSiwdvPCfCo,16306 -apprise/plugins/synology.py,sha256=xfRY9ecjgENX0oZxg79_EeeXLnmcjJyhjAY2NcCGmVU,11525 -apprise/plugins/syslog.py,sha256=WNCwRJKBpTrky130EwcTzoIuzMMG5lPYxb3qCSGmDCY,10785 -apprise/plugins/techuluspush.py,sha256=gpsFzuQcuexSGpV9FZAO2s9JRDo8OXxvZkbeX1EvZvE,7528 -apprise/plugins/telegram.py,sha256=wf0W45ohxpdbC2-jv7fDMr02R6OD4B7ygPMiD7VeTOs,38487 -apprise/plugins/threema.py,sha256=n-gOBIpUCVZnICL8CtWwLhKlWDsTMZt8-fL8VlRUzcg,12110 -apprise/plugins/twilio.py,sha256=tLb6prAJbStWafIOxCMNZc6ere2f6cypXcusVPn_5Pc,18468 -apprise/plugins/twist.py,sha256=M4pGZcbE_DpblZG_vhCZD48YkCWdV5O0nv2fZPKRiwg,29197 -apprise/plugins/twitter.py,sha256=QtP2PMZPyBSFzUupq5Egsh0VCopvVson8CAGMl6dALk,30612 -apprise/plugins/voipms.py,sha256=N6fdKjiAXox2R_y0eDEn2BZ72jxW1i3U4yzUFgZcQQQ,13114 -apprise/plugins/vonage.py,sha256=VmA78rJEPdeFpZ5xwalK7ynH9I1CGTqr3D7nhV4e9d4,13680 -apprise/plugins/webexteams.py,sha256=k5vlb4jvI3_D2xty03W1rQhgotzW_gy6XZPfIKfa2tw,9441 -apprise/plugins/wecombot.py,sha256=9oc2KXuhLHUknmckkQpXl2HKrEEwWVf0Gr23FSDpezM,9061 -apprise/plugins/whatsapp.py,sha256=JA7hZsnEcN7azUq_mTQwRdGp5F508RhxMH6BT49Vz6g,20180 -apprise/plugins/windows.py,sha256=stTRSJnowyJGW3ALg3NucOES2iZzeBoCB9a6344ev0M,8731 
-apprise/plugins/workflows.py,sha256=tM11grGORuAfdjcp_Zw3_cfUTh_1kjdJcjry81UEc3c,19850 -apprise/plugins/wxpusher.py,sha256=4sJjz8AzhQfLh7wutctG1oiBpVSI3JwH9-g3oq3D2t0,12688 -apprise/plugins/xbmc.py,sha256=yoLCiSYk9MQ5q4rNliMRYb8zcNrwF5bsI4jCP_rvPso,12742 -apprise/plugins/zulip.py,sha256=SPD-DUA5cU1QGGnkrqesW732hisTjnAZQp1V_1vJhgE,14360 -apprise/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -apprise/url.py,sha256=lOo1pjN33LKIWMiubw0wClWmstNj9fl5NOctCAp9HJk,35026 -apprise/url.pyi,sha256=WLaRREH7FzZ5x3-qkDkupojWGFC4uFwJ1EDt02lVs8c,520 -apprise/utils/__init__.py,sha256=VNCibX3QQp5esDkefhDCEDjA2Ajx0mWt-lWXf0yElqc,1430 -apprise/utils/base64.py,sha256=AVY45VxRMr-RnYQJXSbjWhzpiVSTv8qJ2VXZPHwaZPM,3167 -apprise/utils/cwe312.py,sha256=dcFsa86lvPfxOQIlcx1C_b0IYNjLEWQ_CGSh_wcuZZE,7375 -apprise/utils/disk.py,sha256=EdWn6awOYhl7dej8T5xw8ZrnRRn7IcyOu7AYqog53Kw,5801 -apprise/utils/logic.py,sha256=ZGYBpOQwSOY6veaEtx4ADcpmGG_wlZCtO7vjBgKN-jo,4573 -apprise/utils/module.py,sha256=bJuSGXdbydoBkSslFjE1SP9la6lVJ4tBSsNU5gLhXgo,2117 -apprise/utils/parse.py,sha256=dUcgRJrmhfcnnNGLN2bubqDgH_vQMFqlhoHWYgOgUts,39574 -apprise/utils/pgp.py,sha256=gJO4d7CDzL8URjwwHGXoNBY9MRFz5F38p_KokB8Zbrk,11489 -apprise/utils/singleton.py,sha256=lh3pt2NnnnGMPtX1Por3Cu7kSl1Gv6K1NzNpYGCrC5A,1844 -apprise/utils/templates.py,sha256=xh2le2hsAjs7u0DtpR60XHVqMETb09ElOKs2bzIAwlk,3252 diff --git a/libs/apprise-1.9.2.dist-info/WHEEL b/libs/apprise-1.9.2.dist-info/WHEEL deleted file mode 100644 index ba48cbcf92..0000000000 --- a/libs/apprise-1.9.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.41.3) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/libs/Mako-1.3.8.dist-info/INSTALLER b/libs/apprise-1.9.8.dist-info/INSTALLER similarity index 100% rename from libs/Mako-1.3.8.dist-info/INSTALLER rename to libs/apprise-1.9.8.dist-info/INSTALLER diff --git a/libs/apprise-1.9.8.dist-info/METADATA b/libs/apprise-1.9.8.dist-info/METADATA new file mode 100644 
index 0000000000..f0197b00fc --- /dev/null +++ b/libs/apprise-1.9.8.dist-info/METADATA @@ -0,0 +1,787 @@ +Metadata-Version: 2.4 +Name: apprise +Version: 1.9.8 +Summary: Push Notifications that work with just about every platform! +Author-email: Chris Caron +License: BSD-2-Clause +Project-URL: Homepage, https://appriseit.com +Project-URL: Source, https://github.com/caronc/apprise +Project-URL: Tracker, https://github.com/caronc/apprise/issues +Project-URL: Documentation, https://appriseit.com +Keywords: 46elks,Africas Talking,Alerts,Apprise API,Automated Packet Reporting System,AWS,Bark,BlueSky,Brevo,BulkSMS,BulkVS,Burst SMS,Chanify,Chat,CLI,Clickatell,ClickSend,D7Networks,Dapnet,DBus,DingTalk,Discord,Dot,Email,Emby,Enigma2,FCM,Feishu,Flock,Fluxer,Form,Free Mobile,Gnome,Google Chat,Gotify,Growl,Guilded,Home Assistant,httpSMS,IFTTT,IRC,Jellyfin,Join,JSON,Kavenegar,KODI,Kumulos,LaMetric,Lark,Line,MacOSX,Mailgun,Mastodon,Matrix,Mattermost,MessageBird,Microsoft,Misskey,MQTT,MSG91,MSTeams,Nextcloud,NextcloudTalk,Notica,NotificationAPI,Notifiarr,Notifico,Ntfy,Office365,OneSignal,Opsgenie,PagerDuty,PagerTree,ParsePlatform,Plivo,PopcornNotify,Power Automate,Prowl,Push Notifications,PushBullet,PushDeer,Pushed,Pushjet,PushMe,Pushover,Pushplus,PushSafer,Pushy,QQ Push,Quote/0,Reddit,Resend,Revolt,Rocket.Chat,RSyslog,Ryver,SendGrid,SendPulse,ServerChan,SES,Seven,SFR,Signal,SIGNL4,SimplePush,Sinch,Slack,SMPP,SMS Manager,SMSEagle,SMTP2Go,SNS,SparkPost,Spike,Splunk,SpugPush,Streamlabs,Stride,Synology Chat,Syslog,Techulus,Telegram,Threema Gateway,Twilio,Twist,Twitter,Vapid,Viber,VictorOps,Voipms,Vonage,Webex,Webpush,WeCom Bot,WhatsApp,Windows,Workflows,WxPusher,XBMC,XML,XMPP,Zulip +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: System Administrators +Classifier: Operating System :: OS Independent +Classifier: Natural Language :: English +Classifier: Programming Language :: Python +Classifier: 
Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Software Development :: Libraries :: Application Frameworks +Requires-Python: >=3.9 +Description-Content-Type: text/markdown +License-File: LICENSE +Requires-Dist: requests +Requires-Dist: requests-oauthlib +Requires-Dist: click>=5.0 +Requires-Dist: markdown +Requires-Dist: PyYAML +Requires-Dist: certifi +Requires-Dist: tzdata; platform_system == "Windows" +Provides-Extra: dev +Requires-Dist: coverage; extra == "dev" +Requires-Dist: mock; extra == "dev" +Requires-Dist: tox; extra == "dev" +Requires-Dist: pytest; extra == "dev" +Requires-Dist: pytest-cov; extra == "dev" +Requires-Dist: pytest-mock; extra == "dev" +Requires-Dist: ruff; extra == "dev" +Requires-Dist: babel; extra == "dev" +Requires-Dist: validate-pyproject; extra == "dev" +Provides-Extra: all-plugins +Requires-Dist: cryptography; extra == "all-plugins" +Requires-Dist: gntp; extra == "all-plugins" +Requires-Dist: paho-mqtt!=2.0.*; extra == "all-plugins" +Requires-Dist: PGPy; extra == "all-plugins" +Requires-Dist: smpplib; extra == "all-plugins" +Requires-Dist: slixmpp>=1.10.0; extra == "all-plugins" +Provides-Extra: windows +Requires-Dist: pywin32; extra == "windows" +Requires-Dist: tzdata; extra == "windows" +Dynamic: license-file + +![Apprise Logo](https://raw.githubusercontent.com/caronc/apprise/master/apprise/assets/themes/default/apprise-logo.png) + +
+ +**ap·prise** / *verb*
+To inform or tell (someone). To make one aware of something. +
+ +*Apprise* allows you to send a notification to *almost* all of the most popular *notification* services available to us today such as: Telegram, Discord, Slack, Amazon SNS, Gotify, etc. + +* One notification library to rule them all. +* A common and intuitive notification syntax. +* Supports the handling of images and attachments (_to the notification services that will accept them_). +* It's incredibly lightweight. +* Amazing response times because all messages sent asynchronously. + +Developers who wish to provide a notification service no longer need to research each and every one out there. They no longer need to try to adapt to the new ones that comeout thereafter. They just need to include this one library and then they can immediately gain access to almost all of the notifications services available to us today. + +System Administrators and DevOps who wish to send a notification now no longer need to find the right tool for the job. Everything is already wrapped and supported within the `apprise` command line tool (CLI) that ships with this product. + +[![Paypal](https://img.shields.io/badge/paypal-donate-green.svg)](https://www.paypal.com/donate/?hosted_button_id=CR6YF7KLQWQ5E) +[![Follow](https://img.shields.io/twitter/follow/l2gnux)](https://twitter.com/l2gnux/)
+[![Discord](https://img.shields.io/discord/558793703356104724.svg?colorB=7289DA&label=Discord&logo=Discord&logoColor=7289DA&style=flat-square)](https://discord.gg/MMPeN2D) +[![Python](https://img.shields.io/pypi/pyversions/apprise.svg?style=flat-square)](https://pypi.org/project/apprise/) +[![Build Status](https://github.com/caronc/apprise/actions/workflows/tests.yml/badge.svg)](https://github.com/caronc/apprise/actions/workflows/tests.yml) +[![Lines of Code](https://raw.githubusercontent.com/caronc/apprise/master/.github/badges/loc.svg)](https://github.com/caronc/apprise/actions/workflows/loc-badge.yml) +[![CodeCov Status](https://codecov.io/github/caronc/apprise/branch/master/graph/badge.svg)](https://codecov.io/github/caronc/apprise) +[![PyPi Downloads](https://img.shields.io/pepy/dt/apprise.svg?style=flat-square)](https://pypi.org/project/apprise/) + +# Table of Contents + +* [Supported Notifications](#supported-notifications) + * [Productivity Based Notifications](#productivity-based-notifications) + * [SMS Notifications](#sms-notifications) + * [Desktop Notifications](#desktop-notifications) + * [Email Notifications](#email-notifications) + * [Custom Notifications](#custom-notifications) +* [Installation](#installation) +* [Command Line Usage](#command-line-usage) + * [Configuration Files](#cli-configuration-files) + * [File Attachments](#cli-file-attachments) + * [Loading Custom Notifications/Hooks](#cli-loading-custom-notificationshooks) + * [Environment Variables](#cli-environment-variables) +* [Developer API Usage](#developer-api-usage) + * [Configuration Files](#api-configuration-files) + * [File Attachments](#api-file-attachments) + * [Loading Custom Notifications/Hooks](#api-loading-custom-notificationshooks) +* [Persistent Storage](#persistent-storage) +* [More Supported Links and Documentation](#want-to-learn-more) + + +Visit the [Official Documentation](https://appriseit.com/getting-started/) site for more information on Apprise. 
+ +# Supported Notifications + +The section identifies all of the services supported by this library. [Check out the wiki for more information on the supported modules here](https://appriseit.com/). + +## Productivity Based Notifications + +The table below identifies the services this tool supports and some example service urls you need to use in order to take advantage of it. Click on any of the services listed below to get more details on how you can configure Apprise to access them. + +| Notification Service | Service ID | Default Port | Example Syntax | +| -------------------- | ---------- | ------------ | -------------- | +| [Apprise API](https://appriseit.com/services/apprise_api/) | apprise:// or apprises:// | (TCP) 80 or 443 | apprise://hostname/Token +| [AWS SES](https://appriseit.com/services/ses/) | ses:// | (TCP) 443 | ses://user@domain/AccessKeyID/AccessSecretKey/RegionName
ses://user@domain/AccessKeyID/AccessSecretKey/RegionName/email1/email2/emailN +| [Bark](https://appriseit.com/services/bark/) | bark:// | (TCP) 80 or 443 | bark://hostname
bark://hostname/device_key
bark://hostname/device_key1/device_key2/device_keyN
barks://hostname
barks://hostname/device_key
barks://hostname/device_key1/device_key2/device_keyN +| [BlueSky](https://appriseit.com/services/bluesky/) | bluesky:// | (TCP) 443 | bluesky://Handle:AppPw
bluesky://Handle:AppPw/TargetHandle
bluesky://Handle:AppPw/TargetHandle1/TargetHandle2/TargetHandleN +| [Brevo](https://appriseit.com/services/brevo/) | brevo:// | (TCP) 443 | brevo://APIToken:FromEmail/
brevo://APIToken:FromEmail/ToEmail
brevo://APIToken:FromEmail/ToEmail1/ToEmail2/ToEmailN/ +| [Chanify](https://appriseit.com/services/chanify/) | chantify:// | (TCP) 443 | chantify://token +| [Discord](https://appriseit.com/services/discord/) | discord:// | (TCP) 443 | discord://webhook_id/webhook_token
discord://avatar@webhook_id/webhook_token +| [Dot.](https://appriseit.com/services/dot/) | dot:// | (TCP) 443 | dot://apikey@device_id/text/
dot://apikey@device_id/image/
**Note**: `device_id` is the Quote/0 hardware serial +| [Emby](https://appriseit.com/services/emby/) | emby:// or embys:// | (TCP) 8096 | emby://user@hostname/
emby://user:password@hostname +| [Enigma2](https://appriseit.com/services/enigma2/) | enigma2:// or enigma2s:// | (TCP) 80 or 443 | enigma2://hostname +| [FCM](https://appriseit.com/services/fcm/) | fcm:// | (TCP) 443 | fcm://project@apikey/DEVICE_ID
fcm://project@apikey/#TOPIC
fcm://project@apikey/DEVICE_ID1/#topic1/#topic2/DEVICE_ID2/ +| [Feishu](https://appriseit.com/services/feishu/) | feishu:// | (TCP) 443 | feishu://token +| [Flock](https://appriseit.com/services/flock/) | flock:// | (TCP) 443 | flock://token
flock://botname@token
flock://app_token/u:userid
flock://app_token/g:channel_id
flock://app_token/u:userid/g:channel_id +| [Google Chat](https://appriseit.com/services/googlechat/) | gchat:// | (TCP) 443 | gchat://workspace/key/token +| [Gotify](https://appriseit.com/services/gotify/) | gotify:// or gotifys:// | (TCP) 80 or 443 | gotify://hostname/token
gotifys://hostname/token?priority=high +| [Growl](https://appriseit.com/services/growl/) | growl:// | (UDP) 23053 | growl://hostname
growl://hostname:portno
growl://password@hostname
growl://password@hostname:port
**Note**: you can also use the get parameter _version_ which can allow the growl request to behave using the older v1.x protocol. An example would look like: growl://hostname?version=1 +| [Guilded](https://appriseit.com/services/guilded/) | guilded:// | (TCP) 443 | guilded://webhook_id/webhook_token
guilded://avatar@webhook_id/webhook_token +| [Home Assistant](https://appriseit.com/services/homeassistant/) | hassio:// or hassios:// | (TCP) 8123 or 443 | hassio://hostname/accesstoken
hassio://user@hostname/accesstoken
hassio://user:password@hostname:port/accesstoken
hassio://hostname/optional/path/accesstoken +| [IFTTT](https://appriseit.com/services/ifttt/) | ifttt:// | (TCP) 443 | ifttt://webhooksID/Event
ifttt://webhooksID/Event1/Event2/EventN
ifttt://webhooksID/Event1/?+Key=Value
ifttt://webhooksID/Event1/?-Key=value1 +| [IRC](https://appriseit.com/services/irc/) | irc:// or ircs:// | (TCP) 6667 or 6697 | ircs://user:pass@irc.server/@user
ircs://user:pass@irc.server/#channel?join=true&mode=nickserv
ircs://user:pass@znc.server/@user1/@user2/@user3/#channel1 +| [Jellyfin](https://appriseit.com/services/jellyfin/) | jellyfin:// or jellyfins:// | (TCP) 8096 | jellyfin://user@hostname/
jellyfins://user:password@hostname +| [Join](https://appriseit.com/services/join/) | join:// | (TCP) 443 | join://apikey/device
join://apikey/device1/device2/deviceN/
join://apikey/group
join://apikey/groupA/groupB/groupN
join://apikey/DeviceA/groupA/groupN/DeviceN/ +| [KODI](https://appriseit.com/services/kodi/) | kodi:// or kodis:// | (TCP) 8080 or 443 | kodi://hostname
kodi://user@hostname
kodi://user:password@hostname:port +| [Kumulos](https://appriseit.com/services/kumulos/) | kumulos:// | (TCP) 443 | kumulos://apikey/serverkey +| [LaMetric Time](https://appriseit.com/services/lametric/) | lametric:// | (TCP) 443 | lametric://apikey@device_ipaddr
lametric://apikey@hostname:port
lametric://client_id@client_secret +| [Lark](https://appriseit.com/services/lark/) | lark:// | (TCP) 443 | lark://BotToken +| [Line](https://appriseit.com/services/line/) | line:// | (TCP) 443 | line://Token@User
line://Token/User1/User2/UserN +| [Mailgun](https://appriseit.com/services/mailgun/) | mailgun:// | (TCP) 443 | mailgun://user@hostname/apikey
mailgun://user@hostname/apikey/email
mailgun://user@hostname/apikey/email1/email2/emailN
mailgun://user@hostname/apikey/?name="From%20User" +| [Mastodon](https://appriseit.com/services/mastodon/) | mastodon:// or mastodons://| (TCP) 80 or 443 | mastodon://access_key@hostname
mastodon://access_key@hostname/@user
mastodon://access_key@hostname/@user1/@user2/@userN +| [Matrix](https://appriseit.com/services/matrix/) | matrix:// or matrixs:// | (TCP) 80 or 443 | matrix://hostname
matrix://user@hostname
matrixs://user:pass@hostname:port/#room_alias
matrixs://user:pass@hostname:port/!room_id
matrixs://user:pass@hostname:port/#room_alias/!room_id/#room2
matrixs://token@hostname:port/?webhook=matrix
matrix://user:token@hostname/?webhook=slack&format=markdown +| [Mattermost](https://appriseit.com/services/mattermost/) | mmost:// or mmosts:// | (TCP) 8065 | mmost://hostname/authkey
mmost://hostname:80/authkey
mmost://user@hostname:80/authkey
mmost://hostname/authkey?channel=channel
mmosts://hostname/authkey
mmosts://user@hostname/authkey
+| [Microsoft Power Automate / Workflows (MSTeams)](https://appriseit.com/services/workflows/) | workflows:// | (TCP) 443 | workflows://WorkflowID/Signature/ +| [Microsoft Teams](https://appriseit.com/services/msteams/) | msteams:// | (TCP) 443 | msteams://TokenA/TokenB/TokenC/ +| [Misskey](https://appriseit.com/services/misskey/) | misskey:// or misskeys://| (TCP) 80 or 443 | misskey://access_token@hostname +| [MQTT](https://appriseit.com/services/mqtt/) | mqtt:// or mqtts:// | (TCP) 1883 or 8883 | mqtt://hostname/topic
mqtt://user@hostname/topic
mqtts://user:pass@hostname:9883/topic +| [Nextcloud](https://appriseit.com/services/nextcloud/) | ncloud:// or nclouds:// | (TCP) 80 or 443 | ncloud://adminuser:pass@host/User
nclouds://adminuser:pass@host/User1/User2/UserN +| [NextcloudTalk](https://appriseit.com/services/nextcloudtalk/) | nctalk:// or nctalks:// | (TCP) 80 or 443 | nctalk://user:pass@host/RoomId
nctalks://user:pass@host/RoomId1/RoomId2/RoomIdN +| [Notica](https://appriseit.com/services/notica/) | notica:// | (TCP) 443 | notica://Token/ +| [NotificationAPI](https://appriseit.com/services/notificationapi/) | napi:// | (TCP) 443 | napi://ClientID/ClientSecret/Target
napi://ClientID/ClientSecret/Target1/Target2/TargetN
napi://MessageType@ClientID/ClientSecret/Target +| [Notifiarr](https://appriseit.com/services/notifiarr/) | notifiarr:// | (TCP) 443 | notifiarr://apikey/#channel
notifiarr://apikey/#channel1/#channel2/#channeln +| [Notifico](https://appriseit.com/services/notifico/) | notifico:// | (TCP) 443 | notifico://ProjectID/MessageHook/ +| [ntfy](https://appriseit.com/services/ntfy/) | ntfy:// | (TCP) 80 or 443 | ntfy://topic/
ntfys://topic/ +| [Office 365](https://appriseit.com/services/office365/) | o365:// | (TCP) 443 | o365://TenantID:AccountEmail/ClientID/ClientSecret
o365://TenantID:AccountEmail/ClientID/ClientSecret/TargetEmail
o365://TenantID:AccountEmail/ClientID/ClientSecret/TargetEmail1/TargetEmail2/TargetEmailN +| [OneSignal](https://appriseit.com/services/onesignal/) | onesignal:// | (TCP) 443 | onesignal://AppID@APIKey/PlayerID
onesignal://TemplateID:AppID@APIKey/UserID
onesignal://AppID@APIKey/#IncludeSegment
onesignal://AppID@APIKey/Email +| [Opsgenie](https://appriseit.com/services/opsgenie/) | opsgenie:// | (TCP) 443 | opsgenie://APIKey
opsgenie://APIKey/UserID
opsgenie://APIKey/#Team
opsgenie://APIKey/\*Schedule
opsgenie://APIKey/^Escalation +| [PagerDuty](https://appriseit.com/services/pagerduty/) | pagerduty:// | (TCP) 443 | pagerduty://IntegrationKey@ApiKey
pagerduty://IntegrationKey@ApiKey/Source/Component +| [PagerTree](https://appriseit.com/services/pagertree/) | pagertree:// | (TCP) 443 | pagertree://integration_id +| [ParsePlatform](https://appriseit.com/services/parseplatform/) | parsep:// or parseps:// | (TCP) 80 or 443 | parsep://AppID:MasterKey@Hostname
parseps://AppID:MasterKey@Hostname +| [PopcornNotify](https://appriseit.com/services/popcornnotify/) | popcorn:// | (TCP) 443 | popcorn://ApiKey/ToPhoneNo
popcorn://ApiKey/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/
popcorn://ApiKey/ToEmail
popcorn://ApiKey/ToEmail1/ToEmail2/ToEmailN/
popcorn://ApiKey/ToPhoneNo1/ToEmail1/ToPhoneNoN/ToEmailN +| [Prowl](https://appriseit.com/services/prowl/) | prowl:// | (TCP) 443 | prowl://apikey
prowl://apikey/providerkey +| [PushBullet](https://appriseit.com/services/pushbullet/) | pbul:// | (TCP) 443 | pbul://accesstoken
pbul://accesstoken/#channel
pbul://accesstoken/A_DEVICE_ID
pbul://accesstoken/email@address.com
pbul://accesstoken/#channel/#channel2/email@address.net/DEVICE +| [Pushjet](https://appriseit.com/services/pushjet/) | pjet:// or pjets:// | (TCP) 80 or 443 | pjet://hostname/secret
pjet://hostname:port/secret
pjets://secret@hostname/secret
pjets://hostname:port/secret +| [Push (Techulus)](https://appriseit.com/services/techulus/) | push:// | (TCP) 443 | push://apikey/ +| [Pushed](https://appriseit.com/services/pushed/) | pushed:// | (TCP) 443 | pushed://appkey/appsecret/
pushed://appkey/appsecret/#ChannelAlias
pushed://appkey/appsecret/#ChannelAlias1/#ChannelAlias2/#ChannelAliasN
pushed://appkey/appsecret/@UserPushedID
pushed://appkey/appsecret/@UserPushedID1/@UserPushedID2/@UserPushedIDN +| [PushMe](https://appriseit.com/services/pushme/) | pushme:// | (TCP) 443 | pushme://Token/ +| [Pushover](https://appriseit.com/services/pushover/) | pover:// | (TCP) 443 | pover://user@token
pover://user@token/DEVICE
pover://user@token/DEVICE1/DEVICE2/DEVICEN
**Note**: you must specify both your user_id and token +| [Pushplus](https://appriseit.com/services/pushplus/) | pushplus:// | (TCP) 443 | pushplus://Token +| [PushSafer](https://appriseit.com/services/pushsafer/) | psafer:// or psafers:// | (TCP) 80 or 443 | psafer://privatekey
psafers://privatekey/DEVICE
psafer://privatekey/DEVICE1/DEVICE2/DEVICEN +| [Pushy](https://appriseit.com/services/pushy/) | pushy:// | (TCP) 443 | pushy://apikey/DEVICE
pushy://apikey/DEVICE1/DEVICE2/DEVICEN
pushy://apikey/TOPIC
pushy://apikey/TOPIC1/TOPIC2/TOPICN +| [PushDeer](https://appriseit.com/services/pushdeer/) | pushdeer:// or pushdeers:// | (TCP) 80 or 443 | pushdeer://pushKey
pushdeer://hostname/pushKey
pushdeer://hostname:port/pushKey +| [QQ Push](https://appriseit.com/services/qq/) | qq:// | (TCP) 443 | qq://Token +| [Reddit](https://appriseit.com/services/reddit/) | reddit:// | (TCP) 443 | reddit://user:password@app_id/app_secret/subreddit
reddit://user:password@app_id/app_secret/sub1/sub2/subN +| [Resend](https://appriseit.com/services/resend/) | resend:// | (TCP) 443 | resend://APIToken:FromEmail/
resend://APIToken:FromEmail/ToEmail
resend://APIToken:FromEmail/ToEmail1/ToEmail2/ToEmailN/ +| [Revolt](https://appriseit.com/services/revolt/) | revolt:// | (TCP) 443 | revolt://bottoken/ChannelID
revolt://bottoken/ChannelID1/ChannelID2/ChannelIDN | +| [Rocket.Chat](https://appriseit.com/services/rocketchat/) | rocket:// or rockets:// | (TCP) 80 or 443 | rocket://user:password@hostname/RoomID/Channel
rockets://user:password@hostname:443/#Channel1/#Channel1/RoomID
rocket://user:password@hostname/#Channel
rocket://webhook@hostname
rockets://webhook@hostname/@User/#Channel +| [RSyslog](https://appriseit.com/services/rsyslog/) | rsyslog:// | (UDP) 514 | rsyslog://hostname
rsyslog://hostname/Facility +| [Ryver](https://appriseit.com/services/ryver/) | ryver:// | (TCP) 443 | ryver://Organization/Token
ryver://botname@Organization/Token +| [SendGrid](https://appriseit.com/services/sendgrid/) | sendgrid:// | (TCP) 443 | sendgrid://APIToken:FromEmail/
sendgrid://APIToken:FromEmail/ToEmail
sendgrid://APIToken:FromEmail/ToEmail1/ToEmail2/ToEmailN/ +| [SendPulse](https://appriseit.com/services/sendpulse/) | sendpulse:// | (TCP) 443 | sendpulse://user@host/ClientId/ClientSecret
sendpulse://user@host/ClientId/clientSecret/ToEmail
sendpulse://user@host/ClientId/ClientSecret/ToEmail1/ToEmail2/ToEmailN/ +| [ServerChan](https://appriseit.com/services/serverchan/) | schan:// | (TCP) 443 | schan://sendkey/ +| [Signal API](https://appriseit.com/services/signal/) | signal:// or signals:// | (TCP) 80 or 443 | signal://hostname:port/FromPhoneNo
signal://hostname:port/FromPhoneNo/ToPhoneNo
signal://hostname:port/FromPhoneNo/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ +| [SIGNL4](https://appriseit.com/services/signl4/) | signl4:// | (TCP) 80 or 443 | signl4://hostname +| [SimplePush](https://appriseit.com/services/simplepush/) | spush:// | (TCP) 443 | spush://apikey
spush://salt:password@apikey
spush://apikey?event=Apprise +| [Slack](https://appriseit.com/services/slack/) | slack:// | (TCP) 443 | slack://TokenA/TokenB/TokenC/
slack://TokenA/TokenB/TokenC/Channel
slack://botname@TokenA/TokenB/TokenC/Channel
slack://user@TokenA/TokenB/TokenC/Channel1/Channel2/ChannelN +| [SMTP2Go](https://appriseit.com/services/smtp2go/) | smtp2go:// | (TCP) 443 | smtp2go://user@hostname/apikey
smtp2go://user@hostname/apikey/email
smtp2go://user@hostname/apikey/email1/email2/emailN
smtp2go://user@hostname/apikey/?name="From%20User" +| [SparkPost](https://appriseit.com/services/sparkpost/) | sparkpost:// | (TCP) 443 | sparkpost://user@hostname/apikey
sparkpost://user@hostname/apikey/email
sparkpost://user@hostname/apikey/email1/email2/emailN
sparkpost://user@hostname/apikey/?name="From%20User" +| [Spike.sh](https://appriseit.com/services/spike/) | spike:// | (TCP) 443 | spike://Token +| [Splunk](https://appriseit.com/services/splunk/) | splunk:// or victorops:/ | (TCP) 443 | splunk://route_key@apikey
splunk://route_key@apikey/entity_id +| [Spug Push](https://appriseit.com/services/spugpush/) | spugpush:// | (TCP) 443 | spugpush://Token +| [Streamlabs](https://appriseit.com/services/streamlabs/) | strmlabs:// | (TCP) 443 | strmlabs://AccessToken/
strmlabs://AccessToken/?name=name&identifier=identifier&amount=0&currency=USD
synology://hostname:port/token +| [Syslog](https://appriseit.com/services/syslog/) | syslog:// | n/a | syslog://
syslog://Facility +| [Telegram](https://appriseit.com/services/telegram/) | tgram:// | (TCP) 443 | tgram://bottoken/ChatID
tgram://bottoken/ChatID1/ChatID2/ChatIDN +| [Twitter](https://appriseit.com/services/twitter/) | twitter:// | (TCP) 443 | twitter://CKey/CSecret/AKey/ASecret
twitter://user@CKey/CSecret/AKey/ASecret
twitter://CKey/CSecret/AKey/ASecret/User1/User2/UserN
twitter://CKey/CSecret/AKey/ASecret?mode=tweet +| [Twist](https://appriseit.com/services/twist/) | twist:// | (TCP) 443 | twist://pasword:login
twist://password:login/#channel
twist://password:login/#team:channel
twist://password:login/#team:channel1/channel2/#team3:channel +| [Vapid (WebPush)](https://appriseit.com/services/vapid/) | vapid:// | (TCP) 443 | vapid://subscriber/target
vapid://subscriber/target?subfile=path&keyfile=path +| [Viber](https://appriseit.com/services/viber/) | viber:// | (TCP) 443 | viber://token/target +| [Webex Teams (Cisco)](https://appriseit.com/services/wxteams/) | wxteams:// | (TCP) 443 | wxteams://Token +| [WeCom Bot](https://appriseit.com/services/wecombot/) | wecombot:// | (TCP) 443 | wecombot://BotKey +| [WhatsApp](https://appriseit.com/services/whatsapp/) | whatsapp:// | (TCP) 443 | whatsapp://AccessToken@FromPhoneID/ToPhoneNo
whatsapp://Template:AccessToken@FromPhoneID/ToPhoneNo +| [WxPusher](https://appriseit.com/services/wxpusher/) | wxpusher:// | (TCP) 443 | wxpusher://AppToken@UserID1/UserID2/UserIDN
wxpusher://AppToken@Topic1/Topic2/Topic3
wxpusher://AppToken@UserID1/Topic1/ +| [XBMC](https://appriseit.com/services/xbmc/) | xbmc:// or xbmcs:// | (TCP) 8080 or 443 | xbmc://hostname
xbmc://user@hostname
xbmc://user:password@hostname:port +| [XMPP](https://appriseit.com/services/xmpp/) | xmpp:// or xmpps:// | (TCP) 5222 or 5223 | xmpp://user:pass@hostname
xmpps://user:pass@hostname/jid
xmpps://user:pass@hostname/jid1/jid2@example.ca +| [Zulip Chat](https://appriseit.com/services/zulip/) | zulip:// | (TCP) 443 | zulip://botname@Organization/Token
zulip://botname@Organization/Token/Stream
zulip://botname@Organization/Token/Email + +## SMS Notifications + +SMS Notifications for the most part do not have a both a `title` and `body`. They consist of a single `body` which is usually no more then 160 characters in length. When using Apprise, the `title` and `body` are therefore combined into a single message prior to their transmission. + +| Notification Service | Service ID | Default Port | Example Syntax | +| -------------------- | ---------- | ------------ | -------------- | +| [46elks](https://appriseit.com/services/46elks/) | 46elks:// | (TCP) 443 | 46elks://user:password@FromPhoneNo
46elks://user:password@FromPhoneNo/ToPhoneNo
46elks://user:password@FromPhoneNo/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ +| [Africas Talking](https://appriseit.com/services/africas_talking/) | atalk:// | (TCP) 443 | atalk://AppUser@ApiKey/ToPhoneNo
atalk://AppUser@ApiKey/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ +| [Automated Packet Reporting System (ARPS)](https://appriseit.com/services/aprs/) | aprs:// | (TCP) 10152 | aprs://user:pass@callsign
aprs://user:pass@callsign1/callsign2/callsignN +| [AWS SNS](https://appriseit.com/services/sns/) | sns:// | (TCP) 443 | sns://AccessKeyID/AccessSecretKey/RegionName/+PhoneNo
sns://AccessKeyID/AccessSecretKey/RegionName/+PhoneNo1/+PhoneNo2/+PhoneNoN
sns://AccessKeyID/AccessSecretKey/RegionName/Topic
sns://AccessKeyID/AccessSecretKey/RegionName/Topic1/Topic2/TopicN +| [BulkSMS](https://appriseit.com/services/bulksms/) | bulksms:// | (TCP) 443 | bulksms://user:password@ToPhoneNo
bulksms://User:Password@ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ +| [BulkVS](https://appriseit.com/services/bulkvs/) | bulkvs:// | (TCP) 443 | bulkvs://user:password@FromPhoneNo
bulkvs://user:password@FromPhoneNo/ToPhoneNo
bulkvs://user:password@FromPhoneNo/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ +| [Burst SMS](https://appriseit.com/services/burstsms/) | burstsms:// | (TCP) 443 | burstsms://ApiKey:ApiSecret@FromPhoneNo/ToPhoneNo
burstsms://ApiKey:ApiSecret@FromPhoneNo/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ +| [Clickatell](https://appriseit.com/services/clickatell/) | clickatell:// | (TCP) 443 | clickatell://ApiKey/ToPhoneNo
clickatell://FromPhoneNo@ApiKey/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN +| [ClickSend](https://appriseit.com/services/clicksend/) | clicksend:// | (TCP) 443 | clicksend://user:pass@PhoneNo
clicksend://user:pass@ToPhoneNo1/ToPhoneNo2/ToPhoneNoN +| [DAPNET](https://appriseit.com/services/dapnet/) | dapnet:// | (TCP) 80 | dapnet://user:pass@callsign
dapnet://user:pass@callsign1/callsign2/callsignN +| [D7 Networks](https://appriseit.com/services/d7networks/) | d7sms:// | (TCP) 443 | d7sms://token@PhoneNo
d7sms://token@ToPhoneNo1/ToPhoneNo2/ToPhoneNoN +| [DingTalk](https://appriseit.com/services/dingtalk/) | dingtalk:// | (TCP) 443 | dingtalk://token/
dingtalk://token/ToPhoneNo
dingtalk://token/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/
httpsms://ApiKey@FromPhoneNo/ToPhoneNo
httpsms://ApiKey@FromPhoneNo/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ +| [Kavenegar](https://appriseit.com/services/kavenegar/) | kavenegar:// | (TCP) 443 | kavenegar://ApiKey/ToPhoneNo
kavenegar://FromPhoneNo@ApiKey/ToPhoneNo
kavenegar://ApiKey/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN +| [MessageBird](https://appriseit.com/services/messagebird/) | msgbird:// | (TCP) 443 | msgbird://ApiKey/FromPhoneNo
msgbird://ApiKey/FromPhoneNo/ToPhoneNo
msgbird://ApiKey/FromPhoneNo/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ +| [MSG91](https://appriseit.com/services/msg91/) | msg91:// | (TCP) 443 | msg91://TemplateID@AuthKey/ToPhoneNo
msg91://TemplateID@AuthKey/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ +| [Plivo](https://appriseit.com/services/plivo/) | plivo:// | (TCP) 443 | plivo://AuthID@Token@FromPhoneNo
plivo://AuthID@Token/FromPhoneNo/ToPhoneNo
plivo://AuthID@Token/FromPhoneNo/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ +| [Seven](https://appriseit.com/services/seven/) | seven:// | (TCP) 443 | seven://ApiKey/FromPhoneNo
seven://ApiKey/FromPhoneNo/ToPhoneNo
seven://ApiKey/FromPhoneNo/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ +| [Société Française du Radiotéléphone (SFR)](https://appriseit.com/services/sfr/) | sfr:// | (TCP) 443 | sfr://user:password>@spaceId/ToPhoneNo
sfr://user:password@spaceId/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/
signal://hostname:port/FromPhoneNo/ToPhoneNo
signal://hostname:port/FromPhoneNo/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ +| [Sinch](https://appriseit.com/services/sinch/) | sinch:// | (TCP) 443 | sinch://ServicePlanId:ApiToken@FromPhoneNo
sinch://ServicePlanId:ApiToken@FromPhoneNo/ToPhoneNo
sinch://ServicePlanId:ApiToken@FromPhoneNo/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/
sinch://ServicePlanId:ApiToken@ShortCode/ToPhoneNo
sinch://ServicePlanId:ApiToken@ShortCode/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ +| [SMPP](https://appriseit.com/services/smpp/) | smpp:// or smpps:// | (TCP) 443 | smpp://user:password@hostname:port/FromPhoneNo/ToPhoneNo
smpps://user:password@hostname:port/FromPhoneNo/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN +| [SMSEagle](https://appriseit.com/services/smseagle/) | smseagle:// or smseagles:// | (TCP) 80 or 443 | smseagles://hostname:port/ToPhoneNo
smseagles://hostname:port/@ToContact
smseagles://hostname:port/#ToGroup
smseagles://hostname:port/ToPhoneNo1/#ToGroup/@ToContact/ +| [SMS Manager](https://appriseit.com/services/sms_manager/) | smsmgr:// | (TCP) 443 | smsmgr://ApiKey@ToPhoneNo
smsmgr://ApiKey@ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ +| [Threema Gateway](https://appriseit.com/services/threema/) | threema:// | (TCP) 443 | threema://GatewayID@secret/ToPhoneNo
threema://GatewayID@secret/ToEmail
threema://GatewayID@secret/ToThreemaID/
threema://GatewayID@secret/ToEmail/ToThreemaID/ToPhoneNo/... +| [Twilio](https://appriseit.com/services/twilio/) | twilio:// | (TCP) 443 | twilio://AccountSid:AuthToken@FromPhoneNo
twilio://AccountSid:AuthToken@FromPhoneNo/ToPhoneNo
twilio://AccountSid:AuthToken@FromPhoneNo/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/
twilio://AccountSid:AuthToken@FromPhoneNo/ToPhoneNo?apikey=Key
twilio://AccountSid:AuthToken@ShortCode/ToPhoneNo
twilio://AccountSid:AuthToken@ShortCode/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/
twilio://AccountSid:AuthToken@FromPhoneNo/ToPhoneNo?method=call
twilio://AccountSid:AuthToken@FromPhoneNo/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN?method=call +| [Voipms](https://appriseit.com/services/voipms/) | voipms:// | (TCP) 443 | voipms://password:email/FromPhoneNo
voipms://password:email/FromPhoneNo/ToPhoneNo
voipms://password:email/FromPhoneNo/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ +| [Vonage](https://appriseit.com/services/vonage/) (formerly Nexmo) | vonage:// | (TCP) 443 | vonage://ApiKey:ApiSecret@FromPhoneNo
vonage://ApiKey:ApiSecret@FromPhoneNo/ToPhoneNo
vonage://ApiKey:ApiSecret@FromPhoneNo/ToPhoneNo1/ToPhoneNo2/ToPhoneNoN/ + +## Desktop Notifications + +| Notification Service | Service ID | Default Port | Example Syntax | +| -------------------- | ---------- | ------------ | -------------- | +| [Linux DBus Notifications](https://appriseit.com/services/dbus/) | dbus://
qt://
glib://
kde:// | n/a | dbus://
qt://
glib://
kde:// +| [Linux Gnome Notifications](https://appriseit.com/services/gnome/) | gnome:// | n/a | gnome:// +| [MacOS X Notifications](https://appriseit.com/services/macosx/) | macosx:// | n/a | macosx:// +| [Windows Notifications](https://appriseit.com/services/windows/) | windows:// | n/a | windows:// + +## Email Notifications + +| Service ID | Default Port | Example Syntax | +| ---------- | ------------ | -------------- | +| [mailto://](https://appriseit.com/services/email/) | (TCP) 25 | mailto://userid:pass@domain.com
mailto://domain.com?user=userid&pass=password
mailto://domain.com:2525?user=userid&pass=password
mailto://user@gmail.com&pass=password
mailto://mySendingUsername:mySendingPassword@example.com?to=receivingAddress@example.com
mailto://userid:password@example.com?smtp=mail.example.com&from=noreply@example.com&name=no%20reply +| [mailtos://](https://appriseit.com/services/email/) | (TCP) 587 | mailtos://userid:pass@domain.com
mailtos://domain.com?user=userid&pass=password
mailtos://domain.com:465?user=userid&pass=password
mailtos://user@hotmail.com&pass=password
mailtos://mySendingUsername:mySendingPassword@example.com?to=receivingAddress@example.com
mailtos://userid:password@example.com?smtp=mail.example.com&from=noreply@example.com&name=no%20reply + +Apprise have some email services built right into it (such as yahoo, fastmail, hotmail, gmail, etc) that greatly simplify the mailto:// service. See more details [here](https://appriseit.com/services/email/). + +## Custom Notifications + +| Post Method | Service ID | Default Port | Example Syntax | +| -------------------- | ---------- | ------------ | -------------- | +| [Form](https://appriseit.com/services/form/) | form:// or forms:// | (TCP) 80 or 443 | form://hostname
form://user@hostname
form://user:password@hostname:port
form://hostname/a/path/to/post/to +| [JSON](https://appriseit.com/services/json/) | json:// or jsons:// | (TCP) 80 or 443 | json://hostname
json://user@hostname
json://user:password@hostname:port
json://hostname/a/path/to/post/to +| [XML](https://appriseit.com/services/xml/) | xml:// or xmls:// | (TCP) 80 or 443 | xml://hostname
xml://user@hostname
xml://user:password@hostname:port
xml://hostname/a/path/to/post/to + +# Installation + +The easiest way is to install Apprise from PyPI: +```bash +pip install apprise +``` + +Apprise is also packaged as an RPM and available through [EPEL](https://docs.fedoraproject.org/en-US/epel/) supporting CentOS, Redhat, Rocky, Oracle Linux, etc. +```bash +# Follow instructions on https://docs.fedoraproject.org/en-US/epel +# to get your system connected up to EPEL and then: +# Redhat/CentOS 7.x users +yum install apprise + +# Redhat/Rocky Linux 8.x+ and/or Fedora Users +dnf install apprise +``` + +You can also check out the [Graphical version of Apprise](https://github.com/caronc/apprise-api) to centralize your configuration and notifications through a manageable webpage. + +# Command Line Usage + +A small command line interface (CLI) tool is also provided with this package called *apprise*. If you know the server urls you wish to notify, you can simply provide them all on the command line and send your notifications that way: +```bash +# Send a notification to as many servers as you want +# as you can easily chain one after another (the -vv provides some +# additional verbosity to help let you know what is going on): +apprise -vv -t 'my title' -b 'my notification body' \ + 'mailto://myemail:mypass@gmail.com' \ + 'pbul://o.gn5kj6nfhv736I7jC3cj3QLRiyhgl98b' + +# If you don't specify a --body (-b) then stdin is used allowing +# you to use the tool as part of your every day administration: +cat /proc/cpuinfo | apprise -vv -t 'cpu info' \ + 'mailto://myemail:mypass@gmail.com' + +# The title field is totally optional +uptime | apprise -vv \ + 'discord:///4174216298/JHMHI8qBe7bk2ZwO5U711o3dV_js' +``` + +## CLI Configuration Files + +No one wants to put their credentials out for everyone to see on the command line. No problem *apprise* also supports configuration files. It can handle both a specific YAML format or a very simple TEXT format. You can also pull these configuration files via an HTTP query too! 
Read more about the expected structure of the configuration files [here](https://appriseit.com/config/). + +```bash +# By default if no url or configuration is specified apprise will attempt to load +# configuration files (if present) from: +# ~/.apprise +# ~/.apprise.yaml +# ~/.config/apprise.conf +# ~/.config/apprise.yaml +# /etc/apprise.conf +# /etc/apprise.yaml + +# Also a subdirectory handling allows you to leverage plugins +# ~/.apprise/apprise +# ~/.apprise/apprise.yaml +# ~/.config/apprise/apprise.conf +# ~/.config/apprise/apprise.yaml +# /etc/apprise/apprise.yaml +# /etc/apprise/apprise.conf + +# Windows users can store their default configuration files here: +# %APPDATA%/Apprise/apprise.conf +# %APPDATA%/Apprise/apprise.yaml +# %LOCALAPPDATA%/Apprise/apprise.conf +# %LOCALAPPDATA%/Apprise/apprise.yaml +# %ALLUSERSPROFILE%\Apprise\apprise.conf +# %ALLUSERSPROFILE%\Apprise\apprise.yaml +# %PROGRAMFILES%\Apprise\apprise.conf +# %PROGRAMFILES%\Apprise\apprise.yaml +# %COMMONPROGRAMFILES%\Apprise\apprise.conf +# %COMMONPROGRAMFILES%\Apprise\apprise.yaml + +# The configuration files specified above can also be identified with a `.yml` +# extension or even just entirely removing the `.conf` extension altogether. + +# If you loaded one of those files, your command line gets really easy: +apprise -vv -t 'my title' -b 'my notification body' + +# If you want to deviate from the default paths or specify more than one, +# just specify them using the --config switch: +apprise -vv -t 'my title' -b 'my notification body' \ + --config=/path/to/my/config.yml + +# Got lots of configuration locations? No problem, you can specify them all: +# Apprise can even fetch the configuration from over a network! 
+apprise -vv -t 'my title' -b 'my notification body' \
+   --config=/path/to/my/config.yml \
+   --config=https://localhost/my/apprise/config
+```
+
+## CLI Tagging Support
+
+Apprise allows you to tag your services in your configuration to organize them (e.g., `family`, `devops`, `critical`). You can then filter which services to notify using the `--tag` (`-g`) switch.
+
+It is important to understand how Apprise handles multiple tags:
+
+* **OR Logic (Union)**: To notify services that have *either* Tag A **OR** Tag B, specify the `-g` switch multiple times.
+* **AND Logic (Intersection)**: To notify services that have *both* Tag A **AND** Tag B, separate the tags with a comma.
+
+```bash
+# OR Logic: Notify any service tagged 'devops' OR 'admin'
+apprise -vv -t "Union Test" \
+   --config=~/apprise.yml \
+   -g devops -g admin
+
+# AND Logic: Notify only services tagged with BOTH 'devops' AND 'critical'
+apprise -vv -t "Intersection Test" \
+   --config=~/apprise.yml \
+   -g devops,critical
+```
+
+## CLI File Attachments
+
+Apprise also supports file attachments too! Specify as many attachments to a notification as you want.
+```bash
+# Send a funny image you found on the internet to a colleague:
+apprise -vv --title 'Agile Joke' \
+   --body 'Did you see this one yet?' \
+   --attach https://i.redd.it/my2t4d2fx0u31.jpg \
+   'mailto://myemail:mypass@gmail.com'
+
+# Easily send an update from a critical server to your dev team
+apprise -vv --title 'system crash' \
+   --body 'I do not think Jim fixed the bug; see attached...' \
+   --attach /var/log/myprogram.log \
+   --attach /var/debug/core.2345 \
+   --tag devteam
+```
+
+## CLI Loading Custom Notifications/Hooks
+
+To create your own custom `schema://` hook so that you can trigger your own custom code,
+simply include the `@notify` decorator to wrap your function. 
+```python +from apprise.decorators import notify +# +# The below assumes you want to catch foobar:// calls: +# +@notify(on="foobar", name="My Custom Foobar Plugin") +def my_custom_notification_wrapper(body, title, notify_type, *args, **kwargs): + """My custom notification function that triggers on all foobar:// calls + """ + # Write all of your code here... as an example... + print("{}: {} - {}".format(notify_type.upper(), title, body)) + + # Returning True/False is a way to relay your status back to Apprise. + # Returning nothing (None by default) is always interpreted as a Success +``` + +Once you've defined your custom hook, you just need to tell Apprise where it is at runtime. +```bash +# By default if no plugin path is specified apprise will attempt to load +# all plugin files (if present) from the following directory paths: +# ~/.apprise/plugins +# ~/.config/apprise/plugins +# /var/lib/apprise/plugins + +# Windows users can store their default plugin files in these directories: +# %APPDATA%/Apprise/plugins +# %LOCALAPPDATA%/Apprise/plugins +# %ALLUSERSPROFILE%\Apprise\plugins +# %PROGRAMFILES%\Apprise\plugins +# %COMMONPROGRAMFILES%\Apprise\plugins + +# If you placed your plugin file within one of the directories already defined +# above, then your call simply needs to look like: +apprise -vv --title 'custom override' \ + --body 'the body of my message' \ + foobar:\\ + +# However you can override the path like so +apprise -vv --title 'custom override' \ + --body 'the body of my message' \ + --plugin-path /path/to/my/plugin.py \ + foobar:\\ +``` + +You can read more about creating your own custom notifications and/or hooks [here](https://appriseit.com/library/extending/decorator/). 
+
+## CLI Environment Variables
+
+Those using the Command Line Interface (CLI) can also leverage environment variables to pre-set the default settings:
+
+| Variable | Description |
+|------------------------ | ----------------- |
+| `APPRISE_URLS` | Specify the default URLs to notify IF none are otherwise specified on the command line explicitly. If the `--config` (`-c`) is specified, then this will override any reference to this variable. Use white space and/or a comma (`,`) to delimit multiple entries.
+| `APPRISE_CONFIG_PATH` | Explicitly specify the config search path to use (overriding the default). The path(s) defined here must point to the absolute filename to open/reference. Use a semi-colon (`;`), line-feed (`\n`), and/or carriage return (`\r`) to delimit multiple entries.
+| `APPRISE_PLUGIN_PATH` | Explicitly specify the custom plugin search path to use (overriding the default). Use a semi-colon (`;`), line-feed (`\n`), and/or carriage return (`\r`) to delimit multiple entries.
+| `APPRISE_STORAGE_PATH` | Explicitly specify the persistent storage path to use (overriding the default).
+
+# Developer API Usage
+
+To send a notification from within your python application, just do the following:
+```python
+import apprise
+
+# Create an Apprise instance
+apobj = apprise.Apprise()
+
+# Add all of the notification services by their server url.
+# A sample email notification:
+apobj.add('mailto://myuserid:mypass@gmail.com')
+
+# A sample pushbullet notification
+apobj.add('pbul://o.gn5kj6nfhv736I7jC3cj3QLRiyhgl98b')
+
+# Then notify these services any time you desire. The below would
+# notify all of the services loaded into our Apprise object.
+apobj.notify(
+    body='what a great notification service!',
+    title='my notification title',
+)
+```
+
+## API Configuration Files
+
+Developers need access to configuration files too. The good news is their use just involves declaring another object (called *AppriseConfig*) that the *Apprise* object can ingest. 
You can also freely mix and match config and notification entries as often as you wish! You can read more about the expected structure of the configuration files [here](https://appriseit.com/getting-started/configuration/). +```python +import apprise + +# Create an Apprise instance +apobj = apprise.Apprise() + +# Create an Config instance +config = apprise.AppriseConfig() + +# Add a configuration source: +config.add('/path/to/my/config.yml') + +# Add another... +config.add('https://myserver:8080/path/to/config') + +# Make sure to add our config into our apprise object +apobj.add(config) + +# You can mix and match; add an entry directly if you want too +# In this entry we associate the 'admin' tag with our notification +apobj.add('mailto://myuser:mypass@hotmail.com', tag='admin') + +# Then notify these services any time you desire. The below would +# notify all of the services that have not been bound to any specific +# tag. +apobj.notify( + body='what a great notification service!', + title='my notification title', +) + +# Tagging allows you to specifically target only specific notification +# services you've loaded: +apobj.notify( + body='send a notification to our admin group', + title='Attention Admins', + # notify any services tagged with the 'admin' tag + tag='admin', +) + +# If you want to notify absolutely everything (regardless of whether +# it's been tagged or not), just use the reserved tag of 'all': +apobj.notify( + body='send a notification to our admin group', + title='Attention Admins', + # notify absolutely everything loaded, regardless on whether + # it has a tag associated with it or not: + tag='all', +) +``` + +## API File Attachments + +Attachments are very easy to send using the Apprise API: +```python +import apprise + +# Create an Apprise instance +apobj = apprise.Apprise() + +# Add at least one service you want to notify +apobj.add('mailto://myuser:mypass@hotmail.com') + +# Then send your attachment. 
+
+apobj.notify(
+    title='A great photo of our family',
+    body='The flash caused Jane to close her eyes! hah! :)',
+    attach='/local/path/to/my/DSC_003.jpg',
+)
+
+# Send a web based attachment too! In the below example, we connect to a home
+# security camera and send a live image to an email. By default remote web
+# content is cached, but for a security camera we might want to call notify
+# again later in our code, so we want our last image retrieved to expire(in
+# this case after 3 seconds).
+apobj.notify(
+    title='Latest security image',
+    attach='http://admin:password@hikvision-cam01/ISAPI/Streaming/channels/101/picture?cache=3'
+)
+```
+
+To send more than one attachment, just use a list, set, or tuple instead:
+```python
+import apprise
+
+# Create an Apprise instance
+apobj = apprise.Apprise()
+
+# Add at least one service you want to notify
+apobj.add('mailto://myuser:mypass@hotmail.com')
+
+# Now add all of the entries we're interested in:
+attach = (
+    # ?name= allows us to rename the actual jpeg as found on the site
+    # to be another name when sent to our recipient(s)
+    'https://i.redd.it/my2t4d2fx0u31.jpg?name=FlyingToMars.jpg',
+
+    # Now add another:
+    '/path/to/funny/joke.gif',
+)
+
+# Send your multiple attachments with a single notify call:
+apobj.notify(
+    title='Some good jokes.',
+    body='Hey guys, check out these!',
+    attach=attach,
+)
+```
+
+## API Loading Custom Notifications/Hooks
+
+By default, no custom plugins are loaded at all for those building from within the Apprise API.
+It's at the developer's discretion to load custom modules. But should you choose to do so, it's as easy
+as including the path reference in the `AppriseAsset()` object prior to the initialization of your `Apprise()`
+instance.
+
+For example:
+```python
+from apprise import Apprise
+from apprise import AppriseAsset
+
+# Prepare your Asset object so that you can enable the custom plugins to
+# be loaded for your instance of Apprise... 
+
+asset = AppriseAsset(plugin_paths="/path/to/scan")
+
+# OR You can also scan more than one file too:
+asset = AppriseAsset(
+    plugin_paths=[
+        # Iterate over all python libraries found in the root of the
+        # specified path. This is NOT a recursive (directory) scan; only
+        # the first level is parsed. HOWEVER, if a directory containing
+        # an __init__.py is found, it will be included in the load.
+        "/dir/containing/many/python/libraries",
+
+        # An absolute path to a plugin.py to exclusively load
+        "/path/to/plugin.py",
+
+        # if you point to a directory that has an __init__.py file found in
+        # it, then only that file is loaded (it's similar to point to a
+        # absolute .py file. Hence, there is no (level 1) scanning at all
+        # within the directory specified.
+        "/path/to/dir/library"
+    ]
+)
+
+# Now that we've got our asset, we just work with our Apprise object as we
+# normally do
+aobj = Apprise(asset=asset)
+
+# If our new custom `foobar://` library was loaded (presuming we prepared
+# one like in the examples above). then you would be able to safely add it
+# into Apprise at this point
+aobj.add('foobar://')
+
+# Send our notification out through our foobar://
+aobj.notify("test")
+```
+
+You can read more about creating your own custom notifications and/or hooks [here](https://appriseit.com/library/extending/decorator/).
+
+# Persistent Storage
+
+Persistent storage allows Apprise to cache re-occurring actions optionally to disk. This can greatly reduce the overhead used to send a notification.
+
+There are 3 Persistent Storage operational states Apprise can operate using:
+1. `auto`: Flush gathered cache information to the filesystem on demand. This option is incredibly light weight. This is the default behavior for all CLI usage.
+   * Developers who choose to use this operational mode can also force cached information manually if they choose.
+   * The CLI will use this operational mode by default.
+1. 
`flush`: Flushes any cache information to the filesystem during every transaction.
+1. `memory`: Effectively disable Persistent Storage. Any caching of data required by each plugin used is done in memory. Apprise effectively operates as it always did before persistent storage was available. This setting ensures no content is ever written to disk.
+   * By default this is the mode Apprise will operate under for those developing with it unless they configure it to otherwise operate as `auto` or `flush`. This is done through the `AppriseAsset()` object and is explained further on in this documentation.
+
+## CLI Persistent Storage Commands
+
+You can provide the keyword `storage` on your CLI call to see the persistent storage options available to you.
+```bash
+# List all of the occupied space used by Apprise's Persistent Storage:
+apprise storage list
+
+# list is the default option, so the following does the same thing:
+apprise storage
+
+# You can prune all of your storage older than 30 days
+# and not accessed for this period like so:
+apprise storage prune
+
+# You can do a hard reset (and wipe all persistent storage) with:
+apprise storage clean
+
+```
+
+You can also filter your results by adding tags and/or URL Identifiers. When you get a listing (`apprise storage list`), you may see:
+```
+  # example output of 'apprise storage list':
+  1. f7077a65 0.00B unused
+   - matrixs://abcdef:****@synapse.example12.com/%23general?image=no&mode=off&version=3&msgtype...
+     tags: team
+
+  2. 0e873a46 81.10B active
+   - tgram://W...U//?image=False&detect=yes&silent=no&preview=no&content=before&mdv=v1&format=m...
+     tags: personal
+
+  3. abcd123 12.00B stale
+
+```
+The (persistent storage) cache states are:
+ - `unused`: This plugin has not committed anything to disk for reuse/cache purposes
+ - `active`: This plugin has written content to disk. Or at the very least, it has prepared a persistent storage location it can write into. 
+
+ - `stale`: The system detected a location where a URL may have possibly written to in the past, but there is nothing linking to it using the URLs provided. It is likely wasting space or is no longer of any use.
+
+You can use this information to filter your results by specifying _URL ID_ (UID) values after your command. For example:
+```bash
+# The below commands continue with the example already identified above
+# the following would match abcd123 (even though just ab was provided)
+# The output would only list the 'stale' entry above
+apprise storage list ab
+
+# knowing our filter is safe, we could remove it
+# the below command would not obstruct our other two URLs and would only
+# remove our stale one:
+apprise storage clean ab
+
+# Entries can be filtered by tag as well:
+apprise storage list --tag=team
+
+# You can match on multiple URL IDs as well:
+# The following would actually match the URL IDs of 1. and 2. above
+apprise storage list f 0
+```
+When using the CLI, Persistent storage is set to the operational mode of `auto` by default; you can change this by providing `--storage-mode=` (`-SM`) during your calls if you want to ensure it's always set to a value of your choice.
+
+For more information on persistent storage, [visit here](https://appriseit.com/cli/persistent-storage/).
+
+## API Persistent Storage Commands
+For developers, persistent storage is set in the operational mode of `memory` by default.
+
+It's at the developer's discretion to enable it (by switching it to either `auto` or `flush`). Should you choose to do so: it's as easy as including the information in the `AppriseAsset()` object prior to the initialization of your `Apprise()` instance.
+
+For example:
+```python
+from apprise import Apprise
+from apprise import AppriseAsset
+from apprise import PersistentStoreMode
+
+# Prepare a location the persistent storage can write its cached content to. 
+
+# By setting this path, this immediately assumes you wish to operate the
+# persistent storage in the operational 'auto' mode
+asset = AppriseAsset(storage_path="/path/to/save/data")
+
+# If you want to be more explicit and set more options, then you may do the
+# following
+asset = AppriseAsset(
+    # Set our storage path directory (minimum requirement to enable it)
+    storage_path="/path/to/save/data",
+
+    # Set the mode... the options are:
+    # 1. PersistentStoreMode.MEMORY
+    #    - disable persistent storage from writing to disk
+    # 2. PersistentStoreMode.AUTO
+    #    - write to disk on demand
+    # 3. PersistentStoreMode.FLUSH
+    #    - write to disk always and often
+    storage_mode=PersistentStoreMode.FLUSH,
+
+    # The URL IDs are by default 8 characters in length. You can increase and
+    # decrease its value here. The value must be > 2. The default value is 8
+    # if not otherwise specified
+    storage_idlen=8,
+)
+
+# Now that we've got our asset, we just work with our Apprise object as we
+# normally do
+aobj = Apprise(asset=asset)
+```
+
+For more information on persistent storage, [visit here](https://appriseit.com/library/persistent-storage/).
+
+# Want To Learn More?
+
+If you're interested in reading more about this and other methods on how to customize your own notifications, please check out the following links:
+* 📣 [Using the CLI](https://appriseit.com/cli/)
+* 🛠️ [Development API](https://appriseit.com/library/)
+* ⚙️ [Configuration File Help](https://appriseit.com/getting-started/configuration/)
+* ⚡ [Create Your Own Custom Notifications](https://appriseit.com/library/extending/decorator/)
+* 🌎 [Apprise API/Web Interface](https://github.com/caronc/apprise-api/)
+* 📖 [Apprise Documentation Source](https://github.com/caronc/apprise-docs/)
+* 🔧 [Troubleshooting](https://appriseit.com/qa/)
+* 🎉 [Showcase](https://appriseit.com/contributing/showcase/)
+
+Want to help make Apprise better? 
+
+* 💡 [Contribute to the Apprise Code Base](https://appriseit.com/contributing/)
+* ❤️ [Sponsorship and Donations](https://appriseit.com/contributing/sponsors/)
diff --git a/libs/apprise-1.9.8.dist-info/RECORD b/libs/apprise-1.9.8.dist-info/RECORD
new file mode 100644
index 0000000000..9f0dfa6a34
--- /dev/null
+++ b/libs/apprise-1.9.8.dist-info/RECORD
@@ -0,0 +1,233 @@
+../../bin/apprise,sha256=41ocDdUKbn8QbOtncNxcwagiZ6d2DL1ARZyEgip9OYA,186
+apprise-1.9.8.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+apprise-1.9.8.dist-info/METADATA,sha256=An6sk6bAqQdJRDAfr8KhZhyohL-i_h37eGX6V5R4aVM,56830
+apprise-1.9.8.dist-info/RECORD,,
+apprise-1.9.8.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+apprise-1.9.8.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+apprise-1.9.8.dist-info/entry_points.txt,sha256=71YypBuNdjAKiaLsiMG40HEfLHxkU4Mi7o_S0s0d8wI,45
+apprise-1.9.8.dist-info/licenses/LICENSE,sha256=msAH6Aa9cZnPMVit_4OhY1GuhcRjc8NTOpi5YZdRA_I,1343
+apprise-1.9.8.dist-info/top_level.txt,sha256=JrCRn-_rXw5LMKXkIgMSE4E0t1Ks9TYrBH54Pflwjkk,8
+apprise/__init__.py,sha256=wZB-1ATAsyV-rjXvPa2MOI4vfYxWO6gsZ_yGoZD7PYk,3851
+apprise/apprise.py,sha256=urhgoEWyI9AzAjbkY6XmNUcsrdU-4KWYYT0CHspHE2g,36087
+apprise/apprise_attachment.py,sha256=PUvsVkFrFodtYY5KKWApGwVzRucTJIcSsRjy0YDE7xM,13642
+apprise/apprise_config.py,sha256=fPiTCS4wEG2R-IOXbVe3Jzd_dJgoOkLVrYndFkMFNDY,17964
+apprise/asset.py,sha256=2Epg10nieZKt5yFEgq2Qq_bTpijiICjaHK2oeGv_tmc,17701
+apprise/assets/NotifyXML-1.0.xsd,sha256=292qQ_IUl5EWDhPyzm9UTT0C2rVvJkyGar8jiODkJs8,986
+apprise/assets/NotifyXML-1.1.xsd,sha256=bjR3CGG4AEXoJjYkGCbDttKHSkPP1FlIWO02E7G59g4,1758
+apprise/assets/themes/default/apprise-failure-128x128.ico,sha256=Mt0ptfHJaN3Wsv5UCNDn9_3lyEDHxVDv1JdaDEI_xCA,67646
+apprise/assets/themes/default/apprise-failure-128x128.png,sha256=66ps8TDPxVH3g9PlObJqF-0x952CjnqQyN3zvpRcOT8,16135 
+apprise/assets/themes/default/apprise-failure-256x256.png,sha256=bQBsKKCsKfR9EqgYOZrcVcVa5y8qG58PN2mEqO5eNRI,41931 +apprise/assets/themes/default/apprise-failure-32x32.png,sha256=vH0pZffIDCvkejpr3fJHGXW__8Yc3R_p0bacX6t6l18,2437 +apprise/assets/themes/default/apprise-failure-72x72.png,sha256=EP5A8DHRDr9srgupFSwOoyQ308bNJ8aL192J_L4K-ec,7600 +apprise/assets/themes/default/apprise-info-128x128.ico,sha256=F5_CirmXueRCRI5Z_Crf6TS6jVIXTJlRD83zw1oJ66g,67646 +apprise/assets/themes/default/apprise-info-128x128.png,sha256=bBqRZAgQey-gkmJrnFhPbzjILSrljE59mRkgj3raMQo,16671 +apprise/assets/themes/default/apprise-info-256x256.png,sha256=B5r_O4d9MHCmSWZwfbqQgZSp-ZetTdiBSwKcMTF1aFA,43331 +apprise/assets/themes/default/apprise-info-32x32.png,sha256=lt3NZ95TzkiCNVNlurrB2fE2nriMa1wftl7nrNXmb6c,2485 +apprise/assets/themes/default/apprise-info-72x72.png,sha256=kDnsZpqNUZGqs9t1ECUup7FOfXUIL-rupnQCYJp9So4,7875 +apprise/assets/themes/default/apprise-logo.png,sha256=85ttALudKkLmiqilJT7mUQLUXRFmM1AK89rnwLm313s,160907 +apprise/assets/themes/default/apprise-success-128x128.ico,sha256=uCopPwdQjxgfohKazHaDzYs9y4oiaOpL048PYC6WRlg,67646 +apprise/assets/themes/default/apprise-success-128x128.png,sha256=nvDuU_QqhGlw6cMtdj7Mv-gPgqCEx-0DaaXn1KBLVYg,17446 +apprise/assets/themes/default/apprise-success-256x256.png,sha256=vXfKuxY3n0eeXHKdb9hTxICxOEn7HjAQ4IZpX0HSLzc,48729 +apprise/assets/themes/default/apprise-success-32x32.png,sha256=Jg9pFJh3YPI-LiPBebyJ7Z4Vt7BRecaE8AsRjQVIkME,2471 +apprise/assets/themes/default/apprise-success-72x72.png,sha256=FQbgvIhqKOhEK0yvrhaSpai0R7hrkTt_-GaC2KUgCCk,7858 +apprise/assets/themes/default/apprise-warning-128x128.ico,sha256=6XaQPOx0oWK_xbhr4Yhb7qNazCWwSs9lk2SYR2MHTrQ,67646 +apprise/assets/themes/default/apprise-warning-128x128.png,sha256=pf5c4Ph7jWH7gf39dJoieSj8TzAsY3TXI-sGISGVIW4,16784 +apprise/assets/themes/default/apprise-warning-256x256.png,sha256=SY-xlaiXaj420iEYKC2_fJxU-yj2SuaQg6xfPNi83bw,43708 
+apprise/assets/themes/default/apprise-warning-32x32.png,sha256=97R2ywNvcwczhBoWEIgajVtWjgT8fLs4FCCz4wu0dwc,2472 +apprise/assets/themes/default/apprise-warning-72x72.png,sha256=L8moEInkO_OLxoOcuvN7rmrGZo64iJeH20o-24MQghE,7913 +apprise/attachment/__init__.py,sha256=HVj63cGWbHlQrUy8WedaDFtvSRMPKsATb8lvKXfR-6A,1654 +apprise/attachment/base.py,sha256=oAB4Bdfof7tEgGtJUqPtFR9INEPGfvWOwuXf4sARN_s,16463 +apprise/attachment/file.py,sha256=J14LHyloDOVq1rXlqIeX7pkb9MCBH7C2DAEqNLExeGw,4953 +apprise/attachment/http.py,sha256=r7sphh4tb9JRcZ8l2BRJfyGErPeIRDWo--Ae_FuFYOg,13843 +apprise/attachment/memory.py,sha256=6j2kdFV32Zu8mod7Asiiwf6ByaRvf2PVCLIV28UIFYg,6873 +apprise/cli.py,sha256=u_DaRmwlDk81tOaa43VZ_6nBQaINxFCh2Mxc-9B4YWE,37773 +apprise/common.py,sha256=5a2s8yQtqm3BbdhM0cd9aIvZR5yGvwl9ioLOnNbwW7s,6863 +apprise/compat.py,sha256=hjaMSzwCcdodK79bZ54_vxhhO_KhcidIen-tJwE0NQQ,2043 +apprise/config/__init__.py,sha256=BrkAKU9NZ_dN8KuiUwBjYB9U7gU1wVW95uksS9rAbmI,1655 +apprise/config/base.py,sha256=t2gDb8-OAGINKo_aZfxvZsJIUPU3xZeXT9M7SDa9cjs,55848 +apprise/config/file.py,sha256=-y5oCdn1m9Sv8zfLSWwcw0jXj5nwDPv2r1i-eS_jSOE,6119 +apprise/config/http.py,sha256=V2U93piWl94umfKDIDsm66esR4lAmx-VLPvSP187Ed8,9445 +apprise/config/memory.py,sha256=J49cB4MFDvJADgnZVCX1GmNSwrQm-wnFsi6o1nY22vg,2733 +apprise/conversion.py,sha256=O-PjprtH1sMN1z9_sj6AMXsAd9jvbku8hvRdoSh_Ggg,6393 +apprise/decorators/__init__.py,sha256=UJvg7JUgzj50yr_uQqZd1Z2y6My-T9oeR2Luc1nUDJw,1456 +apprise/decorators/base.py,sha256=oXGf9ByHG5HZ8WMqZwTKTmBvUq1RBHFZ5QIljBtqhvQ,7879 +apprise/decorators/notify.py,sha256=yNtbZ3zID6cYnR6b-UTuZ0ILmZETwPO0HK2ZzjpvOUI,5066 +apprise/emojis.py,sha256=_IR06DNsTMAWpYzkZSm_zqVP0MKLtmCvlWS9jv07kE4,87630 +apprise/exception.py,sha256=9mOUrGXxLzwdpMo7XEk4iMUaED5FJU6QiSf9q5-NQdk,2512 +apprise/i18n/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +apprise/i18n/apprise.pot,sha256=emswBY3tz-4tnvXR9yqj6B0LvHb_skcWkyhrbbM7h5I,44287 
+apprise/i18n/en/LC_MESSAGES/apprise.mo,sha256=6TR8Gayv6E8LR9HLdbRnCVzRP_rdyIrIiCuCPt9-CT4,3655 +apprise/i18n/en/LC_MESSAGES/apprise.po,sha256=c3E2UB7PAqwxjb1IZ8ydvh4hL88Wz7YQF8snWepAung,47558 +apprise/locale.py,sha256=Qg0Jr49cPQQctNrzCSuzrWd27YMGgoG7shrAt6kepJU,8983 +apprise/logger.py,sha256=01qEr8aL2RrGnl7DNaawEbBLnGZr0izn9vb6e4e974U,6715 +apprise/manager.py,sha256=mmXPHMWFxEpFOPj3rpdOOYUQcsXNwl0eH7_2jGGHV7g,29624 +apprise/manager_attachment.py,sha256=Zqqq6X-m1qvIKbbqdXvCuuwfbzE7kXSBmDVjKD_Vkvg,2134 +apprise/manager_config.py,sha256=7Sw8ab_JhyxkCVFlkSfElYCacAq7immLrO6RyeOQe-0,2139 +apprise/manager_plugins.py,sha256=L_g-nxr_qyoZ-GyYvOskK2NLXyOBbRJCHFILOVgba_E,2137 +apprise/persistent_store.py,sha256=hr73A9lazx31fA0yz7wKOuPaDSHT-z4BkddC9RSNpiU,61030 +apprise/plugins/__init__.py,sha256=trbtZemgqRGdwGm493UCaRjTV94L8jzhRxgi-7KEQuQ,18665 +apprise/plugins/africas_talking.py,sha256=2U9PW46BwENnV14ZYoaIbTSKhxt_eaSZXGmMcaVtu6w,16624 +apprise/plugins/apprise_api.py,sha256=FPErT1UIlZniLqpT49qyNXGyVgs7lJTteWqZzWtP-tk,18134 +apprise/plugins/aprs.py,sha256=7Vmvv1pHjZWijpEjrgjuGjAb3dQko_kGO-ah4IFg9Io,25117 +apprise/plugins/bark.py,sha256=w-0YJ4Q6-uHYKLCqZKMrhmDzeISrU4tWc7HxSNdTYMw,19003 +apprise/plugins/base.py,sha256=3e4O6YtKc_gnull9Bp77nnTHEIm-a_-VS27QnlYp0P4,37080 +apprise/plugins/bluesky.py,sha256=lot_KKYhvVdwt6hGAUZdTRLGg2oLHkz9bJLpx8LF6u4,23303 +apprise/plugins/brevo.py,sha256=h33VyQ108W09sZwLFc28jvz8Jok0EuvLhTn-MGqlDzU,19388 +apprise/plugins/bulksms.py,sha256=N1xU5C3Jdbxi6bGNQY87i8XjZA12UwxNfq0A48noNuU,17036 +apprise/plugins/bulkvs.py,sha256=L7SASQxVox6QCQDYiVqM631o_7R6Mz3-UyexR0HFnGI,13922 +apprise/plugins/burstsms.py,sha256=HBFd-QYuVcsdLuBEwhzbzwErVuR9fiXD3RG-qUA8WkQ,16149 +apprise/plugins/chanify.py,sha256=lUBM7MXWvjBJNL3_ISIoNvZnArTFpEEnBPJcGBhWcWw,6887 +apprise/plugins/clickatell.py,sha256=R3oc3ZksikJYgV-iAMpc12lyjkvavayUftH6TbKB4K4,10367 +apprise/plugins/clicksend.py,sha256=TM52u60aM9OxRulhhDgrZPaV5o48wS96WlOzzxhEgdA,12014 
+apprise/plugins/custom_form.py,sha256=HnMmOQoXBLeApGE9vD8LI_c57JsZkB0V3wd72fPONxw,19121 +apprise/plugins/custom_json.py,sha256=fkzDYCZ_zFT1g_Att9BifvJEMKdBIUuYGjAj50rIN2g,15012 +apprise/plugins/custom_xml.py,sha256=bnyusnF36BG0TD8OfgaU_PM6wD06SplcJmFPJjOqn6A,18441 +apprise/plugins/d7networks.py,sha256=sEVihX4Wm9AiVI2harLkGtqpSB0NLVRkeeVk6jfqBqU,15494 +apprise/plugins/dapnet.py,sha256=FIkaboR2m0Zgae4pl5LjsLeC3C_97pHLKoXtToPRpRM,13855 +apprise/plugins/dbus.py,sha256=SNv-nQlcFQIRgHNTcAr_tfVqcygqfn3k2rnUPOGz5qA,14495 +apprise/plugins/dingtalk.py,sha256=hd2LHBDaPAy241-NXtvgrmpegvpPPEKkEWrFa84yfZc,12343 +apprise/plugins/discord.py,sha256=OfiyW5_PzLXnlUTLre_91Rkwc_Av5Q8LvOGZS5ZDcTQ,30868 +apprise/plugins/dot.py,sha256=fzHv8MC2QSB0nb58Tja1G0WfliAfQPPBMMKRXseP-Vo,20666 +apprise/plugins/email/__init__.py,sha256=kTDw88J0x5MVAJC7UIXI-RpiGgzoXI0YanBXvvciylE,1917 +apprise/plugins/email/base.py,sha256=e_A2INbASt76PxX9_IOpKVjtGW6QiXgwvAoJavhy5PU,41205 +apprise/plugins/email/common.py,sha256=oJiGN2GKiXaDbiiJI0CSB32FIf8dqLuVqrJA75KLeJw,2527 +apprise/plugins/email/templates.py,sha256=tAxROfw2Q7XNmWSgggLnQXttGfY0cGJHQqpeCT9hsck,10353 +apprise/plugins/emby.py,sha256=HpR4WAcp8rwbgvwXpSQ3mJ3lE2plPBkQQ3_udQ10nYk,22567 +apprise/plugins/enigma2.py,sha256=vh8qAGmldV4ln1Y9zzYB3gYfhl07zRkgBnDV6ERP5W0,12109 +apprise/plugins/fcm/__init__.py,sha256=69qDcMxtfh8tZOY8gzZp0oFK9fX2NbMWPiEDC-19bGw,22158 +apprise/plugins/fcm/color.py,sha256=rqSeq9Bc6gmwKAqOMLNvSJDFUAKIBwDP5lDxpYd1ClA,4609 +apprise/plugins/fcm/common.py,sha256=yaEc5lOrEr5UFHd24CKhNNZW39PKDqR6Uxa8IwXVBP0,1687 +apprise/plugins/fcm/oauth.py,sha256=iqs5DRqR8dTWI8EiltzlCAn8aWl-gPZi7st7AErO0BE,11147 +apprise/plugins/fcm/priority.py,sha256=8y1E1iHPCH34sas1sGg5fFmyo_lTQpe0o473hUNjQ_0,6904 +apprise/plugins/feishu.py,sha256=kqPjitwmUf2ca62u_NKtiRF8FVFQ-zyeJkBrjfa4jYQ,7497 +apprise/plugins/flock.py,sha256=3s_xc91Jvvso4j8IzvlmSCX_YkEXQJYOUcjTUMzpBdk,13092 +apprise/plugins/fluxer.py,sha256=IjicrZfA_t93kxPAEtoKVXNQlCxX1Zb4niDpqmtINcY,36147 
+apprise/plugins/fortysixelks.py,sha256=5mO6UYgO319A1HfT_Av8jdbXfjhL5qZLAZMRftawQgM,11937 +apprise/plugins/freemobile.py,sha256=RHZHrpwdQJYHr8KGzteipNN-bZLsvEWGqFAYO1cK2qc,6910 +apprise/plugins/glib.py,sha256=rQV1hH7lHRio1b2ZFvZJNTDV5Iwa_F_5d2uS83I8h5E,12920 +apprise/plugins/gnome.py,sha256=2Ykzkdncf0UfI4ok-hZIuhQC3__u10U12PBUGbjvWVU,9295 +apprise/plugins/google_chat.py,sha256=LJI1FLHWRcX3gjspnF3xVCquWt7O0V3JSDt0IcfLjPU,13588 +apprise/plugins/gotify.py,sha256=XgTM2FZlTQwRXD4wfMvYYcIujP8dcEqYM5cDhnj0gN4,11162 +apprise/plugins/growl.py,sha256=A9UYAKrEqBgQzSZlxwiRUN8s7CLRx3M8QowLTY-_KgA,14947 +apprise/plugins/guilded.py,sha256=ARbx9otLNOMoeYNKftLTu8uuDnkmt91jk61zt9HdgN8,3801 +apprise/plugins/home_assistant.py,sha256=CjOVejsI95CdkHDjTw38SxFIoz5BxpiWJj1_1p7gXYk,11669 +apprise/plugins/httpsms.py,sha256=nIC7noesMHLwMXO9vC-92zJx8_v2PFyh6jsivwBR5ow,11457 +apprise/plugins/ifttt.py,sha256=PUsvapfXAreWYyKKTAjDFLHP1Os9vu9VOHPcMKHVnaA,13899 +apprise/plugins/irc/__init__.py,sha256=FnlnCevX7Hxc8zea6r3yqOJimIucLNM6YMDStcfebL8,1493 +apprise/plugins/irc/base.py,sha256=oxjXLj9cpF-vbR0I02nZWQwcA0zlMv7tvxex7Cc4icQ,16860 +apprise/plugins/irc/client.py,sha256=4usfuD7kFARXl2VdviKuG3tQ1DinlrXBoaICbM6QnEg,12788 +apprise/plugins/irc/protocol.py,sha256=4jTetI-FuguaR1R2dBSEoHofYUw7-Q6iMBTFUdDe6zY,6539 +apprise/plugins/irc/state.py,sha256=HI4ZR0wXZUiM5q9Q7rAVW0xpQu7_WIlmBAHbRP0L9KE,7301 +apprise/plugins/irc/templates.py,sha256=tbr4nZ9NR_RlqZXMNfNmtj_F5pxcGkWnsXfxeMLeDYA,2709 +apprise/plugins/jellyfin.py,sha256=Qsqtq0oPXQfZgZekykk6d5TvJBFY7rrBHRpulERczhQ,2155 +apprise/plugins/join.py,sha256=ypse-K0Kp8XpYalogZ3ayVDfxkdZddOFPNuyeY7caEk,14167 +apprise/plugins/kavenegar.py,sha256=onekhFAjWH-3vd1INxSO4p9_VHmKi-19h7ydGdDkjKI,12937 +apprise/plugins/kumulos.py,sha256=L0f5MdNn2gae8-JDy3wDWVNkJqJ9aLyoTbPeAVk1XGg,8464 +apprise/plugins/lametric.py,sha256=FjhugijcodU4j_SgxGuJgbKYLfp5rsPdm9mseJBDTt4,40183 +apprise/plugins/lark.py,sha256=JdAU4YpeXK2bQtvX1BQdqLJ9Dutesavd4USTThiwmy8,6424 
+apprise/plugins/line.py,sha256=_Rg4x4LhPB02HWaVJ9znFohOqAMrOvH_1XeOoNf5Dfc,10972 +apprise/plugins/macosx.py,sha256=x5FE8ccOHg3H7r6Q9Q2N24XpHkCUVsk5xbER1Oj7d9Q,8365 +apprise/plugins/mailgun.py,sha256=f7l1AvYP8BeIvXxpYmh9olzydCEz2_lHa3s0kv1n_EQ,26591 +apprise/plugins/mastodon.py,sha256=CXm0UWddiLBJ5_OrMdkfZ5xrsgGkA0kh_hRlVjjrmVc,36655 +apprise/plugins/matrix.py,sha256=JMujdt0XnClQasKcrJcXL2WysmIKyavo7dQyjSyc0AM,65360 +apprise/plugins/mattermost.py,sha256=QvxvsySE5qneIgAIIMR7WhlOI3_TFK4cWuMSKYfNvQQ,25346 +apprise/plugins/messagebird.py,sha256=cydwBE6RD40HXmJP1Fxkd1XeyvaL2FKqarec4UEzv-s,12569 +apprise/plugins/misskey.py,sha256=mNy2RaspVNOxC9AfpNLM30vTHjDAnZq-oxAW_JtK_dI,10055 +apprise/plugins/mqtt.py,sha256=vo1ay92eEz7cWdLk3_POJOESxQxKOjvRP0lRbzLqi0s,21522 +apprise/plugins/msg91.py,sha256=Nehhtoi_JM2rcP-FhFgiT40ivYgB3DovR2QbB6eCoTE,13153 +apprise/plugins/msteams.py,sha256=rT9VTQdzgZDwUN2kt1IzIlMirz00Sjyw68J-XGPBFrc,27533 +apprise/plugins/nextcloud.py,sha256=km6s73zggAqnsJ5klMfgxW0w0eVsnUNRsg6JN0_3d3I,20174 +apprise/plugins/nextcloudtalk.py,sha256=f18YWEhNCkjOz4XjFeX3oeg-JR7u-mItKfXm4YXv3yE,11667 +apprise/plugins/notica.py,sha256=dvQlQx2QBvpzvIcDRVHReP36EiuXAlXgQOC8hpUlOyc,13541 +apprise/plugins/notifiarr.py,sha256=UscluMX7WJXNx74ANH013Ob83RMBIfPTtGmeZwWdRPI,15829 +apprise/plugins/notificationapi.py,sha256=vjWeqrmq44QejcyTFiRVcNV0qVqrkMlMUyv62N0FY7A,33843 +apprise/plugins/notifico.py,sha256=KPE7AIN16INkoFxCxw83nMTzkTjqG-jYJpcHUZMpz00,12619 +apprise/plugins/ntfy.py,sha256=nbT-XHmF8PGYqDMxdhjFaB_Nf2B3JXZEdy32eaH9P0w,32547 +apprise/plugins/office365.py,sha256=XA4V3zlqgVbXVP-wJgVtALGlOkrxKhBOx7uMwTFpD7Y,38391 +apprise/plugins/one_signal.py,sha256=xcb_r2zQ0K4LepBWqZ5n8aSasM1okglJ3pDTaecVaXY,23941 +apprise/plugins/opsgenie.py,sha256=2g3R7EpIUuaEyKei6DvJ4Nd8hhWfyzKPGQIRKcFsKxQ,29465 +apprise/plugins/pagerduty.py,sha256=T-QaKTGI_Hw8Yv59L5m_Iyl_IimF0p648YoxwckhfQA,18843 +apprise/plugins/pagertree.py,sha256=bPdX37D43PnfpjCGeSWRGCvUx4gAeGU2o9SP_MUBnl0,14225 
+apprise/plugins/parseplatform.py,sha256=INnv_RTfQeYqOi9K2Pymma8VnqbfbvLajkthhQobX_s,11087 +apprise/plugins/plivo.py,sha256=MKp3wj1SO3BQrJMXUVgVQtmIkbn4zjVLS9jXP4zzv0k,14124 +apprise/plugins/popcorn_notify.py,sha256=DgAt0sA9P4qIEQUQKQ1prz7DyOyZF4_2joNfXDBkaCM,11091 +apprise/plugins/prowl.py,sha256=iSZoy6VHjLnk7zaOIBJznMtd0coE3GKl1CMxE13kiV0,10242 +apprise/plugins/pushbullet.py,sha256=PBc6RFPbi8Ohv8OrZUZRam5rvj8-7fJ00_twpjDXUrA,16236 +apprise/plugins/pushdeer.py,sha256=Ap3R2IVjfcyVzSgpLC5tYym0U4SNoalTtbOgs16ymL4,7246 +apprise/plugins/pushed.py,sha256=PsTUAIQLjaS3vHhYmnLZ47P186_dSeCKbN7JZ2NRLjU,12638 +apprise/plugins/pushjet.py,sha256=tSD5LymHK8EuXrWixYBC3e5idz3BRX2yzsYprOsgY2U,9591 +apprise/plugins/pushme.py,sha256=hfnK-9DQzgrCcHtDxuYxjI7N1_ZgtQpZWnUEkjEaI_E,7415 +apprise/plugins/pushover.py,sha256=Mk7xaT7lvDjrhtMu2YWKLa0SkFb2ywZMIWSRH9BQYHI,22103 +apprise/plugins/pushplus.py,sha256=iHASnXcQ1xdrf8Opz0NA0DJxsOSlVIrUn6ugnlputtY,5683 +apprise/plugins/pushsafer.py,sha256=Q9HKYtpqCajuhbljiZzZ01eho9_YU0vKUelETUWwOzE,28301 +apprise/plugins/pushy.py,sha256=jFPChiF9U1mHVPYsrbicdWSgGR3kPrMgQGIJAf7jQgo,12837 +apprise/plugins/qq.py,sha256=RcUEcwgjrX-dNjfB4k7XHWsiC1HuKUJiMcwuXN32_pM,5655 +apprise/plugins/reddit.py,sha256=yIFnLAWIzbnb7UTx8mio2j9WzJaw5zN54nZNx34AcFo,26183 +apprise/plugins/resend.py,sha256=HnV4hUizz7XFIReLHtYQsRtHV1G6BccI-gk9X4nLqBI,20362 +apprise/plugins/revolt.py,sha256=HEagDzGVrYIlqJXc185aq4ATXSCnfmWUpk-bb-hOBL0,14900 +apprise/plugins/rocketchat.py,sha256=Qcpbd06yo548RlDZnJNSpaeb4J728Ox04BDuiaimWts,26677 +apprise/plugins/rsyslog.py,sha256=lZRnTglk-dM87dRd1hMrnLBYvCWwYWMEiF3zrAPUB7M,12516 +apprise/plugins/ryver.py,sha256=Ns-fy6DOboVycrCGKigx0kSDzSawPRTCxx5YxP1TZoA,12249 +apprise/plugins/sendgrid.py,sha256=9XD98TDubjQOfFppOrVEV3iGYbwYWakG4ZOUPNu2JVw,19228 +apprise/plugins/sendpulse.py,sha256=rMNs1cFvErzALD7lw6p_PyjzxLvO7HRbcTUxfgT7YHA,27428 +apprise/plugins/serverchan.py,sha256=VWtgW8AgIPHqBtJu_Uhpjd8ZSaPCkkFlobZP5ZLLDkw,6026 
+apprise/plugins/ses.py,sha256=Hg80nMOVCvXi5OW6rfUp7sjLMe7gWoykxeIo6WYThaM,34848 +apprise/plugins/seven.py,sha256=kfG2Xw55vTaDoOSsAe2SESESThAVydwULFZ5m5blDSM,12451 +apprise/plugins/sfr.py,sha256=UTNdwvYNZ_CojLC0qNwZBT9F1GZQsYICD1Z0LK7QVzw,15185 +apprise/plugins/signal_api.py,sha256=9kqu2_vTX_AViNhn2FcHixg4nNSuFeQrA8FOvGmgRHc,18656 +apprise/plugins/signl4.py,sha256=f7xYu9bseoT3iufQvD7GmhTPRMMTLdOWsfXfXPQ6u3w,11260 +apprise/plugins/simplepush.py,sha256=9j0VqAQ2v8tv6--sZS_2On5CNMK_UBLJRlHDwx6zi0w,12137 +apprise/plugins/sinch.py,sha256=Fxd4VLgDHlGYECWIVmP_bXIQmllEGQ6aJw_0OgJHWew,17456 +apprise/plugins/slack.py,sha256=tQF2konKYyGgp7YQBDI3nhCwV4PpQ-jS1mjUTKmJaIs,48562 +apprise/plugins/smpp.py,sha256=EbWMFnt3ACWAtHzxjZLWSmrQ5kJbUL23LpfON6mvOu0,11759 +apprise/plugins/smseagle.py,sha256=XKCzj6tnxNBy7J5sEeQxNItL4xz1yA-2ShqvVu20fZk,26241 +apprise/plugins/smsmanager.py,sha256=QycgH5UqHvEAoMXMrMJJunHIsALDjYIr6o8df6kj574,14706 +apprise/plugins/smtp2go.py,sha256=IBIy8lQPgIgVioeSDVbITGCq87QjCAYFti1M05j6d78,20956 +apprise/plugins/sns.py,sha256=KxGre-BFLkLdMkJHYiGkGRovpyan8sMSPH13_TeuS2I,24519 +apprise/plugins/sparkpost.py,sha256=qKYuaoCaOJ59T6wnEa3-bGU-ihpnSg8adKulDuYoNpQ,28205 +apprise/plugins/spike.py,sha256=PKSYQvjNCKM9VN5VSa-BCIR5WVyinHoKL2rCVBI3Jc4,5963 +apprise/plugins/splunk.py,sha256=HeBg6FEM1xbuXHNv2XSKkZ74rpaFcUVzTeUegC5xGHI,17182 +apprise/plugins/spugpush.py,sha256=q4pd7G7TBQ2hYd_3qZ5bAdR--nufENbXdtjEpkVK4oc,5616 +apprise/plugins/streamlabs.py,sha256=ys7jnREK9aFowY3rKjz2K_0ufaz2szZczHUCXcZSr7k,16622 +apprise/plugins/synology.py,sha256=2bgOCWMP4i7uUn8Hpxz85e_MsIESS1I23KHLJ45N0ek,11857 +apprise/plugins/syslog.py,sha256=0-E8qJeiKJCElWKcsRQkqFW74yUmF7mT699sFabNHTc,10799 +apprise/plugins/techuluspush.py,sha256=ahi_4sBY3VSECCdPzhDL1snuN0oCEqVMxi5OvxvXbAE,7412 +apprise/plugins/telegram.py,sha256=KBrQXPgYJdbJ--hEiZ0RfN0JbGrXmZ98tCOIDjgdhA4,40886 +apprise/plugins/threema.py,sha256=jjIARxit74jX_WQmom1BPj8-vi5AkK7xM9vNAFssOnA,12287 
+apprise/plugins/twilio.py,sha256=wRGstpOOQ_jnpCHfjKm-R5SwUIzQZYwmay8H0srDa9I,21752 +apprise/plugins/twist.py,sha256=RM75TRbioN9PPGvkKY5031IB1uQBTVaTMgyEmjB3mZM,26944 +apprise/plugins/twitter.py,sha256=AApwLbHfvePJinHMToovp_ESTJ0lW8fNcu0VnVaTFqs,31870 +apprise/plugins/vapid/__init__.py,sha256=tgxaZH-QDXSwTJa5GPw9cjW3mc0X1DG0VB_z2Llacd4,20545 +apprise/plugins/vapid/subscription.py,sha256=v6IWFIyifxTHCs3Y-lNuEcFUOeZ8Q_sXuPWZhDeuscI,13139 +apprise/plugins/viber.py,sha256=m6nDkOwkEVfPEDaO7tQdKlv5OT8Zpnv1LzRYGBPUBRE,11511 +apprise/plugins/voipms.py,sha256=U1cRyImG0xQnYEOifKKRzNMFjlEpiy8bjNYKutUUwow,13335 +apprise/plugins/vonage.py,sha256=Nr4mZDPnPbQb5BdAcoa3KHCpQzH3WBBZuO9R0lcTiiI,13752 +apprise/plugins/webexteams.py,sha256=sEbtMh-YyxCyNNPjLpiYPpV4wCudzJOqYolBzSmH4sY,9886 +apprise/plugins/wecombot.py,sha256=G1gBM4JAzlISHmrRBuI2DSodJhPLpZSo3l5UNcwo7yw,9159 +apprise/plugins/whatsapp.py,sha256=EA9eSADSwgtZHIvEfjqg7IRUbmSLwf698RB39mQDx4A,20846 +apprise/plugins/windows.py,sha256=ozKn8c620kWufxRrgW0G4xIFKrgDP4k0PNufA4b3WFQ,9289 +apprise/plugins/workflows.py,sha256=D4wl5QGX4wRA3_2Oljvkhdx8JJMicJpHjPDf0JZP1ag,22918 +apprise/plugins/wxpusher.py,sha256=2PQCHD8BORYkYeQmWbhMSmqZIfyOdPb4PNAsxkliyIY,13119 +apprise/plugins/xbmc.py,sha256=65_OtJvuaf4Wyn1cqo4ehi2BvJmhrUr53gkqLMadQ50,13071 +apprise/plugins/xmpp/__init__.py,sha256=MHHkHXkuAccQmd54fd5jLdpFTrID7fa6XtnqKQx_SUo,1496 +apprise/plugins/xmpp/adapter.py,sha256=DkKJIJOayei96W-jKyCh0DLJFFT5xlnYpv5At-mt6UY,32095 +apprise/plugins/xmpp/base.py,sha256=8NTYZJJte3SzN_VECLkY-sbPLcqvYU3vc4fp-YA-n9U,14730 +apprise/plugins/xmpp/common.py,sha256=P4_esSvRzgUJNbJB4FNfG_Ec42kf6anPr3jWq77wSQI,2106 +apprise/plugins/zulip.py,sha256=psxDXX8luootHqiElSVtvMRFYQ8_XZLMpGS5CYe-2DA,14531 +apprise/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +apprise/url.py,sha256=NSS9PwWbj2edxPCWFnFZvc8lnlOE0dA-Yi_c4ysyqhY,35434 +apprise/utils/__init__.py,sha256=-XVQG23gk_zU6BF_ugbAan8ChfJmycmuI-gDVLexW88,1406 
+apprise/utils/base64.py,sha256=oRzRs78Di2Gh8nOPVw9kDXkgtPb3O74jfmQ_O4lGfXQ,3771 +apprise/utils/cwe312.py,sha256=6PhiMZqvcWhO9k8cPF0AYo9kBbmX5WEP9Qx4HHE2vTg,7767 +apprise/utils/disk.py,sha256=hKKGA6KMIihqsk9PW4syCtbbOAbCwlraV8Zmub7SJRM,5659 +apprise/utils/format.py,sha256=FxLD6yBcR1PeKAT_a1STo4r5oNv1hUb0DYQGWpZbg0k,6916 +apprise/utils/logic.py,sha256=feRCj1ttRuZADg5_vi9ITcofkV9QuxWOP7BB3mCuo5g,4578 +apprise/utils/module.py,sha256=DYUvpbmcHc0GimpBIwqwDatPzNYVHdKl_r9nCT2KDcQ,2054 +apprise/utils/parse.py,sha256=QWsNdJ39GyHp4TXYZiUk-tKMqsYF2RqLLoRqiSQPhPA,39926 +apprise/utils/pem.py,sha256=FfBjI-I_gjI9ON-K2lId-3gCQrU7L0Cx62USmD8rdOk,28025 +apprise/utils/pgp.py,sha256=qlpYOvyFoZ75ReLDJNqguBeT3K2rrVZPGHBnHUnfhjY,11607 +apprise/utils/sanitize.py,sha256=2SoNrdlX-ZAFQryxgGQixIUWrWO-_lwI3M7ZTYMsJ8I,9532 +apprise/utils/singleton.py,sha256=OUSF4C5yzIQm8wNEkXtSw_pTEKVueXfgaLdOe-fiOe4,1795 +apprise/utils/socket.py,sha256=QHjWbqd4IV2wcTl5a3GZeD8EIkCsA2Wq-EBjPpmrI0E,21903 +apprise/utils/templates.py,sha256=6t1dJmx2l3pqiJEFZLFZWHkpPz2WS92mjRCEG4HSMGA,3245 +apprise/utils/time.py,sha256=W3_y338hpaArJqNr2UIHGjPinEOi2oeBiF2hR3lvF9M,2877 diff --git a/libs/Mako-1.3.8.dist-info/REQUESTED b/libs/apprise-1.9.8.dist-info/REQUESTED similarity index 100% rename from libs/Mako-1.3.8.dist-info/REQUESTED rename to libs/apprise-1.9.8.dist-info/REQUESTED diff --git a/libs/apprise-1.9.8.dist-info/WHEEL b/libs/apprise-1.9.8.dist-info/WHEEL new file mode 100644 index 0000000000..0885d05555 --- /dev/null +++ b/libs/apprise-1.9.8.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (80.10.2) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/libs/apprise-1.9.2.dist-info/entry_points.txt b/libs/apprise-1.9.8.dist-info/entry_points.txt similarity index 100% rename from libs/apprise-1.9.2.dist-info/entry_points.txt rename to libs/apprise-1.9.8.dist-info/entry_points.txt diff --git a/libs/apprise-1.9.8.dist-info/licenses/LICENSE b/libs/apprise-1.9.8.dist-info/licenses/LICENSE new 
file mode 100644 index 0000000000..8ff150bc08 --- /dev/null +++ b/libs/apprise-1.9.8.dist-info/licenses/LICENSE @@ -0,0 +1,25 @@ +BSD 2-Clause License + +Copyright (c) 2026, Chris Caron +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/libs/apprise-1.9.2.dist-info/top_level.txt b/libs/apprise-1.9.8.dist-info/top_level.txt similarity index 100% rename from libs/apprise-1.9.2.dist-info/top_level.txt rename to libs/apprise-1.9.8.dist-info/top_level.txt diff --git a/libs/apprise/__init__.py b/libs/apprise/__init__.py index e9f496539a..1e17ea8c9e 100644 --- a/libs/apprise/__init__.py +++ b/libs/apprise/__init__.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. 
-# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -26,81 +25,98 @@ # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. -__title__ = 'Apprise' -__version__ = '1.9.2' -__author__ = 'Chris Caron' -__license__ = 'BSD 2-Clause' -__copywrite__ = 'Copyright (C) 2025 Chris Caron ' -__email__ = 'lead2gold@gmail.com' -__status__ = 'Production' - -from .common import NotifyType -from .common import NOTIFY_TYPES -from .common import NotifyImageSize -from .common import NOTIFY_IMAGE_SIZES -from .common import NotifyFormat -from .common import NOTIFY_FORMATS -from .common import OverflowMode -from .common import OVERFLOW_MODES -from .common import ConfigFormat -from .common import CONFIG_FORMATS -from .common import ContentIncludeMode -from .common import CONTENT_INCLUDE_MODES -from .common import ContentLocation -from .common import CONTENT_LOCATIONS -from .common import PersistentStoreMode -from .common import PERSISTENT_STORE_MODES - -from .url import URLBase -from .url import PrivacyMode -from .plugins.base import NotifyBase -from .config.base import ConfigBase -from .attachment.base import AttachBase -from . import exception +__title__ = "Apprise" +__description__: str = \ + "Push Notifications that work with just about every platform!" +__version__ = "1.9.8" +__author__ = "Chris Caron" +__email__ = "lead2gold@gmail.com" +__license__ = "BSD 2-Clause" +__copyright__ = "Copyright (c) 2026, Chris Caron " +__status__ = "Production" +from . 
import decorators, exception from .apprise import Apprise -from .locale import AppriseLocale -from .asset import AppriseAsset -from .persistent_store import PersistentStore -from .apprise_config import AppriseConfig from .apprise_attachment import AppriseAttachment +from .apprise_config import AppriseConfig +from .asset import AppriseAsset +from .attachment.base import AttachBase +from .common import ( + CONFIG_FORMATS, + CONTENT_INCLUDE_MODES, + CONTENT_LOCATIONS, + NOTIFY_FORMATS, + NOTIFY_IMAGE_SIZES, + NOTIFY_TYPES, + OVERFLOW_MODES, + PERSISTENT_STORE_MODES, + PERSISTENT_STORE_STATES, + ConfigFormat, + ContentIncludeMode, + ContentLocation, + NotifyFormat, + NotifyImageSize, + NotifyType, + OverflowMode, + PersistentStoreMode, +) +from .config.base import ConfigBase +from .locale import AppriseLocale + +# Inherit our logging with our additional entries added to it +from .logger import LOGGER_NAME, LogCapture, logger, logging from .manager_attachment import AttachmentManager from .manager_config import ConfigurationManager from .manager_plugins import NotificationManager -from . import decorators - -# Inherit our logging with our additional entries added to it -from .logger import logging -from .logger import logger -from .logger import LogCapture +from .persistent_store import PersistentStore +from .plugins.base import NotifyBase +from .url import PrivacyMode, URLBase # Set default logging handler to avoid "No handler found" warnings. 
logging.getLogger(__name__).addHandler(logging.NullHandler()) __all__ = [ + "CONFIG_FORMATS", + "CONTENT_INCLUDE_MODES", + "CONTENT_LOCATIONS", + "LOGGER_NAME", + "NOTIFY_FORMATS", + "NOTIFY_IMAGE_SIZES", + "NOTIFY_TYPES", + "OVERFLOW_MODES", + "PERSISTENT_STORE_MODES", + "PERSISTENT_STORE_STATES", # Core - 'Apprise', 'AppriseAsset', 'AppriseConfig', 'AppriseAttachment', 'URLBase', - 'NotifyBase', 'ConfigBase', 'AttachBase', 'AppriseLocale', - 'PersistentStore', - - # Exceptions - 'exception', - - # Reference - 'NotifyType', 'NotifyImageSize', 'NotifyFormat', 'OverflowMode', - 'NOTIFY_TYPES', 'NOTIFY_IMAGE_SIZES', 'NOTIFY_FORMATS', 'OVERFLOW_MODES', - 'ConfigFormat', 'CONFIG_FORMATS', - 'ContentIncludeMode', 'CONTENT_INCLUDE_MODES', - 'ContentLocation', 'CONTENT_LOCATIONS', - 'PersistentStoreMode', 'PERSISTENT_STORE_MODES', - 'PrivacyMode', - + "Apprise", + "AppriseAsset", + "AppriseAttachment", + "AppriseConfig", + "AppriseLocale", + "AttachBase", + "AttachmentManager", + "ConfigBase", + "ConfigFormat", + "ConfigurationManager", + "ContentIncludeMode", + "ContentLocation", + "LogCapture", # Managers - 'NotificationManager', 'ConfigurationManager', 'AttachmentManager', - + "NotificationManager", + "NotifyBase", + "NotifyFormat", + "NotifyImageSize", + # Reference + "NotifyType", + "OverflowMode", + "PersistentStore", + "PersistentStoreMode", + "PrivacyMode", + "URLBase", # Decorator - 'decorators', - + "decorators", + # Exceptions + "exception", # Logging - 'logging', 'logger', 'LogCapture', + "logger", + "logging", ] diff --git a/libs/apprise/apprise.py b/libs/apprise/apprise.py index 9af55b5faf..924b47bb9d 100644 --- a/libs/apprise/apprise.py +++ b/libs/apprise/apprise.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. 
-# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -26,59 +25,74 @@ # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. +from __future__ import annotations + import asyncio +from collections.abc import Iterator import concurrent.futures as cf -import os from itertools import chain -from . import common +import os +from typing import Any, Optional, Union + +from . import __version__, common, plugins +from .apprise_attachment import AppriseAttachment +from .apprise_config import AppriseConfig +from .asset import AppriseAsset +from .common import ContentLocation +from .config.base import ConfigBase from .conversion import convert_between -from .utils.logic import is_exclusive_match -from .utils.parse import parse_list, parse_urls -from .utils.cwe312 import cwe312_url -from .manager_plugins import NotificationManager from .emojis import apply_emojis -from .logger import logger -from .asset import AppriseAsset -from .apprise_config import AppriseConfig -from .apprise_attachment import AppriseAttachment from .locale import AppriseLocale -from .config.base import ConfigBase +from .logger import logger +from .manager_plugins import NotificationManager from .plugins.base import NotifyBase - -from . import plugins -from . 
import __version__ +from .utils.cwe312 import cwe312_url +from .utils.logic import is_exclusive_match +from .utils.parse import parse_list, parse_urls # Grant access to our Notification Manager Singleton N_MGR = NotificationManager() class Apprise: - """ - Our Notification Manager - - """ - - def __init__(self, servers=None, asset=None, location=None, debug=False): - """ - Loads a set of server urls while applying the Asset() module to each + """Our Notification Manager.""" + + def __init__( + self, + servers: Optional[ + Union[ + str, + dict, + NotifyBase, + AppriseConfig, + ConfigBase, + list[Union[str, dict, NotifyBase, AppriseConfig, ConfigBase]], + ] + ] = None, + asset: Optional[AppriseAsset] = None, + location: Optional[ContentLocation] = None, + debug: bool = False, + ) -> None: + """Loads a set of server urls while applying the Asset() module to each if specified. If no asset is provided, then the default asset is used. - Optionally specify a global ContentLocation for a more strict means - of handling Attachments. + Optionally specify a global ContentLocation for a more strict means of + handling Attachments. """ # Initialize a server list of URLs - self.servers = list() + self.servers = [] # Assigns an central asset object that will be later passed into each # notification plugin. Assets contain information such as the local # directory images can be found in. It can also identify remote # URL paths that contain the images you want to present to the end # user. If no asset is specified, then the default one is used. 
- self.asset = \ + self.asset = ( asset if isinstance(asset, AppriseAsset) else AppriseAsset() + ) if servers: self.add(servers) @@ -95,9 +109,13 @@ def __init__(self, servers=None, asset=None, location=None, debug=False): self.location = location @staticmethod - def instantiate(url, asset=None, tag=None, suppress_exceptions=True): - """ - Returns the instance of a instantiated plugin based on the provided + def instantiate( + url: Union[str, dict], + asset: Optional[AppriseAsset] = None, + tag: Optional[Union[str, list[str]]] = None, + suppress_exceptions: bool = True, + ) -> Optional[NotifyBase]: + """Returns the instance of a instantiated plugin based on the provided Server URL. If the url fails to be parsed, then None is returned. The specified url can be either a string (the URL itself) or a @@ -129,7 +147,8 @@ def instantiate(url, asset=None, tag=None, suppress_exceptions=True): if isinstance(url, str): # Acquire our url tokens results = plugins.url_to_dict( - url, secure_logging=asset.secure_logging) + url, secure_logging=asset.secure_logging + ) if results is None: # Failed to parse the server URL; detailed logging handled @@ -140,73 +159,86 @@ def instantiate(url, asset=None, tag=None, suppress_exceptions=True): # We already have our result set results = url - if results.get('schema') not in N_MGR: + if results.get("schema") not in N_MGR: # schema is a mandatory dictionary item as it is the only way # we can index into our loaded plugins logger.error('Dictionary does not include a "schema" entry.') logger.trace( - 'Invalid dictionary unpacked as:{}{}'.format( - os.linesep, os.linesep.join( - ['{}="{}"'.format(k, v) - for k, v in results.items()]))) + "Invalid dictionary unpacked as:{}{}".format( + os.linesep, + os.linesep.join( + [f'{k}="{v}"' for k, v in results.items()] + ), + ) + ) return None logger.trace( - 'Dictionary unpacked as:{}{}'.format( - os.linesep, os.linesep.join( - ['{}="{}"'.format(k, v) for k, v in results.items()]))) + "Dictionary 
unpacked as:{}{}".format( + os.linesep, + os.linesep.join( + [f'{k}="{v}"' for k, v in results.items()] + ), + ) + ) # Otherwise we handle the invalid input specified else: logger.error( - 'An invalid URL type (%s) was specified for instantiation', - type(url)) + "An invalid URL type (%s) was specified for instantiation", + type(url), + ) return None - if not N_MGR[results['schema']].enabled: + if not N_MGR[results["schema"]].enabled: # # First Plugin Enable Check (Pre Initialization) # # Plugin has been disabled at a global level logger.error( - '%s:// is disabled on this system.', results['schema']) + "%s:// is disabled on this system.", results["schema"] + ) return None # Build a list of tags to associate with the newly added notifications - results['tag'] = set(parse_list(tag)) + results["tag"] = set(parse_list(tag)) # Set our Asset Object - results['asset'] = asset + results["asset"] = asset if suppress_exceptions: try: # Attempt to create an instance of our plugin using the parsed # URL information - plugin = N_MGR[results['schema']](**results) + plugin = N_MGR[results["schema"]](**results) # Create log entry of loaded URL logger.debug( - 'Loaded {} URL: {}'.format( - N_MGR[results['schema']].service_name, - plugin.url(privacy=asset.secure_logging))) + "Loaded {} URL: {}".format( + N_MGR[results["schema"]].service_name, + plugin.url(privacy=asset.secure_logging), + ) + ) except Exception: # CWE-312 (Secure Logging) Handling - loggable_url = url if not asset.secure_logging \ - else cwe312_url(url) + loggable_url = ( + url if not asset.secure_logging else cwe312_url(url) + ) # the arguments are invalid or can not be used. 
logger.error( - 'Could not load {} URL: {}'.format( - N_MGR[results['schema']].service_name, - loggable_url)) + "Could not load {} URL: {}".format( + N_MGR[results["schema"]].service_name, loggable_url + ) + ) return None else: # Attempt to create an instance of our plugin using the parsed # URL information but don't wrap it in a try catch - plugin = N_MGR[results['schema']](**results) + plugin = N_MGR[results["schema"]](**results) if not plugin.enabled: # @@ -223,14 +255,26 @@ def instantiate(url, asset=None, tag=None, suppress_exceptions=True): # are polling what is available. These services that become # disabled thereafter are shown initially that they can be used. logger.error( - '%s:// has become disabled on this system.', results['schema']) + "%s:// has become disabled on this system.", results["schema"] + ) return None return plugin - def add(self, servers, asset=None, tag=None): - """ - Adds one or more server URLs into our list. + def add( + self, + servers: Union[ + str, + dict, + NotifyBase, + AppriseConfig, + ConfigBase, + list[Union[str, dict, NotifyBase, AppriseConfig, ConfigBase]], + ], + asset: Optional[AppriseAsset] = None, + tag: Optional[Union[str, list[str]]] = None, + ) -> bool: + """Adds one or more server URLs into our list. You can override the global asset if you wish by including it with the server(s) that you add. @@ -264,27 +308,29 @@ def add(self, servers, asset=None, tag=None): elif not isinstance(servers, (tuple, set, list)): logger.error( - "An invalid notification (type={}) was specified.".format( - type(servers))) + f"An invalid notification (type={type(servers)}) was" + " specified." 
+ ) return False - for _server in servers: + for server in servers: - if isinstance(_server, (ConfigBase, NotifyBase, AppriseConfig)): + if isinstance(server, (ConfigBase, NotifyBase, AppriseConfig)): # Go ahead and just add our plugin into our list - self.servers.append(_server) + self.servers.append(server) continue - elif not isinstance(_server, (str, dict)): + elif not isinstance(server, (str, dict)): logger.error( - "An invalid notification (type={}) was specified.".format( - type(_server))) + f"An invalid notification (type={type(server)}) was" + " specified." + ) return_status = False continue # Instantiate ourselves an object, this function throws or # returns None if it fails - instance = Apprise.instantiate(_server, asset=asset, tag=tag) + instance = Apprise.instantiate(server, asset=asset, tag=tag) if not isinstance(instance, NotifyBase): # No logging is required as instantiate() handles failure # and/or success reasons for us @@ -297,18 +343,16 @@ def add(self, servers, asset=None, tag=None): # Return our status return return_status - def clear(self): - """ - Empties our server list - - """ + def clear(self) -> None: + """Empties our server list.""" self.servers[:] = [] - def find(self, tag=common.MATCH_ALL_TAG, match_always=True): - """ - Returns a list of all servers matching against the tag specified. 
- - """ + def find( + self, + tag: Any = common.MATCH_ALL_TAG, + match_always: bool = True, + ) -> Iterator[NotifyBase]: + """Returns a list of all servers matching against the tag specified.""" # Build our tag setup # - top level entries are treated as an 'or' @@ -332,51 +376,66 @@ def find(self, tag=common.MATCH_ALL_TAG, match_always=True): servers = entry.servers() else: - servers = [entry, ] + servers = [ + entry, + ] for server in servers: # Apply our tag matching based on our defined logic if is_exclusive_match( - logic=tag, data=server.tags, - match_all=common.MATCH_ALL_TAG, - match_always=match_always): + logic=tag, + data=server.tags, + match_all=common.MATCH_ALL_TAG, + match_always=match_always, + ): yield server return - def notify(self, body, title='', notify_type=common.NotifyType.INFO, - body_format=None, tag=common.MATCH_ALL_TAG, match_always=True, - attach=None, interpret_escapes=None): - """ - Send a notification to all the plugins previously loaded. - - If the body_format specified is NotifyFormat.MARKDOWN, it will - be converted to HTML if the Notification type expects this. - - if the tag is specified (either a string or a set/list/tuple - of strings), then only the notifications flagged with that - tagged value are notified. By default, all added services - are notified (tag=MATCH_ALL_TAG) - - This function returns True if all notifications were successfully - sent, False if even just one of them fails, and None if no - notifications were sent at all as a result of tag filtering and/or - simply having empty configuration files that were read. + def notify( + self, + body: Union[str, bytes], + title: Union[str, bytes] = "", + notify_type: Union[str, common.NotifyType] = common.NotifyType.INFO, + body_format: Optional[str] = None, + tag: Any = common.MATCH_ALL_TAG, + match_always: bool = True, + attach: Any = None, + interpret_escapes: Optional[bool] = None, + ) -> Optional[bool]: + """Send a notification to all the plugins previously loaded. 
+ + If the body_format specified is NotifyFormat.MARKDOWN, it will be + converted to HTML if the Notification type expects this. + + if the tag is specified (either a string or a set/list/tuple of + strings), then only the notifications flagged with that tagged value + are notified. By default, all added services are notified + (tag=MATCH_ALL_TAG) + + This function returns True if all notifications were successfully sent, + False if even just one of them fails, and None if no notifications were + sent at all as a result of tag filtering and/or simply having empty + configuration files that were read. Attach can contain a list of attachment URLs. attach can also be - represented by an AttachBase() (or list of) object(s). This - identifies the products you wish to notify + represented by an AttachBase() (or list of) object(s). This identifies + the products you wish to notify - Set interpret_escapes to True if you want to pre-escape a string - such as turning a \n into an actual new line, etc. + Set interpret_escapes to True if you want to pre-escape a string such + as turning a \n into an actual new line, etc. """ try: # Process arguments and build synchronous and asynchronous calls # (this step can throw internal errors). 
sequential_calls, parallel_calls = self._create_notify_calls( - body, title, - notify_type=notify_type, body_format=body_format, - tag=tag, match_always=match_always, attach=attach, + body, + title, + notify_type=notify_type, + body_format=body_format, + tag=tag, + match_always=match_always, + attach=attach, interpret_escapes=interpret_escapes, ) @@ -392,19 +451,22 @@ def notify(self, body, title='', notify_type=common.NotifyType.INFO, parallel_result = Apprise._notify_parallel_threadpool(*parallel_calls) return sequential_result and parallel_result - async def async_notify(self, *args, **kwargs): - """ - Send a notification to all the plugins previously loaded, for + async def async_notify( + self, + *args: Any, + **kwargs: Any + ) -> Optional[bool]: + """Send a notification to all the plugins previously loaded, for asynchronous callers. The arguments are identical to those of Apprise.notify(). - """ try: # Process arguments and build synchronous and asynchronous calls # (this step can throw internal errors). sequential_calls, parallel_calls = self._create_notify_calls( - *args, **kwargs) + *args, **kwargs + ) except TypeError: # No notifications sent, and there was an internal error. @@ -415,13 +477,13 @@ async def async_notify(self, *args, **kwargs): return None sequential_result = Apprise._notify_sequential(*sequential_calls) - parallel_result = \ - await Apprise._notify_parallel_asyncio(*parallel_calls) + parallel_result = await Apprise._notify_parallel_asyncio( + *parallel_calls + ) return sequential_result and parallel_result def _create_notify_calls(self, *args, **kwargs): - """ - Creates notifications for all the plugins loaded. + """Creates notifications for all the plugins loaded. Returns a list of (server, notify() kwargs) tuples for plugins with parallelism disabled and another list for plugins with parallelism @@ -432,7 +494,7 @@ def _create_notify_calls(self, *args, **kwargs): # Split into sequential and parallel notify() calls. 
sequential, parallel = [], [] - for (server, notify_kwargs) in all_calls: + for server, notify_kwargs in all_calls: if server.asset.async_mode: parallel.append((server, notify_kwargs)) else: @@ -440,14 +502,18 @@ def _create_notify_calls(self, *args, **kwargs): return sequential, parallel - def _create_notify_gen(self, body, title='', - notify_type=common.NotifyType.INFO, - body_format=None, tag=common.MATCH_ALL_TAG, - match_always=True, attach=None, - interpret_escapes=None): - """ - Internal generator function for _create_notify_calls(). - """ + def _create_notify_gen( + self, + body, + title="", + notify_type=common.NotifyType.INFO, + body_format=None, + tag=common.MATCH_ALL_TAG, + match_always=True, + attach=None, + interpret_escapes=None, + ): + """Internal generator function for _create_notify_calls().""" if len(self) == 0: # Nothing to notify @@ -460,6 +526,18 @@ def _create_notify_gen(self, body, title='', logger.error(msg) raise TypeError(msg) + try: + notify_type = ( + notify_type if isinstance(notify_type, common.NotifyType) + else common.NotifyType(notify_type.lower()) + ) + + except (AttributeError, ValueError, TypeError): + err = ( + f"An invalid notification type ({notify_type}) was " + "specified.") + raise TypeError(err) from None + try: if title and isinstance(title, bytes): title = title.decode(self.asset.encoding) @@ -468,27 +546,34 @@ def _create_notify_gen(self, body, title='', body = body.decode(self.asset.encoding) except UnicodeDecodeError: - msg = 'The content passed into Apprise was not of encoding ' \ - 'type: {}'.format(self.asset.encoding) + msg = ( + "The content passed into Apprise was not of encoding " + f"type: {self.asset.encoding}" + ) logger.error(msg) - raise TypeError(msg) + raise TypeError(msg) from None # Tracks conversions - conversion_body_map = dict() - conversion_title_map = dict() + conversion_body_map = {} + conversion_title_map = {} # Prepare attachments if required if attach is not None and not isinstance(attach, 
AppriseAttachment): attach = AppriseAttachment( - attach, asset=self.asset, location=self.location) + attach, asset=self.asset, location=self.location + ) # Allow Asset default value - body_format = self.asset.body_format \ - if body_format is None else body_format + body_format = ( + self.asset.body_format if body_format is None else body_format + ) # Allow Asset default value - interpret_escapes = self.asset.interpret_escapes \ - if interpret_escapes is None else interpret_escapes + interpret_escapes = ( + self.asset.interpret_escapes + if interpret_escapes is None + else interpret_escapes + ) # Iterate over our loaded plugins for server in self.find(tag, match_always=match_always): @@ -499,8 +584,11 @@ def _create_notify_gen(self, body, title='', # First we need to generate a key we will use to determine if we # need to build our data out. Entries without are merged with # the body at this stage. - key = server.notify_format if server.title_maxlen > 0\ - else f'_{server.notify_format}' + key = ( + server.notify_format + if server.title_maxlen > 0 + else f"_{server.notify_format}" + ) if server.interpret_emojis: # alter our key slightly to handle emojis since their value is @@ -510,19 +598,21 @@ def _create_notify_gen(self, body, title='', if key not in conversion_title_map: # Prepare our title - conversion_title_map[key] = '' if not title else title + conversion_title_map[key] = title if title else "" # Conversion of title only occurs for services where the title # is blended with the body (title_maxlen <= 0) if conversion_title_map[key] and server.title_maxlen <= 0: conversion_title_map[key] = convert_between( - body_format, server.notify_format, - content=conversion_title_map[key]) + body_format, + server.notify_format, + content=conversion_title_map[key], + ) # Our body is always converted no matter what - conversion_body_map[key] = \ - convert_between( - body_format, server.notify_format, content=body) + conversion_body_map[key] = convert_between( + 
body_format, server.notify_format, content=body + ) if interpret_escapes: # @@ -532,50 +622,52 @@ def _create_notify_gen(self, body, title='', try: # Added overhead required due to Python 3 Encoding Bug # identified here: https://bugs.python.org/issue21331 - conversion_body_map[key] = \ - conversion_body_map[key]\ - .encode('ascii', 'backslashreplace')\ - .decode('unicode-escape') - - conversion_title_map[key] = \ - conversion_title_map[key]\ - .encode('ascii', 'backslashreplace')\ - .decode('unicode-escape') + conversion_body_map[key] = ( + conversion_body_map[key] + .encode("ascii", "backslashreplace") + .decode("unicode-escape") + ) + + conversion_title_map[key] = ( + conversion_title_map[key] + .encode("ascii", "backslashreplace") + .decode("unicode-escape") + ) except AttributeError: # Must be of string type - msg = 'Failed to escape message body' + msg = "Failed to escape message body" logger.error(msg) - raise TypeError(msg) + raise TypeError(msg) from None if server.interpret_emojis: # # Convert our :emoji: definitions # - conversion_body_map[key] = \ - apply_emojis(conversion_body_map[key]) - conversion_title_map[key] = \ - apply_emojis(conversion_title_map[key]) - - kwargs = dict( - body=conversion_body_map[key], - title=conversion_title_map[key], - notify_type=notify_type, - attach=attach, - body_format=body_format - ) + conversion_body_map[key] = apply_emojis( + conversion_body_map[key] + ) + conversion_title_map[key] = apply_emojis( + conversion_title_map[key] + ) + + kwargs = { + "body": conversion_body_map[key], + "title": conversion_title_map[key], + "notify_type": notify_type, + "attach": attach, + "body_format": body_format, + } yield (server, kwargs) @staticmethod def _notify_sequential(*servers_kwargs): - """ - Process a list of notify() calls sequentially and synchronously. 
- """ + """Process a list of notify() calls sequentially and synchronously.""" success = True - for (server, kwargs) in servers_kwargs: + for server, kwargs in servers_kwargs: try: # Send notification result = server.notify(**kwargs) @@ -595,9 +687,7 @@ def _notify_sequential(*servers_kwargs): @staticmethod def _notify_parallel_threadpool(*servers_kwargs): - """ - Process a list of notify() calls in parallel and synchronously. - """ + """Process a list of notify() calls in parallel and synchronously.""" n_calls = len(servers_kwargs) @@ -611,12 +701,15 @@ def _notify_parallel_threadpool(*servers_kwargs): # Create log entry logger.info( - 'Notifying %d service(s) with threads.', len(servers_kwargs)) + "Notifying %d service(s) with threads.", len(servers_kwargs) + ) with cf.ThreadPoolExecutor() as executor: success = True - futures = [executor.submit(server.notify, **kwargs) - for (server, kwargs) in servers_kwargs] + futures = [ + executor.submit(server.notify, **kwargs) + for (server, kwargs) in servers_kwargs + ] for future in cf.as_completed(futures): try: @@ -637,9 +730,8 @@ def _notify_parallel_threadpool(*servers_kwargs): @staticmethod async def _notify_parallel_asyncio(*servers_kwargs): - """ - Process a list of async_notify() calls in parallel and asynchronously. 
- """ + """Process a list of async_notify() calls in parallel and + asynchronously.""" n_calls = len(servers_kwargs) @@ -653,7 +745,8 @@ async def _notify_parallel_asyncio(*servers_kwargs): # Create log entry logger.info( - 'Notifying %d service(s) asynchronously.', len(servers_kwargs)) + "Notifying %d service(s) asynchronously.", len(servers_kwargs) + ) async def do_call(server, kwargs): return await server.async_notify(**kwargs) @@ -661,8 +754,10 @@ async def do_call(server, kwargs): cors = (do_call(server, kwargs) for (server, kwargs) in servers_kwargs) results = await asyncio.gather(*cors, return_exceptions=True) - if any(isinstance(status, Exception) - and not isinstance(status, TypeError) for status in results): + if any( + isinstance(status, Exception) and not isinstance(status, TypeError) + for status in results + ): # A catch all so we don't have to abort early just because # one of our plugins has a bug in it. logger.exception("Unhandled Notification Exception") @@ -674,20 +769,22 @@ async def do_call(server, kwargs): return all(results) - def details(self, lang=None, show_requirements=False, show_disabled=False): - """ - Returns the details associated with the Apprise object - - """ + def details( + self, + lang: Optional[str] = None, + show_requirements: bool = False, + show_disabled: bool = False, + ) -> dict[str, Any]: + """Returns the details associated with the Apprise object.""" # general object returned response = { # Defines the current version of Apprise - 'version': __version__, + "version": __version__, # Lists all of the currently supported Notifications - 'schemas': [], + "schemas": [], # Includes the configured asset details - 'asset': self.asset.details(), + "asset": self.asset.details(), } for plugin in N_MGR.plugins(): @@ -695,79 +792,83 @@ def details(self, lang=None, show_requirements=False, show_disabled=False): # their status: content = { - 'service_name': getattr(plugin, 'service_name', None), - 'service_url': getattr(plugin, 
'service_url', None), - 'setup_url': getattr(plugin, 'setup_url', None), + "service_name": getattr(plugin, "service_name", None), + "service_url": getattr(plugin, "service_url", None), + "setup_url": getattr(plugin, "setup_url", None), # Placeholder - populated below - 'details': None, - + "details": None, # Let upstream service know of the plugins that support # attachments - 'attachment_support': getattr( - plugin, 'attachment_support', False), - + "attachment_support": getattr( + plugin, "attachment_support", False + ), # Differentiat between what is a custom loaded plugin and # which is native. - 'category': getattr(plugin, 'category', None) + "category": getattr(plugin, "category", None), } # Standard protocol(s) should be None or a tuple - enabled = getattr(plugin, 'enabled', True) + enabled = getattr(plugin, "enabled", True) if not show_disabled and not enabled: # Do not show inactive plugins continue elif show_disabled: # Add current state to response - content['enabled'] = enabled + content["enabled"] = enabled # Standard protocol(s) should be None or a tuple - protocols = getattr(plugin, 'protocol', None) + protocols = getattr(plugin, "protocol", None) if isinstance(protocols, str): - protocols = (protocols, ) + protocols = (protocols,) # Secure protocol(s) should be None or a tuple - secure_protocols = getattr(plugin, 'secure_protocol', None) + secure_protocols = getattr(plugin, "secure_protocol", None) if isinstance(secure_protocols, str): - secure_protocols = (secure_protocols, ) + secure_protocols = (secure_protocols,) # Add our protocol details to our content content.update({ - 'protocols': protocols, - 'secure_protocols': secure_protocols, + "protocols": protocols, + "secure_protocols": secure_protocols, }) if not lang: # Simply return our results - content['details'] = plugins.details(plugin) + content["details"] = plugins.details(plugin) if show_requirements: - content['requirements'] = plugins.requirements(plugin) + content["requirements"] = 
plugins.requirements(plugin) else: # Emulate the specified language when returning our results with self.locale.lang_at(lang): - content['details'] = plugins.details(plugin) + content["details"] = plugins.details(plugin) if show_requirements: - content['requirements'] = plugins.requirements(plugin) + content["requirements"] = plugins.requirements(plugin) # Build our response object - response['schemas'].append(content) + response["schemas"].append(content) return response - def urls(self, privacy=False): - """ - Returns all of the loaded URLs defined in this apprise object. - """ - return [x.url(privacy=privacy) for x in self.servers] + def urls(self, privacy: bool = False) -> list[str]: + """Returns all of the loaded URLs defined in this apprise object.""" + urls = [] + for s in self.servers: + if isinstance(s, (ConfigBase, AppriseConfig)): + for s_ in s.servers(): + urls.append(s_.url(privacy=privacy)) + else: + urls.append(s.url(privacy=privacy)) + return urls - def pop(self, index): - """ - Removes an indexed Notification Service from the stack and returns it. + def pop(self, index: int) -> NotifyBase: + """Removes an indexed Notification Service from the stack and returns + it. The thing is we can never pop AppriseConfig() entries, only what was - loaded within them. So pop needs to carefully iterate over our list - and only track actual entries. + loaded within them. So pop needs to carefully iterate over our list and + only track actual entries. 
""" # Tracking variables @@ -783,11 +884,17 @@ def pop(self, index): if offset >= index: # we can pop an element from our config stack - fn = s.pop if isinstance(s, ConfigBase) \ + fn = ( + s.pop + if isinstance(s, ConfigBase) else s.server_pop + ) - return fn(index if prev_offset == -1 - else (index - prev_offset - 1)) + return fn( + index + if prev_offset == -1 + else (index - prev_offset - 1) + ) else: offset = prev_offset + 1 @@ -798,12 +905,10 @@ def pop(self, index): prev_offset = offset # If we reach here, then we indexed out of range - raise IndexError('list index out of range') + raise IndexError("list index out of range") - def __getitem__(self, index): - """ - Returns the indexed server entry of a loaded notification server - """ + def __getitem__(self, index: int) -> NotifyBase: + """Returns the indexed server entry of a loaded notification server.""" # Tracking variables prev_offset = -1 offset = prev_offset @@ -817,8 +922,11 @@ def __getitem__(self, index): offset = prev_offset + len(servers) if offset >= index: - return servers[index if prev_offset == -1 - else (index - prev_offset - 1)] + return servers[( + index + if prev_offset == -1 + else (index - prev_offset - 1) + )] else: offset = prev_offset + 1 @@ -829,58 +937,77 @@ def __getitem__(self, index): prev_offset = offset # If we reach here, then we indexed out of range - raise IndexError('list index out of range') + raise IndexError("list index out of range") - def __getstate__(self): - """ - Pickle Support dumps() - """ + def __getstate__(self) -> dict[str, object]: + """Pickle Support dumps()""" attributes = { - 'asset': self.asset, + "asset": self.asset, # Prepare our URL list as we need to extract the associated tags # and asset details associated with it - 'urls': [{ - 'url': server.url(privacy=False), - 'tag': server.tags if server.tags else None, - 'asset': server.asset} for server in self.servers], - 'locale': self.locale, - 'debug': self.debug, - 'location': self.location, + "urls": [ + 
{ + "url": server.url(privacy=False), + "tag": server.tags if server.tags else None, + "asset": server.asset, + } + for server in self.servers + ], + "locale": self.locale, + "debug": self.debug, + "location": self.location.value if self.location else None, } return attributes - def __setstate__(self, state): - """ - Pickle Support loads() - """ - self.servers = list() - self.asset = state['asset'] - self.locale = state['locale'] - self.location = state['location'] - for entry in state['urls']: - self.add(entry['url'], asset=entry['asset'], tag=entry['tag']) - - def __bool__(self): - """ - Allows the Apprise object to be wrapped in an 'if statement'. + def __setstate__(self, state: dict[str, object]) -> None: + """Pickle Support loads()""" + self.servers = [] + self.asset = state["asset"] + self.locale = state["locale"] + + location = state.get("location") + self.location = ( + location if isinstance(location, ContentLocation) + else ContentLocation(location) + if location is not None + else None + ) + + for entry in state["urls"]: + self.add(entry["url"], asset=entry["asset"], tag=entry["tag"]) + + def __bool__(self) -> bool: + """Allows the Apprise object to be wrapped in an 'if statement'. + True is returned if at least one service has been loaded. """ return len(self) > 0 - def __iter__(self): - """ - Returns an iterator to each of our servers loaded. This includes those - found inside configuration. - """ - return chain(*[[s] if not isinstance(s, (ConfigBase, AppriseConfig)) - else iter(s.servers()) for s in self.servers]) + def __iter__(self) -> Iterator[NotifyBase]: + """Returns an iterator to each of our servers loaded. - def __len__(self): + This includes those found inside configuration. """ - Returns the number of servers loaded; this includes those found within - loaded configuration. This funtion nnever actually counts the - Config entry themselves (if they exist), only what they contain. 
+ return chain(*[ + ( + [s] + if not isinstance(s, (ConfigBase, AppriseConfig)) + else iter(s.servers()) + ) + for s in self.servers + ]) + + def __len__(self) -> int: + """Returns the number of servers loaded; this includes those found + within loaded configuration. + + This funtion nnever actually counts the Config entry themselves (if + they exist), only what they contain. """ - return sum([1 if not isinstance(s, (ConfigBase, AppriseConfig)) - else len(s.servers()) for s in self.servers]) + return sum(( + 1 + if not isinstance(s, (ConfigBase, AppriseConfig)) + else len(s.servers()) + ) + for s in self.servers) diff --git a/libs/apprise/apprise.pyi b/libs/apprise/apprise.pyi deleted file mode 100644 index 5a34c9c654..0000000000 --- a/libs/apprise/apprise.pyi +++ /dev/null @@ -1,62 +0,0 @@ -from typing import Any, Dict, List, Iterable, Iterator, Optional - -from . import (AppriseAsset, AppriseAttachment, AppriseConfig, ConfigBase, - NotifyBase, NotifyFormat, NotifyType) -from .common import ContentLocation - -_Server = Union[str, ConfigBase, NotifyBase, AppriseConfig] -_Servers = Union[_Server, Dict[Any, _Server], Iterable[_Server]] -# Can't define this recursively as mypy doesn't support recursive types: -# https://github.com/python/mypy/issues/731 -_Tag = Union[str, Iterable[Union[str, Iterable[str]]]] - -class Apprise: - def __init__( - self, - servers: _Servers = ..., - asset: Optional[AppriseAsset] = ..., - location: Optional[ContentLocation] = ..., - debug: bool = ... - ) -> None: ... - @staticmethod - def instantiate( - url: Union[str, Dict[str, NotifyBase]], - asset: Optional[AppriseAsset] = ..., - tag: Optional[_Tag] = ..., - suppress_exceptions: bool = ... - ) -> NotifyBase: ... - def add( - self, - servers: _Servers = ..., - asset: Optional[AppriseAsset] = ..., - tag: Optional[_Tag] = ... - ) -> bool: ... - def clear(self) -> None: ... - def find(self, tag: str = ...) -> Iterator[Apprise]: ... 
- def notify( - self, - body: str, - title: str = ..., - notify_type: NotifyType = ..., - body_format: NotifyFormat = ..., - tag: _Tag = ..., - attach: Optional[AppriseAttachment] = ..., - interpret_escapes: Optional[bool] = ... - ) -> bool: ... - async def async_notify( - self, - body: str, - title: str = ..., - notify_type: NotifyType = ..., - body_format: NotifyFormat = ..., - tag: _Tag = ..., - attach: Optional[AppriseAttachment] = ..., - interpret_escapes: Optional[bool] = ... - ) -> bool: ... - def details(self, lang: Optional[str] = ...) -> Dict[str, Any]: ... - def urls(self, privacy: bool = ...) -> Iterable[str]: ... - def pop(self, index: int) -> ConfigBase: ... - def __getitem__(self, index: int) -> ConfigBase: ... - def __bool__(self) -> bool: ... - def __iter__(self) -> Iterator[ConfigBase]: ... - def __len__(self) -> int: ... \ No newline at end of file diff --git a/libs/apprise/apprise_attachment.py b/libs/apprise/apprise_attachment.py index 855b42d440..64c58a2ce2 100644 --- a/libs/apprise/apprise_attachment.py +++ b/libs/apprise/apprise_attachment.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. -# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -26,13 +25,15 @@ # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. -from . 
import URLBase -from .attachment.base import AttachBase +from collections.abc import Iterator +from typing import Any, Optional, Union + from .asset import AppriseAsset -from .manager_attachment import AttachmentManager -from .logger import logger +from .attachment.base import AttachBase from .common import ContentLocation -from .common import CONTENT_LOCATIONS +from .logger import logger +from .manager_attachment import AttachmentManager +from .url import URLBase from .utils.parse import GET_SCHEMA_RE # Grant access to our Notification Manager Singleton @@ -40,15 +41,18 @@ class AppriseAttachment: - """ - Our Apprise Attachment File Manager - - """ - - def __init__(self, paths=None, asset=None, cache=True, location=None, - **kwargs): - """ - Loads all of the paths/urls specified (if any). + """Our Apprise Attachment File Manager.""" + + def __init__( + self, + paths: Optional[Union[str, list[ + Union[str, AttachBase, "AppriseAttachment"]]]] = None, + asset: Optional[AppriseAsset] = None, + cache: Union[bool, int] = True, + location: Optional[Union[str, ContentLocation]] = None, + **kwargs: Any, + ) -> None: + """Loads all of the paths/urls specified (if any). The path can either be a single string identifying one explicit location, otherwise you can pass in a series of locations to scan @@ -78,45 +82,60 @@ def __init__(self, paths=None, asset=None, cache=True, location=None, - LOCAL: The least restrictive mode as local files can be referenced in addition to hosted. - In all both HOSTED and LOCAL modes, INACCESSIBLE attachment types will + In all but HOSTED and LOCAL modes, INACCESSIBLE attachment types will continue to be inaccessible. However if you set this field (location) to None (it's default value) the attachment location category will not be tested in any way (all attachment types will be allowed). The location field is also a global option that can be set when initializing the Apprise object. 
- """ # Initialize our attachment listings - self.attachments = list() + self.attachments = [] # Set our cache flag self.cache = cache # Prepare our Asset Object - self.asset = \ + self.asset = ( asset if isinstance(asset, AppriseAsset) else AppriseAsset() + ) - if location is not None and location not in CONTENT_LOCATIONS: - msg = "An invalid Attachment location ({}) was specified." \ - .format(location) - logger.warning(msg) - raise TypeError(msg) - - # Store our location - self.location = location + if location: + try: + self.location = ( + location if isinstance(location, ContentLocation) + else ContentLocation(location.lower()) + ) + + except (AttributeError, ValueError): + err = ( + f"An invalid Attachment location ({location}) was " + "specified.", + ) + logger.warning(err) + raise TypeError(err) from None + else: + # do not set location if no initialization was made for it + self.location = None # Now parse any paths specified - if paths is not None: - # Store our path(s) - if not self.add(paths): - # Parse Source domain based on from_addr - raise TypeError("One or more attachments could not be added.") - - def add(self, attachments, asset=None, cache=None): - """ - Adds one or more attachments into our list. + if paths is not None and not self.add(paths): + raise TypeError("One or more attachments could not be added.") + + def add( + self, + attachments: Union[ + str, + AttachBase, + "AppriseAttachment", + list[Union[str, AttachBase, "AppriseAttachment"]], + ], + asset: Optional[AppriseAsset] = None, + cache: Optional[Union[bool, int]] = None, + ) -> bool: + """Adds one or more attachments into our list. By default we cache our responses so that subsiquent calls does not cause the content to be retrieved again. 
For local file references @@ -144,60 +163,66 @@ def add(self, attachments, asset=None, cache=None): if isinstance(attachments, (AttachBase, str)): # store our instance - attachments = (attachments, ) + attachments = (attachments,) elif not isinstance(attachments, (tuple, set, list)): logger.error( - 'An invalid attachment url (type={}) was ' - 'specified.'.format(type(attachments))) + f"An invalid attachment url (type={type(attachments)}) was " + "specified." + ) return False # Iterate over our attachments - for _attachment in attachments: + for attachment in attachments: if self.location == ContentLocation.INACCESSIBLE: logger.warning( - "Attachments are disabled; ignoring {}" - .format(_attachment)) + f"Attachments are disabled; ignoring {attachment}" + ) return_status = False continue - if isinstance(_attachment, str): - logger.debug("Loading attachment: {}".format(_attachment)) + if isinstance(attachment, str): + logger.debug(f"Loading attachment: {attachment}") # Instantiate ourselves an object, this function throws or # returns None if it fails instance = AppriseAttachment.instantiate( - _attachment, asset=asset, cache=cache) + attachment, asset=asset, cache=cache + ) if not isinstance(instance, AttachBase): return_status = False continue - elif isinstance(_attachment, AppriseAttachment): + elif isinstance(attachment, AppriseAttachment): # We were provided a list of Apprise Attachments # append our content together - instance = _attachment.attachments + instance = attachment.attachments - elif not isinstance(_attachment, AttachBase): + elif not isinstance(attachment, AttachBase): logger.warning( - "An invalid attachment (type={}) was specified.".format( - type(_attachment))) + f"An invalid attachment (type={type(attachment)}) was" + " specified." 
+ ) return_status = False continue else: # our entry is of type AttachBase, so just go ahead and point # our instance to it for some post processing below - instance = _attachment + instance = attachment # Apply some simple logic if our location flag is set - if self.location and (( + if self.location and ( + ( self.location == ContentLocation.HOSTED - and instance.location != ContentLocation.HOSTED) - or instance.location == ContentLocation.INACCESSIBLE): + and instance.location != ContentLocation.HOSTED + ) + or instance.location == ContentLocation.INACCESSIBLE + ): logger.warning( - "Attachment was disallowed due to accessibility " - "restrictions ({}->{}): {}".format( - self.location, instance.location, - instance.url(privacy=True))) + "Attachment was disallowed due to accessibility" + f" restrictions ({self.location}->{instance.location}):" + f" {instance.url(privacy=True)}" + ) return_status = False continue @@ -212,30 +237,33 @@ def add(self, attachments, asset=None, cache=None): return return_status @staticmethod - def instantiate(url, asset=None, cache=None, suppress_exceptions=True): - """ - Returns the instance of a instantiated attachment plugin based on + def instantiate( + url: str, + asset: Optional[AppriseAsset] = None, + cache: Optional[Union[bool, int]] = None, + suppress_exceptions: bool = True, + ) -> Optional[AttachBase]: + """Returns the instance of a instantiated attachment plugin based on the provided Attachment URL. If the url fails to be parsed, then None is returned. A specified cache value will over-ride anything set - """ # Attempt to acquire the schema at the very least to allow our # attachment based urls. 
schema = GET_SCHEMA_RE.match(url) if schema is None: # Plan B is to assume we're dealing with a file - schema = 'file' - url = '{}://{}'.format(schema, URLBase.quote(url)) + schema = "file" + url = f"{schema}://{URLBase.quote(url)}" else: # Ensure our schema is always in lower case - schema = schema.group('schema').lower() + schema = schema.group("schema").lower() # Some basic validation if schema not in A_MGR: - logger.warning('Unsupported schema {}.'.format(schema)) + logger.warning(f"Unsupported schema {schema}.") return None # Parse our url details of the server object as dictionary containing @@ -244,78 +272,83 @@ def instantiate(url, asset=None, cache=None, suppress_exceptions=True): if not results: # Failed to parse the server URL - logger.warning('Unparseable URL {}.'.format(url)) + logger.warning(f"Unparseable URL {url}.") return None # Prepare our Asset Object - results['asset'] = \ + results["asset"] = ( asset if isinstance(asset, AppriseAsset) else AppriseAsset() + ) if cache is not None: # Force an over-ride of the cache value to what we have specified - results['cache'] = cache + results["cache"] = cache if suppress_exceptions: try: # Attempt to create an instance of our plugin using the parsed # URL information - attach_plugin = A_MGR[results['schema']](**results) + attach_plugin = A_MGR[results["schema"]](**results) except Exception: # the arguments are invalid or can not be used. 
- logger.warning('Could not load URL: %s' % url) + logger.warning(f"Could not load URL: {url}") return None else: # Attempt to create an instance of our plugin using the parsed # URL information but don't wrap it in a try catch - attach_plugin = A_MGR[results['schema']](**results) + attach_plugin = A_MGR[results["schema"]](**results) return attach_plugin - def clear(self): - """ - Empties our attachment list - - """ + def sync( + self, + abort_on_error: bool = True, + abort_if_empty: bool = True, + ) -> bool: + """Itereates over all of the attachments and retrieves them.""" + return ( + False + if abort_if_empty and not self.attachments + else ( + next((False for a in self.attachments if not a), True) + if abort_on_error + else next((True for a in self.attachments), True) + ) + ) + + def clear(self) -> None: + """Empties our attachment list.""" self.attachments[:] = [] - def size(self): - """ - Returns the total size of accumulated attachments - """ - return sum([len(a) for a in self.attachments if len(a) > 0]) + def size(self) -> int: + """Returns the total size of accumulated attachments.""" + return sum(len(a) for a in self.attachments if len(a) > 0) - def pop(self, index=-1): - """ - Removes an indexed Apprise Attachment from the stack and returns it. + def pop(self, index: int = -1) -> AttachBase: + """Removes an indexed Apprise Attachment from the stack and returns it. by default the last element is poped from the list """ # Remove our entry return self.attachments.pop(index) - def __getitem__(self, index): - """ - Returns the indexed entry of a loaded apprise attachments - """ + def __getitem__(self, index: int) -> AttachBase: + """Returns the indexed entry of a loaded apprise attachments.""" return self.attachments[index] - def __bool__(self): - """ - Allows the Apprise object to be wrapped in an 'if statement'. + def __bool__(self) -> bool: + """Allows the Apprise object to be wrapped in an 'if statement'. 
+ True is returned if at least one service has been loaded. """ - return True if self.attachments else False + return bool(self.attachments) - def __iter__(self): - """ - Returns an iterator to our attachment list - """ + def __iter__(self) -> Iterator[AttachBase]: + """Returns an iterator to our attachment list.""" return iter(self.attachments) - def __len__(self): - """ - Returns the number of attachment entries loaded - """ + def __len__(self) -> int: + """Returns the number of attachment entries loaded.""" return len(self.attachments) diff --git a/libs/apprise/apprise_attachment.pyi b/libs/apprise/apprise_attachment.pyi deleted file mode 100644 index a28acb1445..0000000000 --- a/libs/apprise/apprise_attachment.pyi +++ /dev/null @@ -1,37 +0,0 @@ -from typing import Any, Iterable, Optional, Union - -from . import AppriseAsset, ContentLocation -from .attachment import AttachBase - -_Attachment = Union[str, AttachBase] -_Attachments = Iterable[_Attachment] - -class AppriseAttachment: - def __init__( - self, - paths: Optional[_Attachments] = ..., - asset: Optional[AppriseAttachment] = ..., - cache: bool = ..., - location: Optional[ContentLocation] = ..., - **kwargs: Any - ) -> None: ... - def add( - self, - attachments: _Attachments, - asset: Optional[AppriseAttachment] = ..., - cache: Optional[bool] = ... - ) -> bool: ... - @staticmethod - def instantiate( - url: str, - asset: Optional[AppriseAsset] = ..., - cache: Optional[bool] = ..., - suppress_exceptions: bool = ... - ) -> NotifyBase: ... - def clear(self) -> None: ... - def size(self) -> int: ... - def pop(self, index: int = ...) -> AttachBase: ... - def __getitem__(self, index: int) -> AttachBase: ... - def __bool__(self) -> bool: ... - def __iter__(self) -> Iterator[AttachBase]: ... - def __len__(self) -> int: ... 
\ No newline at end of file diff --git a/libs/apprise/apprise_config.py b/libs/apprise/apprise_config.py index 5262b5be49..58551db7bc 100644 --- a/libs/apprise/apprise_config.py +++ b/libs/apprise/apprise_config.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. -# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -26,33 +25,43 @@ # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. -from . import ConfigBase -from . import CONFIG_FORMATS -from .manager_config import ConfigurationManager -from . import URLBase -from .asset import AppriseAsset +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + from . import common -from .utils.parse import GET_SCHEMA_RE, parse_list -from .utils.logic import is_exclusive_match +from .asset import AppriseAsset +from .config.base import ConfigBase from .logger import logger +from .manager_config import ConfigurationManager +from .url import URLBase +from .utils.logic import is_exclusive_match +from .utils.parse import GET_SCHEMA_RE, parse_list + +if TYPE_CHECKING: + from .plugins.base import NotifyBase # Grant access to our Configuration Manager Singleton C_MGR = ConfigurationManager() class AppriseConfig: - """ - Our Apprise Configuration File Manager - - - Supports a list of URLs defined one after another (text format) - - Supports a destinct YAML configuration format + """Our Apprise Configuration File Manager. + - Supports a list of URLs defined one after another (text format) + - Supports a destinct YAML configuration format """ - def __init__(self, paths=None, asset=None, cache=True, recursion=0, - insecure_includes=False, **kwargs): - """ - Loads all of the paths specified (if any). 
+ def __init__( + self, + paths: str | list[str] | None = None, + asset: AppriseAsset | None = None, + cache: bool | int = True, + recursion: int = 0, + insecure_includes: bool = False, + **kwargs: Any, + ) -> None: + """Loads all of the paths specified (if any). The path can either be a single string identifying one explicit location, otherwise you can pass in a series of locations to scan @@ -99,11 +108,12 @@ def __init__(self, paths=None, asset=None, cache=True, recursion=0, """ # Initialize a server list of URLs - self.configs = list() + self.configs = [] # Prepare our Asset Object - self.asset = \ + self.asset = ( asset if isinstance(asset, AppriseAsset) else AppriseAsset() + ) # Set our cache flag self.cache = cache @@ -120,10 +130,16 @@ def __init__(self, paths=None, asset=None, cache=True, recursion=0, return - def add(self, configs, asset=None, tag=None, cache=True, recursion=None, - insecure_includes=None): - """ - Adds one or more config URLs into our list. + def add( + self, + configs: str | ConfigBase | list[str | ConfigBase], + asset: AppriseAsset | None = None, + tag: str | list[str] | None = None, + cache: bool | int = True, + recursion: int | None = None, + insecure_includes: bool | None = None, + ) -> bool: + """Adds one or more config URLs into our list. You can override the global asset if you wish by including it with the config(s) that you add. @@ -144,9 +160,9 @@ def add(self, configs, asset=None, tag=None, cache=True, recursion=None, Optionally override the default recursion value. - Optionally override the insecure_includes flag. - if insecure_includes is set to True then all plugins that are - set to a STRICT mode will be a treated as ALWAYS. + Optionally override the insecure_includes flag. if insecure_includes is + set to True then all plugins that are set to a STRICT mode will be a + treated as ALWAYS. 
""" # Initialize our return status @@ -159,9 +175,11 @@ def add(self, configs, asset=None, tag=None, cache=True, recursion=None, recursion = recursion if recursion is not None else self.recursion # Initialize our default insecure_includes value - insecure_includes = \ - insecure_includes if insecure_includes is not None \ + insecure_includes = ( + insecure_includes + if insecure_includes is not None else self.insecure_includes + ) if asset is None: # prepare default asset @@ -174,36 +192,43 @@ def add(self, configs, asset=None, tag=None, cache=True, recursion=None, elif isinstance(configs, str): # Save our path - configs = (configs, ) + configs = (configs,) elif not isinstance(configs, (tuple, set, list)): logger.error( - 'An invalid configuration path (type={}) was ' - 'specified.'.format(type(configs))) + f"An invalid configuration path (type={type(configs)}) was " + "specified." + ) return False # Iterate over our configuration - for _config in configs: + for config in configs: - if isinstance(_config, ConfigBase): + if isinstance(config, ConfigBase): # Go ahead and just add our configuration into our list - self.configs.append(_config) + self.configs.append(config) continue - elif not isinstance(_config, str): + elif not isinstance(config, str): logger.warning( - "An invalid configuration (type={}) was specified.".format( - type(_config))) + f"An invalid configuration (type={type(config)}) was" + " specified." 
+ ) return_status = False continue - logger.debug("Loading configuration: {}".format(_config)) + logger.debug(f"Loading configuration: {config}") # Instantiate ourselves an object, this function throws or # returns None if it fails instance = AppriseConfig.instantiate( - _config, asset=asset, tag=tag, cache=cache, - recursion=recursion, insecure_includes=insecure_includes) + config, + asset=asset, + tag=tag, + cache=cache, + recursion=recursion, + insecure_includes=insecure_includes, + ) if not isinstance(instance, ConfigBase): return_status = False continue @@ -214,31 +239,39 @@ def add(self, configs, asset=None, tag=None, cache=True, recursion=None, # Return our status return return_status - def add_config(self, content, asset=None, tag=None, format=None, - recursion=None, insecure_includes=None): - """ - Adds one configuration file in it's raw format. Content gets loaded as - a memory based object and only exists for the life of this + def add_config( + self, + content: str, + asset: AppriseAsset | None = None, + tag: str | list[str] | None = None, + format: str | None = None, + recursion: int | None = None, + insecure_includes: bool | None = None, + ) -> bool: + """Adds one configuration file in it's raw format. Content gets loaded + as a memory based object and only exists for the life of this AppriseConfig object it was loaded into. - If you know the format ('yaml' or 'text') you can specify - it for slightly less overhead during this call. Otherwise the - configuration is auto-detected. + If you know the format ('yaml' or 'text') you can specify it for + slightly less overhead during this call. Otherwise the configuration + is auto-detected. Optionally override the default recursion value. - Optionally override the insecure_includes flag. - if insecure_includes is set to True then all plugins that are - set to a STRICT mode will be a treated as ALWAYS. + Optionally override the insecure_includes flag. 
if insecure_includes is + set to True then all plugins that are set to a STRICT mode will be a + treated as ALWAYS. """ # Initialize our default recursion value recursion = recursion if recursion is not None else self.recursion # Initialize our default insecure_includes value - insecure_includes = \ - insecure_includes if insecure_includes is not None \ + insecure_includes = ( + insecure_includes + if insecure_includes is not None else self.insecure_includes + ) if asset is None: # prepare default asset @@ -246,20 +279,28 @@ def add_config(self, content, asset=None, tag=None, format=None, if not isinstance(content, str): logger.warning( - "An invalid configuration (type={}) was specified.".format( - type(content))) + f"An invalid configuration (type={type(content)}) was" + " specified." + ) return False - logger.debug("Loading raw configuration: {}".format(content)) + logger.debug(f"Loading raw configuration: {content}") # Create ourselves a ConfigMemory Object to store our configuration - instance = C_MGR['memory']( - content=content, format=format, asset=asset, tag=tag, - recursion=recursion, insecure_includes=insecure_includes) - - if instance.config_format not in CONFIG_FORMATS: + instance = C_MGR["memory"]( + content=content, + format=format, + asset=asset, + tag=tag, + recursion=recursion, + insecure_includes=insecure_includes, + ) + + if not (instance.config_format and + instance.config_format.value in common.CONFIG_FORMATS): logger.warning( - "The format of the configuration could not be deteced.") + "The format of the configuration could not be detected." 
+ ) return False # Add our initialized plugin to our server listings @@ -268,21 +309,23 @@ def add_config(self, content, asset=None, tag=None, format=None, # Return our status return True - def servers(self, tag=common.MATCH_ALL_TAG, match_always=True, *args, - **kwargs): - """ - Returns all of our servers dynamically build based on parsed + def servers( + self, + tag: str | list[str] = common.MATCH_ALL_TAG, + match_always: bool = True, + *args: Any, + **kwargs: Any, + ) -> list[NotifyBase]: + """Returns all of our servers dynamically build based on parsed configuration. If a tag is specified, it applies to the configuration sources themselves and not the notification services inside them. - This is for filtering the configuration files polled for - results. - - If the anytag is set, then any notification that is found - set with that tag are included in the response. + This is for filtering the configuration files polled for results. + If the anytag is set, then any notification that is found set with that + tag are included in the response. 
""" # A match_always flag allows us to pick up on our 'any' keyword @@ -299,14 +342,17 @@ def servers(self, tag=common.MATCH_ALL_TAG, match_always=True, *args, # tag=[('tagA', 'tagC'), 'tagB'] = (tagA and tagC) or tagB # tag=[('tagB', 'tagC')] = tagB and tagC - response = list() + response = [] for entry in self.configs: # Apply our tag matching based on our defined logic if is_exclusive_match( - logic=tag, data=entry.tags, match_all=common.MATCH_ALL_TAG, - match_always=match_always): + logic=tag, + data=entry.tags, + match_all=common.MATCH_ALL_TAG, + match_always=match_always, + ): # Build ourselves a list of services dynamically and return the # as a list response.extend(entry.servers()) @@ -314,30 +360,35 @@ def servers(self, tag=common.MATCH_ALL_TAG, match_always=True, *args, return response @staticmethod - def instantiate(url, asset=None, tag=None, cache=None, - recursion=0, insecure_includes=False, - suppress_exceptions=True): - """ - Returns the instance of a instantiated configuration plugin based on - the provided Config URL. If the url fails to be parsed, then None - is returned. - + def instantiate( + url: str, + asset: AppriseAsset | None = None, + tag: str | list[str] | None = None, + cache: bool | int | None = None, + recursion: int = 0, + insecure_includes: bool = False, + suppress_exceptions: bool = True, + ) -> ConfigBase | None: + """Returns the instance of a instantiated configuration plugin based on + the provided Config URL. + + If the url fails to be parsed, then None is returned. """ # Attempt to acquire the schema at the very least to allow our # configuration based urls. 
schema = GET_SCHEMA_RE.match(url) if schema is None: # Plan B is to assume we're dealing with a file - schema = 'file' - url = '{}://{}'.format(schema, URLBase.quote(url)) + schema = "file" + url = f"{schema}://{URLBase.quote(url)}" else: # Ensure our schema is always in lower case - schema = schema.group('schema').lower() + schema = schema.group("schema").lower() # Some basic validation if schema not in C_MGR: - logger.warning('Unsupported schema {}.'.format(schema)) + logger.warning(f"Unsupported schema {schema}.") return None # Parse our url details of the server object as dictionary containing @@ -346,55 +397,51 @@ def instantiate(url, asset=None, tag=None, cache=None, if not results: # Failed to parse the server URL - logger.warning('Unparseable URL {}.'.format(url)) + logger.warning(f"Unparseable URL {url}.") return None # Build a list of tags to associate with the newly added notifications - results['tag'] = set(parse_list(tag)) + results["tag"] = set(parse_list(tag)) # Prepare our Asset Object - results['asset'] = \ + results["asset"] = ( asset if isinstance(asset, AppriseAsset) else AppriseAsset() + ) if cache is not None: # Force an over-ride of the cache value to what we have specified - results['cache'] = cache + results["cache"] = cache # Recursion can never be parsed from the URL - results['recursion'] = recursion + results["recursion"] = recursion # Insecure includes flag can never be parsed from the URL - results['insecure_includes'] = insecure_includes + results["insecure_includes"] = insecure_includes if suppress_exceptions: try: # Attempt to create an instance of our plugin using the parsed # URL information - cfg_plugin = C_MGR[results['schema']](**results) + cfg_plugin = C_MGR[results["schema"]](**results) except Exception: # the arguments are invalid or can not be used. 
- logger.warning('Could not load URL: %s' % url) + logger.warning(f"Could not load URL: {url}") return None else: # Attempt to create an instance of our plugin using the parsed # URL information but don't wrap it in a try catch - cfg_plugin = C_MGR[results['schema']](**results) + cfg_plugin = C_MGR[results["schema"]](**results) return cfg_plugin - def clear(self): - """ - Empties our configuration list - - """ + def clear(self) -> None: + """Empties our configuration list.""" self.configs[:] = [] - def server_pop(self, index): - """ - Removes an indexed Apprise Notification from the servers - """ + def server_pop(self, index: int) -> NotifyBase: + """Removes an indexed Apprise Notification from the servers.""" # Tracking variables prev_offset = -1 @@ -408,45 +455,43 @@ def server_pop(self, index): if offset >= index: # we can pop an notification from our config stack - return entry.pop(index if prev_offset == -1 - else (index - prev_offset - 1)) + return entry.pop( + index + if prev_offset == -1 + else (index - prev_offset - 1) + ) # Update our old offset prev_offset = offset # If we reach here, then we indexed out of range - raise IndexError('list index out of range') + raise IndexError("list index out of range") - def pop(self, index=-1): - """ - Removes an indexed Apprise Configuration from the stack and returns it. + def pop(self, index: int = -1) -> ConfigBase: + """Removes an indexed Apprise Configuration from the stack and returns + it. By default, the last element is removed from the list """ # Remove our entry return self.configs.pop(index) - def __getitem__(self, index): - """ - Returns the indexed config entry of a loaded apprise configuration - """ + def __getitem__(self, index: int) -> ConfigBase: + """Returns the indexed config entry of a loaded apprise + configuration.""" return self.configs[index] - def __bool__(self): - """ - Allows the Apprise object to be wrapped in an 'if statement'. 
+ def __bool__(self) -> bool: + """Allows the Apprise object to be wrapped in an 'if statement'. + True is returned if at least one service has been loaded. """ - return True if self.configs else False + return bool(self.configs) - def __iter__(self): - """ - Returns an iterator to our config list - """ + def __iter__(self): # type: () -> Iterator[ConfigBase] + """Returns an iterator to our config list.""" return iter(self.configs) - def __len__(self): - """ - Returns the number of config entries loaded - """ + def __len__(self) -> int: + """Returns the number of config entries loaded.""" return len(self.configs) diff --git a/libs/apprise/apprise_config.pyi b/libs/apprise/apprise_config.pyi deleted file mode 100644 index 9ea819ac39..0000000000 --- a/libs/apprise/apprise_config.pyi +++ /dev/null @@ -1,48 +0,0 @@ -from typing import Any, Iterable, Iterator, List, Optional, Union - -from . import AppriseAsset, NotifyBase -from .config import ConfigBase - -_Configs = Union[ConfigBase, str, Iterable[str]] - -class AppriseConfig: - def __init__( - self, - paths: Optional[_Configs] = ..., - asset: Optional[AppriseAsset] = ..., - cache: bool = ..., - recursion: int = ..., - insecure_includes: bool = ..., - **kwargs: Any - ) -> None: ... - def add( - self, - configs: _Configs, - asset: Optional[AppriseAsset] = ..., - cache: bool = ..., - recursion: Optional[bool] = ..., - insecure_includes: Optional[bool] = ... - ) -> bool: ... - def add_config( - self, - content: str, - asset: Optional[AppriseAsset] = ..., - tag: Optional[str] = ..., - format: Optional[str] = ..., - recursion: Optional[int] = ..., - insecure_includes: Optional[bool] = ... - ) -> bool: ... - def servers(self, tag: str = ..., *args: Any, **kwargs: Any) -> List[ConfigBase]: ... - def instantiate( - url: str, - asset: Optional[AppriseAsset] = ..., - tag: Optional[str] = ..., - cache: Optional[bool] = ... - ) -> NotifyBase: ... - def clear(self) -> None: ... - def server_pop(self, index: int) -> ConfigBase: ... 
- def pop(self, index: int = ...) -> ConfigBase: ... - def __getitem__(self, index: int) -> ConfigBase: ... - def __bool__(self) -> bool: ... - def __iter__(self) -> Iterator[ConfigBase]: ... - def __len__(self) -> int: ... \ No newline at end of file diff --git a/libs/apprise/asset.py b/libs/apprise/asset.py index aef90f08f3..548ca1d165 100644 --- a/libs/apprise/asset.py +++ b/libs/apprise/asset.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. -# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -26,89 +25,97 @@ # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. +from datetime import datetime, tzinfo +from os.path import abspath, dirname, isfile, join import re +from typing import Any, Optional, Union from uuid import uuid4 -from os.path import join -from os.path import dirname -from os.path import isfile -from os.path import abspath -from .common import NotifyType -from .common import PersistentStoreMode -from .manager_plugins import NotificationManager +from .common import ( + NotifyFormat, + NotifyImageSize, + NotifyType, + PersistentStoreMode, +) +from .manager_plugins import NotificationManager +from .utils.time import zoneinfo # Grant access to our Notification Manager Singleton N_MGR = NotificationManager() class AppriseAsset: - """ - Provides a supplimentary class that can be used to provide extra - information and details that can be used by Apprise such as providing - an alternate location to where images/icons can be found and the - URL masks. - - Any variable that starts with an underscore (_) can only be initialized - by this class manually and will/can not be parsed from a configuration - file. 
+ """Provides a supplimentary class that can be used to provide extra + information and details that can be used by Apprise such as providing an + alternate location to where images/icons can be found and the URL masks. + Any variable that starts with an underscore (_) can only be initialized by + this class manually and will/can not be parsed from a configuration file. """ + # Application Identifier - app_id = 'Apprise' + app_id = "Apprise" # Application Description - app_desc = 'Apprise Notifications' + app_desc = "Apprise Notifications" # Provider URL - app_url = 'https://github.com/caronc/apprise' + app_url = "https://github.com/caronc/apprise" # A Simple Mapping of Colors; For every NOTIFY_TYPE identified, # there should be a mapping to it's color here: html_notify_map = { - NotifyType.INFO: '#3AA3E3', - NotifyType.SUCCESS: '#3AA337', - NotifyType.FAILURE: '#A32037', - NotifyType.WARNING: '#CACF29', + NotifyType.INFO: "#3AA3E3", + NotifyType.SUCCESS: "#3AA337", + NotifyType.FAILURE: "#A32037", + NotifyType.WARNING: "#CACF29", } # The default color to return if a mapping isn't found in our table above - default_html_color = '#888888' + default_html_color = "#888888" # Ascii Notification ascii_notify_map = { - NotifyType.INFO: '[i]', - NotifyType.SUCCESS: '[+]', - NotifyType.FAILURE: '[!]', - NotifyType.WARNING: '[~]', + NotifyType.INFO: "[i]", + NotifyType.SUCCESS: "[+]", + NotifyType.FAILURE: "[!]", + NotifyType.WARNING: "[~]", } # The default ascii to return if a mapping isn't found in our table above - default_ascii_chars = '[?]' + default_ascii_chars = "[?]" # The default image extension to use - default_extension = '.png' + default_extension = ".png" + + # The default image size if one isn't specified + default_image_size = NotifyImageSize.XY_256 # The default theme - theme = 'default' + theme = "default" # Image URL Mask - image_url_mask = \ - 'https://github.com/caronc/apprise/raw/master/apprise/assets/' \ - 
'themes/{THEME}/apprise-{TYPE}-{XY}{EXTENSION}' + image_url_mask = ( + "https://github.com/caronc/apprise/raw/master/apprise/assets/" + "themes/{THEME}/apprise-{TYPE}-{XY}{EXTENSION}" + ) # Application Logo - image_url_logo = \ - 'https://github.com/caronc/apprise/raw/master/apprise/assets/' \ - 'themes/{THEME}/apprise-logo.png' + image_url_logo = ( + "https://github.com/caronc/apprise/raw/master/apprise/assets/" + "themes/{THEME}/apprise-logo.png" + ) # Image Path Mask - image_path_mask = abspath(join( - dirname(__file__), - 'assets', - 'themes', - '{THEME}', - 'apprise-{TYPE}-{XY}{EXTENSION}', - )) + image_path_mask = abspath( + join( + dirname(__file__), + "assets", + "themes", + "{THEME}", + "apprise-{TYPE}-{XY}{EXTENSION}", + ) + ) # This value can also be set on calls to Apprise.notify(). This allows # you to let Apprise upfront the type of data being passed in. This @@ -141,7 +148,7 @@ class AppriseAsset: interpret_escapes = False # Defines the encoding of the content passed into Apprise - encoding = 'utf-8' + encoding = "utf-8" # Automatically generate our Pretty Good Privacy (PGP) keys if one isn't # present and our environment configuration allows for it. @@ -149,6 +156,12 @@ class AppriseAsset: # if Persistent Storage was set to `memory` pgp_autogen = True + # Automatically generate our Privacy Enhanced Mail (PEM) keys if one isn't + # present and our environment configuration allows for it. + # For example, a case where the environment wouldn't allow for it would be + # if Persistent Storage was set to `memory` + pem_autogen = True + # For more detail see CWE-312 @ # https://cwe.mitre.org/data/definitions/312.html # @@ -170,7 +183,7 @@ class AppriseAsset: # Optionally define the default salt to apply to all persistent storage # namespace generation (unless over-ridden) - __storage_salt = b'' + __storage_salt = b"" # Optionally define the namespace length of the directories created by # the storage. 
If this is set to zero, then the length is pre-determined @@ -194,19 +207,31 @@ class AppriseAsset: # A unique identifer we can use to associate our calling source _uid = str(uuid4()) - def __init__(self, plugin_paths=None, storage_path=None, - storage_mode=None, storage_salt=None, - storage_idlen=None, **kwargs): - """ - Asset Initialization - - """ + # Default timezone to use (pass in timezone value) + # A list of timezones can be found here: + # https://en.wikipedia.org/wiki/List_of_tz_database_time_zones + # You can specify things such as 'America/Montreal' + # If no timezone is specified, then the one detected on the system + # is uzed + _tzinfo = None + + def __init__( + self, + plugin_paths: Optional[list[str]] = None, + storage_path: Optional[str] = None, + storage_mode: Optional[Union[str, PersistentStoreMode]] = None, + storage_salt: Optional[Union[str, bytes]] = None, + storage_idlen: Optional[int] = None, + timezone: Optional[Union[str, tzinfo]] = None, + **kwargs: Any + ) -> None: + """Asset Initialization.""" # Assign default arguments if specified for key, value in kwargs.items(): if not hasattr(AppriseAsset, key): raise AttributeError( - 'AppriseAsset init(): ' - 'An invalid key {} was specified.'.format(key)) + f"AppriseAsset init(): An invalid key {key} was specified." 
+ ) setattr(self, key, value) @@ -221,19 +246,42 @@ def __init__(self, plugin_paths=None, storage_path=None, if storage_mode: # Define how our persistent storage behaves - self.__storage_mode = storage_mode + try: + self.__storage_mode = ( + storage_mode if isinstance(storage_mode, NotifyFormat) + else PersistentStoreMode(storage_mode.lower()) + ) + + except (AttributeError, ValueError, TypeError): + err = ( + f"An invalid persistent store mode ({storage_mode}) was " + "specified.") + raise AttributeError(err) from None if isinstance(storage_idlen, int): # Define the number of characters utilized from our namespace lengh if storage_idlen < 0: # Unsupported type raise ValueError( - 'AppriseAsset storage_idlen(): Value must ' - 'be an integer and > 0') + "AppriseAsset storage_idlen(): Value must " + "be an integer and > 0" + ) # Store value self.__storage_idlen = storage_idlen + if isinstance(timezone, tzinfo): + self._tzinfo = timezone + + elif timezone is not None: + self._tzinfo = zoneinfo(timezone) + if not self._tzinfo: + raise AttributeError( + "AppriseAsset timezone provided is invalid") from None + else: + # Default our timezone to what is detected on the system + self._tzinfo = datetime.now().astimezone().tzinfo + if storage_salt is not None: # Define the number of characters utilized from our namespace lengh @@ -247,17 +295,22 @@ def __init__(self, plugin_paths=None, storage_path=None, except UnicodeEncodeError: # Bad data; don't pass it along raise ValueError( - 'AppriseAsset namespace_salt(): ' - 'Value provided could not be encoded') + "AppriseAsset namespace_salt(): " + "Value provided could not be encoded" + ) from None else: # Unsupported raise ValueError( - 'AppriseAsset namespace_salt(): Value provided must be ' - 'string or bytes object') + "AppriseAsset namespace_salt(): Value provided must be " + "string or bytes object" + ) - def color(self, notify_type, color_type=None): - """ - Returns an HTML mapped color based on passed in notify type + def 
color( + self, + notify_type: NotifyType, + color_type: Optional[type] = None, + ) -> Union[str, int, tuple[int, int, int]]: + """Returns an HTML mapped color based on passed in notify type. if color_type is: None then a standard hex string is returned as @@ -265,12 +318,12 @@ def color(self, notify_type, color_type=None): int then the integer representation is returned tuple then the the red, green, blue is returned in a tuple - """ # Attempt to get the type, otherwise return a default grey # if we couldn't look up the entry - color = self.html_notify_map.get(notify_type, self.default_html_color) + color = self.html_notify_map.get( + notify_type, self.default_html_color) if color_type is None: # This is the default return type return color @@ -285,22 +338,25 @@ def color(self, notify_type, color_type=None): # Unsupported type raise ValueError( - 'AppriseAsset html_color(): An invalid color_type was specified.') - - def ascii(self, notify_type): - """ - Returns an ascii representation based on passed in notify type + "AppriseAsset html_color(): An invalid color_type was specified." + ) - """ + def ascii(self, notify_type: NotifyType) -> str: + """Returns an ascii representation based on passed in notify type.""" # look our response up - return self.ascii_notify_map.get(notify_type, self.default_ascii_chars) - - def image_url(self, notify_type, image_size, logo=False, extension=None): - """ - Apply our mask to our image URL + return self.ascii_notify_map.get( + notify_type, self.default_ascii_chars) + + def image_url( + self, + notify_type: NotifyType, + image_size: Optional[NotifyImageSize] = None, + logo: bool = False, + extension: Optional[str] = None, + ) -> Optional[str]: + """Apply our mask to our image URL. 
if logo is set to True, then the logo_url is used instead - """ url_mask = self.image_url_logo if logo else self.image_url_mask @@ -311,27 +367,32 @@ def image_url(self, notify_type, image_size, logo=False, extension=None): if extension is None: extension = self.default_extension + if image_size is None: + image_size = self.default_image_size + re_map = { - '{THEME}': self.theme if self.theme else '', - '{TYPE}': notify_type, - '{XY}': image_size, - '{EXTENSION}': extension, + "{THEME}": self.theme if self.theme else "", + "{TYPE}": notify_type.value, + "{XY}": image_size.value, + "{EXTENSION}": extension, } # Iterate over above list and store content accordingly re_table = re.compile( - r'(' + '|'.join(re_map.keys()) + r')', + r"(" + "|".join(re_map.keys()) + r")", re.IGNORECASE, ) return re_table.sub(lambda x: re_map[x.group()], url_mask) - def image_path(self, notify_type, image_size, must_exist=True, - extension=None): - """ - Apply our mask to our image file path - - """ + def image_path( + self, + notify_type: NotifyType, + image_size: NotifyImageSize, + must_exist: bool = True, + extension: Optional[str] = None, + ) -> Optional[str]: + """Apply our mask to our image file path.""" if not self.image_path_mask: # No image to return @@ -341,15 +402,15 @@ def image_path(self, notify_type, image_size, must_exist=True, extension = self.default_extension re_map = { - '{THEME}': self.theme if self.theme else '', - '{TYPE}': notify_type, - '{XY}': image_size, - '{EXTENSION}': extension, + "{THEME}": self.theme if self.theme else "", + "{TYPE}": notify_type.value, + "{XY}": image_size.value, + "{EXTENSION}": extension, } # Iterate over above list and store content accordingly re_table = re.compile( - r'(' + '|'.join(re_map.keys()) + r')', + r"(" + "|".join(re_map.keys()) + r")", re.IGNORECASE, ) @@ -361,11 +422,14 @@ def image_path(self, notify_type, image_size, must_exist=True, # Return what we parsed return path - def image_raw(self, notify_type, image_size, 
extension=None): - """ - Returns the raw image if it can (otherwise the function returns None) - - """ + def image_raw( + self, + notify_type: NotifyType, + image_size: NotifyImageSize, + extension: Optional[str] = None, + ) -> Optional[bytes]: + """Returns the raw image if it can (otherwise the function returns + None)""" path = self.image_path( notify_type=notify_type, @@ -374,88 +438,77 @@ def image_raw(self, notify_type, image_size, extension=None): ) if path: try: - with open(path, 'rb') as fd: + with open(path, "rb") as fd: return fd.read() - except (OSError, IOError): + except OSError: # We can't access the file return None return None - def details(self): - """ - Returns the details associated with the AppriseAsset object - - """ + def details(self) -> dict[str, str]: + """Returns the details associated with the AppriseAsset object.""" return { - 'app_id': self.app_id, - 'app_desc': self.app_desc, - 'default_extension': self.default_extension, - 'theme': self.theme, - 'image_path_mask': self.image_path_mask, - 'image_url_mask': self.image_url_mask, - 'image_url_logo': self.image_url_logo, + "app_id": self.app_id, + "app_desc": self.app_desc, + "default_extension": self.default_extension, + "theme": self.theme, + "image_path_mask": self.image_path_mask, + "image_url_mask": self.image_url_mask, + "image_url_logo": self.image_url_logo, } @staticmethod - def hex_to_rgb(value): - """ - Takes a hex string (such as #00ff00) and returns a tuple in the form + def hex_to_rgb(value: str) -> tuple[int, int, int]: + """Takes a hex string (such as #00ff00) and returns a tuple in the form of (red, green, blue) eg: #00ff00 becomes : (0, 65535, 0) - """ - value = value.lstrip('#') + value = value.lstrip("#") lv = len(value) - return tuple(int(value[i:i + lv // 3], 16) - for i in range(0, lv, lv // 3)) + return tuple( + int(value[i : i + lv // 3], 16) for i in range(0, lv, lv // 3) + ) @staticmethod - def hex_to_int(value): - """ - Takes a hex string (such as #00ff00) and 
returns its integer - equivalent + def hex_to_int(value: str) -> int: + """Takes a hex string (such as #00ff00) and returns its integer + equivalent. eg: #00000f becomes : 15 - """ - return int(value.lstrip('#'), 16) + return int(value.lstrip("#"), 16) @property - def plugin_paths(self): - """ - Return the plugin paths defined - """ + def plugin_paths(self) -> list[str]: + """Return the plugin paths defined.""" return self.__plugin_paths @property - def storage_path(self): - """ - Return the persistent storage path defined - """ + def storage_path(self) -> Optional[str]: + """Return the persistent storage path defined.""" return self.__storage_path @property - def storage_mode(self): - """ - Return the persistent storage mode defined - """ + def storage_mode(self) -> PersistentStoreMode: + """Return the persistent storage mode defined.""" return self.__storage_mode @property - def storage_salt(self): - """ - Return the provided namespace salt; this is always of type bytes - """ + def storage_salt(self) -> bytes: + """Return the provided namespace salt; this is always of type bytes.""" return self.__storage_salt @property - def storage_idlen(self): - """ - Return the persistent storage id length - """ + def storage_idlen(self) -> int: + """Return the persistent storage id length.""" return self.__storage_idlen + + @property + def tzinfo(self) -> tzinfo: + """Return the timezone object""" + return self._tzinfo diff --git a/libs/apprise/asset.pyi b/libs/apprise/asset.pyi deleted file mode 100644 index 08303341b1..0000000000 --- a/libs/apprise/asset.pyi +++ /dev/null @@ -1,34 +0,0 @@ -from typing import Dict, Optional - -from . 
import NotifyFormat, NotifyType - -class AppriseAsset: - app_id: str - app_desc: str - app_url: str - html_notify_map: Dict[NotifyType, str] - default_html_color: str - default_extension: str - theme: Optional[str] - image_url_mask: str - image_url_logo: str - image_path_mask: Optional[str] - body_format: Optional[NotifyFormat] - async_mode: bool - interpret_escapes: bool - def __init__( - self, - app_id: str = ..., - app_desc: str = ..., - app_url: str = ..., - html_notify_map: Dict[NotifyType, str] = ..., - default_html_color: str = ..., - default_extension: str = ..., - theme: Optional[str] = ..., - image_url_mask: str = ..., - image_url_logo: str = ..., - image_path_mask: Optional[str] = ..., - body_format: Optional[NotifyFormat] = ..., - async_mode: bool = ..., - interpret_escapes: bool = ... - ) -> None: ... \ No newline at end of file diff --git a/libs/apprise/attachment/__init__.py b/libs/apprise/attachment/__init__.py index 1f09941dc2..884095a963 100644 --- a/libs/apprise/attachment/__init__.py +++ b/libs/apprise/attachment/__init__.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. -# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -27,14 +26,14 @@ # POSSIBILITY OF SUCH DAMAGE. 
# Used for testing -from .base import AttachBase from ..manager_attachment import AttachmentManager +from .base import AttachBase # Initalize our Attachment Manager Singleton A_MGR = AttachmentManager() __all__ = [ # Reference - 'AttachBase', - 'AttachmentManager', + "AttachBase", + "AttachmentManager", ] diff --git a/libs/apprise/attachment/base.py b/libs/apprise/attachment/base.py index 0161cab08f..5020958c96 100644 --- a/libs/apprise/attachment/base.py +++ b/libs/apprise/attachment/base.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. -# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -26,34 +25,34 @@ # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. +import base64 +import contextlib +import mimetypes import os import time -import mimetypes -import base64 + from .. 
import exception -from ..url import URLBase -from ..utils.parse import parse_bool from ..common import ContentLocation from ..locale import gettext_lazy as _ +from ..url import URLBase +from ..utils.parse import parse_bool class AttachBase(URLBase): - """ - This is the base class for all supported attachment types - """ + """This is the base class for all supported attachment types.""" # For attachment type detection; this amount of data is read into memory # 128KB (131072B) max_detect_buffer_size = 131072 # Unknown mimetype - unknown_mimetype = 'application/octet-stream' + unknown_mimetype = "application/octet-stream" # Our filename when we can't otherwise determine one - unknown_filename = 'apprise-attachment' + unknown_filename = "apprise-attachment" # Our filename extension when we can't otherwise determine one - unknown_filename_extension = '.obj' + unknown_filename_extension = ".obj" # The strict argument is a flag specifying whether the list of known MIME # types is limited to only the official types registered with IANA. When @@ -79,33 +78,32 @@ class AttachBase(URLBase): # These act the same way as tokens except they are optional and/or # have default values set if mandatory. 
This rule must be followed template_args = { - 'cache': { - 'name': _('Cache Age'), - 'type': 'int', + "cache": { + "name": _("Cache Age"), + "type": "int", # We default to (600) which means we cache for 10 minutes - 'default': 600, + "default": 600, }, - 'mime': { - 'name': _('Forced Mime Type'), - 'type': 'string', + "mime": { + "name": _("Forced Mime Type"), + "type": "string", }, - 'name': { - 'name': _('Forced File Name'), - 'type': 'string', + "name": { + "name": _("Forced File Name"), + "type": "string", }, - 'verify': { - 'name': _('Verify SSL'), + "verify": { + "name": _("Verify SSL"), # SSL Certificate Authority Verification - 'type': 'bool', + "type": "bool", # Provide a default - 'default': True, + "default": True, }, } def __init__(self, name=None, mimetype=None, cache=None, **kwargs): - """ - Initialize some general logging and common server arguments that will - keep things consistent when working with the configurations that + """Initialize some general logging and common server arguments that + will keep things consistent when working with the configurations that inherit this class. Optionally provide a filename to over-ride name associated with the @@ -160,15 +158,13 @@ def __init__(self, name=None, mimetype=None, cache=None, **kwargs): self.cache = cache if isinstance(cache, bool) else int(cache) except (TypeError, ValueError): - err = 'An invalid cache value ({}) was specified.'.format( - cache) + err = f"An invalid cache value ({cache}) was specified." self.logger.warning(err) - raise TypeError(err) + raise TypeError(err) from None # Some simple error checking if self.cache < 0: - err = 'A negative cache value ({}) was specified.'.format( - cache) + err = f"A negative cache value ({cache}) was specified." 
self.logger.warning(err) raise TypeError(err) @@ -176,22 +172,28 @@ def __init__(self, name=None, mimetype=None, cache=None, **kwargs): self.cache = None # Validate mimetype if specified - if self._mimetype: - if next((t for t in mimetypes.types_map.values() - if self._mimetype == t), None) is None: - err = 'An invalid mime-type ({}) was specified.'.format( - mimetype) - self.logger.warning(err) - raise TypeError(err) + if self._mimetype and ( + next( + ( + t + for t in mimetypes.types_map.values() + if self._mimetype == t + ), + None, + ) + is None): + err = f"An invalid mime-type ({mimetype}) was specified." + self.logger.warning(err) + raise TypeError(err) return @property def path(self): - """ - Returns the absolute path to the filename. If this is not known or - is know but has been considered expired (due to cache setting), then - content is re-retrieved prior to returning. + """Returns the absolute path to the filename. + + If this is not known or is know but has been considered expired (due to + cache setting), then content is re-retrieved prior to returning. """ if not self.exists(): @@ -202,9 +204,7 @@ def path(self): @property def name(self): - """ - Returns the filename - """ + """Returns the filename.""" if self._name: # return our fixed content return self._name @@ -216,20 +216,19 @@ def name(self): if not self.detected_name: # If we get here, our download was successful but we don't have a # filename based on our content. - extension = mimetypes.guess_extension(self.mimetype) - self.detected_name = '{}{}'.format( - self.unknown_filename, - extension if extension else self.unknown_filename_extension) + ext = mimetypes.guess_extension(self.mimetype) + self.detected_name = ( + f"{self.unknown_filename}" + f"{ext if ext else self.unknown_filename_extension}" + ) return self.detected_name @property def mimetype(self): - """ - Returns mime type (if one is present). + """Returns mime type (if one is present). 
- Content is cached once determied to prevent overhead of future - calls. + Content is cached once determied to prevent overhead of future calls. """ if not self.exists(): # we could not obtain our attachment @@ -242,36 +241,40 @@ def mimetype(self): if not self.detected_mimetype: # guess_type() returns: (type, encoding) and sets type to None # if it can't otherwise determine it. - try: + with contextlib.suppress(TypeError): # Directly reference _name and detected_name to prevent # recursion loop (as self.name calls this function) self.detected_mimetype, _ = mimetypes.guess_type( - self._name if self._name - else self.detected_name, strict=self.strict) - - except TypeError: - # Thrown if None was specified in filename section - pass + self._name if self._name else self.detected_name, + strict=self.strict, + ) # Return our mime type - return self.detected_mimetype \ - if self.detected_mimetype else self.unknown_mimetype + return ( + self.detected_mimetype + if self.detected_mimetype + else self.unknown_mimetype + ) def exists(self, retrieve_if_missing=True): - """ - Simply returns true if the object has downloaded and stored the - attachment AND the attachment has not expired. - """ + """Simply returns true if the object has downloaded and stored the + attachment AND the attachment has not expired.""" if self.location == ContentLocation.INACCESSIBLE: # our content is inaccessible return False - cache = self.template_args['cache']['default'] \ - if self.cache is None else self.cache + cache = ( + self.template_args["cache"]["default"] + if self.cache is None + else self.cache + ) try: - if self.download_path and os.path.isfile(self.download_path) \ - and cache: + if ( + self.download_path + and os.path.isfile(self.download_path) + and cache + ): # We have enough reason to look further into our cached content # and verify it has not expired. 
@@ -281,53 +284,56 @@ def exists(self, retrieve_if_missing=True): # Verify our cache time to determine whether we will get our # content again. - age_in_sec = \ - time.time() - os.stat(self.download_path).st_mtime + age_in_sec = time.time() - os.stat(self.download_path).st_mtime if age_in_sec <= cache: return True - except (OSError, IOError): + except OSError: # The file is not present pass return False if not retrieve_if_missing else self.download() - def base64(self, encoding='ascii'): - """ - Returns the attachment object as a base64 string otherwise - None is returned if an error occurs. + def base64(self, encoding="ascii"): + """Returns the attachment object as a base64 string otherwise None is + returned if an error occurs. If encoding is set to None, then it is not encoded when returned """ if not self: # We could not access the attachment self.logger.error( - 'Could not access attachment {}.'.format( - self.url(privacy=True))) + f"Could not access attachment {self.url(privacy=True)}." 
+ ) raise exception.AppriseFileNotFound("Attachment Missing") try: with self.open() as f: # Prepare our Attachment in Base64 - return base64.b64encode(f.read()).decode(encoding) \ - if encoding else base64.b64encode(f.read()) + return ( + base64.b64encode(f.read()).decode(encoding) + if encoding + else base64.b64encode(f.read()) + ) - except (TypeError, FileNotFoundError): + except (FileNotFoundError): # We no longer have a path to open - raise exception.AppriseFileNotFound("Attachment Missing") + raise exception.AppriseFileNotFound("Attachment Missing") from None - except (TypeError, OSError, IOError) as e: + except (TypeError, OSError) as e: self.logger.warning( - 'An I/O error occurred while reading {}.'.format( - self.name if self else 'attachment')) - self.logger.debug('I/O Exception: %s' % str(e)) - raise exception.AppriseDiskIOError("Attachment Access Error") + "An I/O error occurred while reading {}.".format( + self.name if self else "attachment" + ) + ) + self.logger.debug(f"I/O Exception: {e!s}") + raise exception.AppriseDiskIOError( + "Attachment Access Error") from e def invalidate(self): - """ - Release any temporary data that may be open by child classes. - Externally fetched content should be automatically cleaned up when - this function is called. + """Release any temporary data that may be open by child classes. + Externally fetched content should be automatically cleaned up when this + function is called. This function should also reset the following entries to None: - detected_name : Should identify a human readable filename @@ -345,8 +351,7 @@ def invalidate(self): return def download(self): - """ - This function must be over-ridden by inheriting classes. + """This function must be over-ridden by inheriting classes. Inherited classes MUST populate: - detected_name: Should identify a human readable filename @@ -356,19 +361,17 @@ def download(self): If a download fails, you should ensure these values are set to None. 
""" raise NotImplementedError( - "download() is implimented by the child class.") + "download() is implimented by the child class." + ) - def open(self, mode='rb'): - """ - return our file pointer and track it (we'll auto close later) - """ - pointer = open(self.path, mode=mode) + def open(self, mode="rb"): + """Return our file pointer and track it (we'll auto close later)""" + pointer = open(self.path, mode=mode) # noqa: SIM115 self.__pointers.add(pointer) return pointer def chunk(self, size=5242880): - """ - A Generator that yield chunks of a file with the specified size. + """A Generator that yield chunks of a file with the specified size. By default the chunk size is set to 5MB (5242880 bytes) """ @@ -382,15 +385,12 @@ def chunk(self, size=5242880): yield chunk def __enter__(self): - """ - support with keyword - """ + """Support with keyword.""" return self.open() def __exit__(self, value_type, value, traceback): - """ - stub to do nothing; but support exit of with statement gracefully - """ + """Stub to do nothing; but support exit of with statement + gracefully.""" return @staticmethod @@ -413,40 +413,38 @@ def parse_url(url, verify_host=True, mimetype_db=None, sanitize=True): """ results = URLBase.parse_url( - url, verify_host=verify_host, sanitize=sanitize) + url, verify_host=verify_host, sanitize=sanitize + ) if not results: # We're done; we failed to parse our url return results # Allow overriding the default config mime type - if 'mime' in results['qsd']: - results['mimetype'] = results['qsd'].get('mime', '') \ - .strip().lower() + if "mime" in results["qsd"]: + results["mimetype"] = ( + results["qsd"].get("mime", "").strip().lower() + ) # Allow overriding the default file name - if 'name' in results['qsd']: - results['name'] = results['qsd'].get('name', '') \ - .strip().lower() + if "name" in results["qsd"]: + results["name"] = results["qsd"].get("name", "").strip().lower() # Our cache value - if 'cache' in results['qsd']: + if "cache" in 
results["qsd"]: # First try to get it's integer value try: - results['cache'] = int(results['qsd']['cache']) + results["cache"] = int(results["qsd"]["cache"]) except (ValueError, TypeError): # No problem, it just isn't an integer; now treat it as a bool # instead: - results['cache'] = parse_bool(results['qsd']['cache']) + results["cache"] = parse_bool(results["qsd"]["cache"]) return results def __len__(self): - """ - Returns the filesize of the attachment. - - """ + """Returns the filesize of the attachment.""" if not self: return 0 @@ -458,14 +456,12 @@ def __len__(self): return 0 def __bool__(self): - """ - Allows the Apprise object to be wrapped in an based 'if statement'. + """Allows the Apprise object to be wrapped in an based 'if statement'. + True is returned if our content was downloaded correctly. """ - return True if self.path else False + return bool(self.path) def __del__(self): - """ - Perform any house cleaning - """ + """Perform any house cleaning.""" self.invalidate() diff --git a/libs/apprise/attachment/base.pyi b/libs/apprise/attachment/base.pyi deleted file mode 100644 index 66b7179d36..0000000000 --- a/libs/apprise/attachment/base.pyi +++ /dev/null @@ -1,36 +0,0 @@ -from typing import Any, Dict, Optional - -from .. import ContentLocation - -class AttachBase: - max_detect_buffer_size: int - unknown_mimetype: str - unknown_filename: str - unknown_filename_extension: str - strict: bool - max_file_size: int - location: ContentLocation - template_args: Dict[str, Any] - def __init__( - self, - name: Optional[str] = ..., - mimetype: Optional[str] = ..., - cache: Optional[bool] = ..., - **kwargs: Any - ) -> None: ... - @property - def path(self) -> Optional[str]: ... - @property - def name(self) -> Optional[str]: ... - @property - def mimetype(self) -> Optional[str]: ... - def exists(self) -> bool: ... - def invalidate(self) -> None: ... - def download(self) -> bool: ... - @staticmethod - def parse_url( - url: str, - verify_host: bool = ... 
- ) -> Dict[str, Any]: ... - def __len__(self) -> int: ... - def __bool__(self) -> bool: ... diff --git a/libs/apprise/attachment/file.py b/libs/apprise/attachment/file.py index 20f01ce403..0e96d3999e 100644 --- a/libs/apprise/attachment/file.py +++ b/libs/apprise/attachment/file.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. -# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -26,34 +25,30 @@ # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. -import re import os -from .base import AttachBase -from ..utils.disk import path_decode +import re + from ..common import ContentLocation from ..locale import gettext_lazy as _ +from ..utils.disk import path_decode +from .base import AttachBase class AttachFile(AttachBase): - """ - A wrapper for File based attachment sources - """ + """A wrapper for File based attachment sources.""" # The default descriptive name associated with the service - service_name = _('Local File') + service_name = _("Local File") # The default protocol - protocol = 'file' + protocol = "file" # Content is local to the same location as the apprise instance # being called (server-side) location = ContentLocation.LOCAL def __init__(self, path, **kwargs): - """ - Initialize Local File Attachment Object - - """ + """Initialize Local File Attachment Object.""" super().__init__(**kwargs) # Store path but mark it dirty since we have not performed any @@ -65,30 +60,30 @@ def __init__(self, path, **kwargs): return def url(self, privacy=False, *args, **kwargs): - """ - Returns the URL built dynamically based on specified arguments. 
- """ + """Returns the URL built dynamically based on specified arguments.""" # Define any URL parameters params = {} if self._mimetype: # A mime-type was enforced - params['mime'] = self._mimetype + params["mime"] = self._mimetype if self._name: # A name was enforced - params['name'] = self._name + params["name"] = self._name - return 'file://{path}{params}'.format( + return "file://{path}{params}".format( path=self.quote(self.__original_path), - params='?{}'.format(self.urlencode(params, safe='/')) - if params else '', + params=( + "?{}".format(self.urlencode(params, safe="/")) + if params + else "" + ), ) def download(self, **kwargs): - """ - Perform retrieval of our data. + """Perform retrieval of our data. For file base attachments, our data already exists, so we only need to validate it. @@ -108,14 +103,17 @@ def download(self, **kwargs): except OSError: return False - if self.max_file_size > 0 and \ - os.path.getsize(self.dirty_path) > self.max_file_size: + if ( + self.max_file_size > 0 + and os.path.getsize(self.dirty_path) > self.max_file_size + ): # The content to attach is to large self.logger.error( - 'Content exceeds allowable maximum file length ' - '({}KB): {}'.format( - int(self.max_file_size / 1024), self.url(privacy=True))) + "Content exceeds allowable maximum file length" + f" ({int(self.max_file_size / 1024)}KB):" + f" {self.url(privacy=True)}" + ) # Return False (signifying a failure) return False @@ -131,20 +129,17 @@ def download(self, **kwargs): @staticmethod def parse_url(url): - """ - Parses the URL so that we can handle all different file paths - and return it as our path object - - """ + """Parses the URL so that we can handle all different file paths and + return it as our path object.""" results = AttachBase.parse_url(url, verify_host=False) if not results: # We're done early; it's not a good URL return results - match = re.match(r'file://(?P[^?]+)(\?.*)?', url, re.I) + match = re.match(r"file://(?P[^?]+)(\?.*)?", url, re.I) if not 
match: return None - results['path'] = AttachFile.unquote(match.group('path')) + results["path"] = AttachFile.unquote(match.group("path")) return results diff --git a/libs/apprise/attachment/http.py b/libs/apprise/attachment/http.py index 0c7ccc5db9..633f898fdb 100644 --- a/libs/apprise/attachment/http.py +++ b/libs/apprise/attachment/http.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. -# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -26,30 +25,31 @@ # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. -import re +import contextlib import os -import requests -import threading +import re from tempfile import NamedTemporaryFile -from .base import AttachBase +import threading + +import requests + from ..common import ContentLocation -from ..url import PrivacyMode from ..locale import gettext_lazy as _ +from ..url import PrivacyMode +from .base import AttachBase class AttachHTTP(AttachBase): - """ - A wrapper for HTTP based attachment sources - """ + """A wrapper for HTTP based attachment sources.""" # The default descriptive name associated with the service - service_name = _('Web Based') + service_name = _("Web Based") # The default protocol - protocol = 'http' + protocol = "http" # The default secure protocol - secure_protocol = 'https' + secure_protocol = "https" # The number of bytes in memory to read from the remote source at a time chunk_size = 8192 @@ -61,20 +61,18 @@ class AttachHTTP(AttachBase): _lock = threading.Lock() def __init__(self, headers=None, **kwargs): - """ - Initialize HTTP Object + """Initialize HTTP Object. 
headers can be a dictionary of key/value pairs that you want to additionally include as part of the server headers to post with - """ super().__init__(**kwargs) - self.schema = 'https' if self.secure else 'http' + self.schema = "https" if self.secure else "http" - self.fullpath = kwargs.get('fullpath') + self.fullpath = kwargs.get("fullpath") if not isinstance(self.fullpath, str): - self.fullpath = '/' + self.fullpath = "/" self.headers = {} if headers: @@ -86,15 +84,17 @@ def __init__(self, headers=None, **kwargs): # Our Query String Dictionary; we use this to track arguments # specified that aren't otherwise part of this class - self.qsd = {k: v for k, v in kwargs.get('qsd', {}).items() - if k not in self.template_args} + self.qsd = { + k: v + for k, v in kwargs.get("qsd", {}).items() + if k not in self.template_args + } return def download(self, **kwargs): - """ - Perform retrieval of the configuration based on the specified request - """ + """Perform retrieval of the configuration based on the specified + request.""" if self.location == ContentLocation.INACCESSIBLE: # our content is inaccessible @@ -102,7 +102,7 @@ def download(self, **kwargs): # prepare header headers = { - 'User-Agent': self.app_id, + "User-Agent": self.app_id, } # Apply any/all header over-rides defined @@ -112,9 +112,9 @@ def download(self, **kwargs): if self.user: auth = (self.user, self.password) - url = '%s://%s' % (self.schema, self.host) + url = f"{self.schema}://{self.host}" if isinstance(self.port, int): - url += ':%d' % self.port + url += f":{self.port}" url += self.fullpath @@ -129,67 +129,74 @@ def download(self, **kwargs): # Due to locking; it's possible a concurrent thread already # handled the retrieval in which case we can safely move on self.logger.trace( - 'HTTP Attachment %s already retrieved', - self._temp_file.name) + "HTTP Attachment %s already retrieved", + self._temp_file.name, + ) return True # Ensure any existing content set has been invalidated self.invalidate() 
self.logger.debug( - 'HTTP Attachment Fetch URL: %s (cert_verify=%r)' % ( - url, self.verify_certificate)) + "HTTP Attachment Fetch URL:" + f" {url} (cert_verify={self.verify_certificate!r})" + ) try: # Make our request with requests.get( - url, - headers=headers, - auth=auth, - params=self.qsd, - verify=self.verify_certificate, - timeout=self.request_timeout, - stream=True) as r: + url, + headers=headers, + auth=auth, + params=self.qsd, + verify=self.verify_certificate, + timeout=self.request_timeout, + stream=True, + ) as r: # Handle Errors r.raise_for_status() # Get our file-size (if known) try: - file_size = int(r.headers.get('Content-Length', '0')) + file_size = int(r.headers.get("Content-Length", "0")) except (TypeError, ValueError): # Handle edge case where Content-Length is a bad value file_size = 0 # Perform a little Q/A on file limitations and restrictions - if self.max_file_size > 0 and \ - file_size > self.max_file_size: + if ( + self.max_file_size > 0 + and file_size > self.max_file_size + ): # The content retrieved is to large self.logger.error( - 'HTTP response exceeds allowable maximum file ' - 'length ({}KB): {}'.format( - int(self.max_file_size / 1024), - self.url(privacy=True))) + "HTTP response exceeds allowable maximum file" + f" length ({int(self.max_file_size / 1024)}KB):" + f" {self.url(privacy=True)}" + ) # Return False (signifying a failure) return False # Detect config format based on mime if the format isn't # already enforced - self.detected_mimetype = r.headers.get('Content-Type') + self.detected_mimetype = r.headers.get("Content-Type") - d = r.headers.get('Content-Disposition', '') + d = r.headers.get("Content-Disposition", "") result = re.search( - "filename=['\"]?(?P[^'\"]+)['\"]?", d, re.I) + r"filename=['\"]?(?P[^'\"]+)['\"]?", d, re.I + ) if result: - self.detected_name = result.group('name').strip() + self.detected_name = result.group("name").strip() # Create a temporary file to work with; delete must be set # to False or it 
isn't compatible with Microsoft Windows # instances. In lieu of this, __del__ will clean up the # file for us. - self._temp_file = NamedTemporaryFile(delete=False) + self._temp_file = \ + NamedTemporaryFile(delete=False) # noqa: SIM115 # Get our chunk size chunk_size = self.chunk_size @@ -212,11 +219,11 @@ def download(self, **kwargs): if bytes_written > self.max_file_size: # The content retrieved is to large self.logger.error( - 'HTTP response exceeds allowable ' - 'maximum file length ' - '({}KB): {}'.format( - int(self.max_file_size / 1024), - self.url(privacy=True))) + "HTTP response exceeds allowable" + " maximum file length" + f" ({int(self.max_file_size / 1024)}" + f"KB): {self.url(privacy=True)}" + ) # Invalidate any variables previously set self.invalidate() @@ -224,9 +231,11 @@ def download(self, **kwargs): # Return False (signifying a failure) return False - elif bytes_written + chunk_size \ - > self.max_file_size: - # Adjust out next read to accomodate up to + elif ( + bytes_written + chunk_size + > self.max_file_size + ): + # Adjust out next read to accommodate up to # our limit +1. This will prevent us from # reading to much into our memory buffer self.max_file_size - bytes_written + 1 @@ -242,9 +251,10 @@ def download(self, **kwargs): except requests.RequestException as e: self.logger.error( - 'A Connection error occurred retrieving HTTP ' - 'configuration from %s.' % self.host) - self.logger.debug('Socket Exception: %s' % str(e)) + "A Connection error occurred retrieving HTTP " + f"configuration from {self.host}." + ) + self.logger.debug(f"Socket Exception: {e!s}") # Invalidate any variables previously set self.invalidate() @@ -252,14 +262,15 @@ def download(self, **kwargs): # Return False (signifying a failure) return False - except (IOError, OSError): + except OSError: # IOError is present for backwards compatibility with Python # versions older then 3.3. >= 3.3 throw OSError now. 
# Could not open and/or write the temporary file self.logger.error( - 'Could not write attachment to disk: {}'.format( - self.url(privacy=True))) + "Could not write attachment to disk:" + f" {self.url(privacy=True)}" + ) # Invalidate any variables previously set self.invalidate() @@ -271,21 +282,15 @@ def download(self, **kwargs): return True def invalidate(self): - """ - Close our temporary file - """ + """Close our temporary file.""" if self._temp_file: - self.logger.trace( - 'Attachment cleanup of %s', self._temp_file.name) + self.logger.trace("Attachment cleanup of %s", self._temp_file.name) self._temp_file.close() - try: + with contextlib.suppress(OSError): # Ensure our file is removed (if it exists) os.unlink(self._temp_file.name) - except OSError: - pass - # Reset our temporary file to prevent from entering # this block again self._temp_file = None @@ -293,15 +298,11 @@ def invalidate(self): super().invalidate() def __del__(self): - """ - Tidy memory if open - """ + """Tidy memory if open.""" self.invalidate() def url(self, privacy=False, *args, **kwargs): - """ - Returns the URL built dynamically based on specified arguments. 
- """ + """Returns the URL built dynamically based on specified arguments.""" # Our URL parameters params = self.url_parameters(privacy=privacy, *args, **kwargs) @@ -309,59 +310,59 @@ def url(self, privacy=False, *args, **kwargs): # Prepare our cache value if self.cache is not None: if isinstance(self.cache, bool) or not self.cache: - cache = 'yes' if self.cache else 'no' + cache = "yes" if self.cache else "no" else: cache = int(self.cache) # Set our cache value - params['cache'] = cache + params["cache"] = cache if self._mimetype: # A format was enforced - params['mime'] = self._mimetype + params["mime"] = self._mimetype if self._name: # A name was enforced - params['name'] = self._name + params["name"] = self._name # Append our headers into our parameters - params.update({'+{}'.format(k): v for k, v in self.headers.items()}) + params.update({f"+{k}": v for k, v in self.headers.items()}) # Apply any remaining entries to our URL params.update(self.qsd) # Determine Authentication - auth = '' + auth = "" if self.user and self.password: - auth = '{user}:{password}@'.format( - user=self.quote(self.user, safe=''), + auth = "{user}:{password}@".format( + user=self.quote(self.user, safe=""), password=self.pprint( - self.password, privacy, mode=PrivacyMode.Secret, safe=''), + self.password, privacy, mode=PrivacyMode.Secret, safe="" + ), ) elif self.user: - auth = '{user}@'.format( - user=self.quote(self.user, safe=''), + auth = "{user}@".format( + user=self.quote(self.user, safe=""), ) default_port = 443 if self.secure else 80 - - return '{schema}://{auth}{hostname}{port}{fullpath}?{params}'.format( + return "{schema}://{auth}{hostname}{port}{fullpath}?{params}".format( schema=self.secure_protocol if self.secure else self.protocol, auth=auth, - hostname=self.quote(self.host, safe=''), - port='' if self.port is None or self.port == default_port - else ':{}'.format(self.port), - fullpath=self.quote(self.fullpath, safe='/'), - params=self.urlencode(params, safe='/'), + 
hostname=self.quote(self.host, safe=""), + port=( + "" + if self.port is None or self.port == default_port + else f":{self.port}" + ), + fullpath=self.quote(self.fullpath, safe="/"), + params=self.urlencode(params, safe="/"), ) @staticmethod def parse_url(url): - """ - Parses the URL and returns enough arguments that can allow - us to re-instantiate this object. - - """ + """Parses the URL and returns enough arguments that can allow us to re- + instantiate this object.""" results = AttachBase.parse_url(url, sanitize=False) if not results: # We're done early as we couldn't load the results @@ -369,7 +370,7 @@ def parse_url(url): # Add our headers that the user can potentially over-ride if they wish # to to our returned result set - results['headers'] = results['qsd-'] - results['headers'].update(results['qsd+']) + results["headers"] = results["qsd-"] + results["headers"].update(results["qsd+"]) return results diff --git a/libs/apprise/attachment/memory.py b/libs/apprise/attachment/memory.py index fe6e33fce9..c7ebdbd60d 100644 --- a/libs/apprise/attachment/memory.py +++ b/libs/apprise/attachment/memory.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. -# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -26,38 +25,40 @@ # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. -import re -import os -import io import base64 -from .base import AttachBase +import io +import os +import re +import uuid + from .. 
import exception from ..common import ContentLocation from ..locale import gettext_lazy as _ -import uuid +from .base import AttachBase class AttachMemory(AttachBase): - """ - A wrapper for Memory based attachment sources - """ + """A wrapper for Memory based attachment sources.""" # The default descriptive name associated with the service - service_name = _('Memory') + service_name = _("Memory") # The default protocol - protocol = 'memory' + protocol = "memory" # Content is local to the same location as the apprise instance # being called (server-side) location = ContentLocation.LOCAL - def __init__(self, content=None, name=None, mimetype=None, - encoding='utf-8', **kwargs): - """ - Initialize Memory Based Attachment Object - - """ + def __init__( + self, + content=None, + name=None, + mimetype=None, + encoding="utf-8", + **kwargs, + ): + """Initialize Memory Based Attachment Object.""" # Create our BytesIO object self._data = io.BytesIO() @@ -68,15 +69,16 @@ def __init__(self, content=None, name=None, mimetype=None, elif isinstance(content, str): content = content.encode(encoding) if mimetype is None: - mimetype = 'text/plain' + mimetype = "text/plain" if not name: # Generate a unique filename - name = str(uuid.uuid4()) + '.txt' + name = str(uuid.uuid4()) + ".txt" elif not isinstance(content, bytes): raise TypeError( - 'Provided content for memory attachment is invalid') + "Provided content for memory attachment is invalid" + ) # Store our content if content: @@ -84,11 +86,11 @@ def __init__(self, content=None, name=None, mimetype=None, if mimetype is None: # Default mimetype - mimetype = 'application/octet-stream' + mimetype = "application/octet-stream" if not name: # Generate a unique filename - name = str(uuid.uuid4()) + '.dat' + name = str(uuid.uuid4()) + ".dat" # Initialize our base object super().__init__(name=name, mimetype=mimetype, **kwargs) @@ -96,40 +98,32 @@ def __init__(self, content=None, name=None, mimetype=None, return def url(self, privacy=False, 
*args, **kwargs): - """ - Returns the URL built dynamically based on specified arguments. - """ + """Returns the URL built dynamically based on specified arguments.""" # Define any URL parameters params = { - 'mime': self._mimetype, + "mime": self._mimetype, } - return 'memory://{name}?{params}'.format( + return "memory://{name}?{params}".format( name=self.quote(self._name), - params=self.urlencode(params, safe='/') + params=self.urlencode(params, safe="/"), ) def open(self, *args, **kwargs): - """ - return our memory object - """ + """Return our memory object.""" # Return our object self._data.seek(0, 0) return self._data def __enter__(self): - """ - support with clause - """ + """Support with clause.""" # Return our object self._data.seek(0, 0) return self._data def download(self, **kwargs): - """ - Handle memory download() call - """ + """Handle memory download() call.""" if self.location == ContentLocation.INACCESSIBLE: # our content is inaccessible @@ -138,77 +132,74 @@ def download(self, **kwargs): if self.max_file_size > 0 and len(self) > self.max_file_size: # The content to attach is to large self.logger.error( - 'Content exceeds allowable maximum memory size ' - '({}KB): {}'.format( - int(self.max_file_size / 1024), self.url(privacy=True))) + "Content exceeds allowable maximum memory size" + f" ({int(self.max_file_size / 1024)}KB):" + f" {self.url(privacy=True)}" + ) # Return False (signifying a failure) return False return True - def base64(self, encoding='ascii'): - """ - We need to over-ride this since the base64 sub-library seems to close - our file descriptor making it no longer referencable. 
- """ + def base64(self, encoding="ascii"): + """We need to over-ride this since the base64 sub-library seems to + close our file descriptor making it no longer referencable.""" if not self: # We could not access the attachment self.logger.error( - 'Could not access attachment {}.'.format( - self.url(privacy=True))) + f"Could not access attachment {self.url(privacy=True)}." + ) raise exception.AppriseFileNotFound("Attachment Missing") self._data.seek(0, 0) - return base64.b64encode(self._data.read()).decode(encoding) \ - if encoding else base64.b64encode(self._data.read()) + return ( + base64.b64encode(self._data.read()).decode(encoding) + if encoding + else base64.b64encode(self._data.read()) + ) def invalidate(self): - """ - Removes data - """ + """Removes data.""" self._data.truncate(0) return def exists(self): - """ - over-ride exists() call - """ + """Over-ride exists() call.""" size = len(self) - return True if self.location != ContentLocation.INACCESSIBLE \ - and size > 0 and ( - self.max_file_size <= 0 or - (self.max_file_size > 0 and size <= self.max_file_size)) \ - else False + return bool( + self.location != ContentLocation.INACCESSIBLE + and size > 0 + and ( + self.max_file_size <= 0 + or (self.max_file_size > 0 and size <= self.max_file_size) + ) + ) @staticmethod def parse_url(url): - """ - Parses the URL so that we can handle all different file paths - and return it as our path object - - """ + """Parses the URL so that we can handle all different file paths and + return it as our path object.""" results = AttachBase.parse_url(url, verify_host=False) if not results: # We're done early; it's not a good URL return results - if 'name' not in results: + if "name" not in results: # Allow fall-back to be from URL - match = re.match(r'memory://(?P[^?]+)(\?.*)?', url, re.I) + match = re.match(r"memory://(?P[^?]+)(\?.*)?", url, re.I) if match: # Store our filename only (ignore any defined paths) - results['name'] = \ - 
os.path.basename(AttachMemory.unquote(match.group('path'))) + results["name"] = os.path.basename( + AttachMemory.unquote(match.group("path")) + ) return results @property def path(self): - """ - return the filename - """ + """Return the filename.""" if not self.exists(): # we could not obtain our path return None @@ -216,15 +207,12 @@ def path(self): return self._name def __len__(self): - """ - Returns the size of he memory attachment - - """ + """Returns the size of he memory attachment.""" return self._data.getbuffer().nbytes def __bool__(self): - """ - Allows the Apprise object to be wrapped in an based 'if statement'. + """Allows the Apprise object to be wrapped in an based 'if statement'. + True is returned if our content was downloaded correctly. """ diff --git a/libs/apprise/cli.py b/libs/apprise/cli.py index 121fc95f43..dbe11886ca 100644 --- a/libs/apprise/cli.py +++ b/libs/apprise/cli.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. -# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -26,185 +25,212 @@ # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. -import click -import textwrap import logging -import platform -import sys import os -import shutil +from os.path import exists, isfile +import platform import re +import shutil +import sys +import textwrap -from os.path import isfile -from os.path import exists - -from . import Apprise -from . import AppriseAsset -from . import AppriseConfig -from . 
import PersistentStore +import click -from .utils.parse import parse_list -from .utils.disk import dir_size, bytes_to_str, path_decode -from .common import NOTIFY_TYPES -from .common import NOTIFY_FORMATS -from .common import PERSISTENT_STORE_MODES -from .common import PersistentStoreState -from .common import ContentLocation +from . import ( + Apprise, + AppriseAsset, + AppriseConfig, + PersistentStore, + __copyright__, + __license__, + __title__, + __version__, +) +from .common import ( + NOTIFY_FORMATS, + NOTIFY_TYPES, + PERSISTENT_STORE_MODES, + ContentLocation, + NotifyFormat, + NotifyType, + PersistentStoreMode, + PersistentStoreState, +) from .logger import logger +from .utils.disk import bytes_to_str, dir_size, path_decode +from .utils.parse import parse_list -from . import __title__ -from . import __version__ -from . import __license__ -from . import __copywrite__ - -# By default we allow looking 1 level down recursivly in Apprise configuration +# By default we allow looking 1 level down recursively in Apprise configuration # files. DEFAULT_RECURSION_DEPTH = 1 # Default number of days to prune persistent storage -DEFAULT_STORAGE_PRUNE_DAYS = \ - int(os.environ.get('APPRISE_STORAGE_PRUNE_DAYS', 30)) +DEFAULT_STORAGE_PRUNE_DAYS = int( + os.environ.get("APPRISE_STORAGE_PRUNE_DAYS", 30) +) # The default URL ID Length -DEFAULT_STORAGE_UID_LENGTH = \ - int(os.environ.get('APPRISE_STORAGE_UID_LENGTH', 8)) +DEFAULT_STORAGE_UID_LENGTH = int( + os.environ.get("APPRISE_STORAGE_UID_LENGTH", 8) +) -# Defines the envrionment variable to parse if defined. This is ONLY +# Defines the environment variable to parse if defined. 
This is ONLY # Referenced if: # - No Configuration Files were found/loaded/specified # - No URLs were provided directly into the CLI Call -DEFAULT_ENV_APPRISE_URLS = 'APPRISE_URLS' +DEFAULT_ENV_APPRISE_URLS = "APPRISE_URLS" -# Defines the over-ride path for the configuration files read -DEFAULT_ENV_APPRISE_CONFIG_PATH = 'APPRISE_CONFIG_PATH' +# Defines the override path for the configuration files read +DEFAULT_ENV_APPRISE_CONFIG_PATH = "APPRISE_CONFIG_PATH" -# Defines the over-ride path for the plugins to load -DEFAULT_ENV_APPRISE_PLUGIN_PATH = 'APPRISE_PLUGIN_PATH' +# Defines the override path for the plugins to load +DEFAULT_ENV_APPRISE_PLUGIN_PATH = "APPRISE_PLUGIN_PATH" -# Defines the over-ride path for the persistent storage -DEFAULT_ENV_APPRISE_STORAGE_PATH = 'APPRISE_STORAGE_PATH' +# Defines the override path for the persistent storage +DEFAULT_ENV_APPRISE_STORAGE_PATH = "APPRISE_STORAGE_PATH" # Defines our click context settings adding -h to the additional options that # can be specified to get the help menu to come up -CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help']) +CONTEXT_SETTINGS = {"help_option_names": ["-h", "--help"]} # Define our default configuration we use if nothing is otherwise specified DEFAULT_CONFIG_PATHS = ( # Legacy Path Support - '~/.apprise', - '~/.apprise.conf', - '~/.apprise.yml', - '~/.apprise.yaml', - '~/.config/apprise', - '~/.config/apprise.conf', - '~/.config/apprise.yml', - '~/.config/apprise.yaml', - + "~/.apprise", + "~/.apprise.conf", + "~/.apprise.yml", + "~/.apprise.yaml", + "~/.config/apprise", + "~/.config/apprise.conf", + "~/.config/apprise.yml", + "~/.config/apprise.yaml", # Plugin Support Extended Directory Search Paths - '~/.apprise/apprise', - '~/.apprise/apprise.conf', - '~/.apprise/apprise.yml', - '~/.apprise/apprise.yaml', - '~/.config/apprise/apprise', - '~/.config/apprise/apprise.conf', - '~/.config/apprise/apprise.yml', - '~/.config/apprise/apprise.yaml', - + "~/.apprise/apprise", + 
"~/.apprise/apprise.conf", + "~/.apprise/apprise.yml", + "~/.apprise/apprise.yaml", + "~/.config/apprise/apprise", + "~/.config/apprise/apprise.conf", + "~/.config/apprise/apprise.yml", + "~/.config/apprise/apprise.yaml", # Global Configuration File Support - '/etc/apprise', - '/etc/apprise.yml', - '/etc/apprise.yaml', - '/etc/apprise/apprise', - '/etc/apprise/apprise.conf', - '/etc/apprise/apprise.yml', - '/etc/apprise/apprise.yaml', + "/etc/apprise", + "/etc/apprise.yml", + "/etc/apprise.yaml", + "/etc/apprise/apprise", + "/etc/apprise/apprise.conf", + "/etc/apprise/apprise.yml", + "/etc/apprise/apprise.yaml", ) # Define our paths to search for plugins DEFAULT_PLUGIN_PATHS = ( - '~/.apprise/plugins', - '~/.config/apprise/plugins', - + "~/.apprise/plugins", + "~/.config/apprise/plugins", # Global Plugin Support - '/var/lib/apprise/plugins', + "/var/lib/apprise/plugins", +) + + +# +# General Options and Defaults +# +DEFAULT_NOTIFY_TYPE = NotifyType.INFO + +NOTIFY_TYPE_CHOICES: tuple[NotifyType, ...] = ( + NotifyType.INFO, + NotifyType.SUCCESS, + NotifyType.WARNING, + NotifyType.FAILURE, +) + +DEFAULT_NOTIFY_FORMAT = NotifyFormat.TEXT + +NOTIFY_FORMAT_CHOICES: tuple[NotifyFormat, ...] = ( + NotifyFormat.TEXT, + NotifyFormat.MARKDOWN, + NotifyFormat.HTML, ) # # Persistent Storage # -DEFAULT_STORAGE_PATH = '~/.local/share/apprise/cache' +DEFAULT_STORAGE_PATH = "~/.local/share/apprise/cache" + +# Storage Mode +DEFAULT_STORAGE_MODE = PersistentStoreMode.AUTO + +# Create an ordered list of options (first is default) +PERSISTENT_STORE_MODE_CHOICES: tuple[PersistentStoreMode, ...] 
= ( + PersistentStoreMode.AUTO, + PersistentStoreMode.FLUSH, + PersistentStoreMode.MEMORY, +) # Detect Windows -if platform.system() == 'Windows': +if platform.system() == "Windows": # Default Config Search Path for Windows Users DEFAULT_CONFIG_PATHS = ( - '%APPDATA%\\Apprise\\apprise', - '%APPDATA%\\Apprise\\apprise.conf', - '%APPDATA%\\Apprise\\apprise.yml', - '%APPDATA%\\Apprise\\apprise.yaml', - '%LOCALAPPDATA%\\Apprise\\apprise', - '%LOCALAPPDATA%\\Apprise\\apprise.conf', - '%LOCALAPPDATA%\\Apprise\\apprise.yml', - '%LOCALAPPDATA%\\Apprise\\apprise.yaml', - + "%APPDATA%\\Apprise\\apprise", + "%APPDATA%\\Apprise\\apprise.conf", + "%APPDATA%\\Apprise\\apprise.yml", + "%APPDATA%\\Apprise\\apprise.yaml", + "%LOCALAPPDATA%\\Apprise\\apprise", + "%LOCALAPPDATA%\\Apprise\\apprise.conf", + "%LOCALAPPDATA%\\Apprise\\apprise.yml", + "%LOCALAPPDATA%\\Apprise\\apprise.yaml", # # Global Support # - # C:\ProgramData\Apprise - '%ALLUSERSPROFILE%\\Apprise\\apprise', - '%ALLUSERSPROFILE%\\Apprise\\apprise.conf', - '%ALLUSERSPROFILE%\\Apprise\\apprise.yml', - '%ALLUSERSPROFILE%\\Apprise\\apprise.yaml', - + "%ALLUSERSPROFILE%\\Apprise\\apprise", + "%ALLUSERSPROFILE%\\Apprise\\apprise.conf", + "%ALLUSERSPROFILE%\\Apprise\\apprise.yml", + "%ALLUSERSPROFILE%\\Apprise\\apprise.yaml", # C:\Program Files\Apprise - '%PROGRAMFILES%\\Apprise\\apprise', - '%PROGRAMFILES%\\Apprise\\apprise.conf', - '%PROGRAMFILES%\\Apprise\\apprise.yml', - '%PROGRAMFILES%\\Apprise\\apprise.yaml', - + "%PROGRAMFILES%\\Apprise\\apprise", + "%PROGRAMFILES%\\Apprise\\apprise.conf", + "%PROGRAMFILES%\\Apprise\\apprise.yml", + "%PROGRAMFILES%\\Apprise\\apprise.yaml", # C:\Program Files\Common Files - '%COMMONPROGRAMFILES%\\Apprise\\apprise', - '%COMMONPROGRAMFILES%\\Apprise\\apprise.conf', - '%COMMONPROGRAMFILES%\\Apprise\\apprise.yml', - '%COMMONPROGRAMFILES%\\Apprise\\apprise.yaml', + "%COMMONPROGRAMFILES%\\Apprise\\apprise", + "%COMMONPROGRAMFILES%\\Apprise\\apprise.conf", + 
"%COMMONPROGRAMFILES%\\Apprise\\apprise.yml", + "%COMMONPROGRAMFILES%\\Apprise\\apprise.yaml", ) # Default Plugin Search Path for Windows Users DEFAULT_PLUGIN_PATHS = ( - '%APPDATA%\\Apprise\\plugins', - '%LOCALAPPDATA%\\Apprise\\plugins', - + "%APPDATA%\\Apprise\\plugins", + "%LOCALAPPDATA%\\Apprise\\plugins", # # Global Support # - # C:\ProgramData\Apprise\plugins - '%ALLUSERSPROFILE%\\Apprise\\plugins', + "%ALLUSERSPROFILE%\\Apprise\\plugins", # C:\Program Files\Apprise\plugins - '%PROGRAMFILES%\\Apprise\\plugins', + "%PROGRAMFILES%\\Apprise\\plugins", # C:\Program Files\Common Files - '%COMMONPROGRAMFILES%\\Apprise\\plugins', + "%COMMONPROGRAMFILES%\\Apprise\\plugins", ) # # Persistent Storage # - DEFAULT_STORAGE_PATH = '%APPDATA%/Apprise/cache' + DEFAULT_STORAGE_PATH = "%APPDATA%/Apprise/cache" class PersistentStorageMode: - """ - Persistent Storage Modes - """ + """Persistent Storage Modes.""" + # List all detected configuration loaded - LIST = 'list' + LIST = "list" # Prune persistent storage based on age - PRUNE = 'prune' + PRUNE = "prune" - # Reset all (reguardless of age) - CLEAR = 'clear' + # Reset all (regardless of age) + CLEAR = "clear" # Define the types in a list for validation purposes @@ -214,43 +240,48 @@ class PersistentStorageMode: PersistentStorageMode.CLEAR, ) -if os.environ.get('APPRISE_STORAGE_PATH', '').strip(): - # Over-ride Default Storage Path - DEFAULT_STORAGE_PATH = os.environ.get('APPRISE_STORAGE_PATH') +if os.environ.get("APPRISE_STORAGE_PATH", "").strip(): + # Override Default Storage Path + DEFAULT_STORAGE_PATH = os.environ.get("APPRISE_STORAGE_PATH") def print_version_msg(): - """ - Prints version message when -V or --version is specified. 
- - """ - result = list() - result.append('{} v{}'.format(__title__, __version__)) - result.append(__copywrite__) - result.append( - 'This code is licensed under the {} License.'.format(__license__)) - click.echo('\n'.join(result)) + """Prints version message when -V or --version is specified.""" + result = [] + result.append(f"{__title__} v{__version__}") + result.append(__copyright__) + result.append(f"This code is licensed under the {__license__} License.") + click.echo("\n".join(result)) class CustomHelpCommand(click.Command): def format_help(self, ctx, formatter): - formatter.write_text('Usage:') + formatter.write_text("Usage:") formatter.write_text( - ' apprise [OPTIONS] [APPRISE_URL [APPRISE_URL2 [APPRISE_URL3]]]') + " apprise [OPTIONS] [APPRISE_URL [APPRISE_URL2 [APPRISE_URL3]]]" + ) formatter.write_text( - ' apprise storage [OPTIONS] [ACTION] [UID1 [UID2 [UID3]]]') + " apprise storage [OPTIONS] [ACTION] [UID1 [UID2 [UID3]]]" + ) # Custom help message - formatter.write_text('') + formatter.write_text("") content = ( - 'Send a notification to all of the specified servers ' - 'identified by their URLs', - 'the content provided within the title, body and ' - 'notification-type.', - '', - 'For a list of all of the supported services and information on ' - 'how to use ', - 'them, check out at https://github.com/caronc/apprise') + ( + "Send a notification to all of the specified servers " + "identified by their URLs" + ), + ( + "the content provided within the title, body and " + "notification-type." 
+ ), + "", + ( + "For a list of all of the supported services and information" + " on how to use " + ), + "them, check out https://github.com/caronc/apprise", + ) for line in content: formatter.write_text(line) @@ -260,23 +291,33 @@ def format_help(self, ctx, formatter): self.format_epilog(ctx, formatter) # Custom 'Actions:' section after the 'Options:' - formatter.write_text('') - formatter.write_text('Actions:') + formatter.write_text("") + formatter.write_text("Actions:") actions = [( - 'storage', 'Access the persistent storage disk administration', - [( - 'list', - 'List all URL IDs associated with detected URL(s). ' - 'This is also the default action ran if nothing is provided', - ), ( - 'prune', - 'Eliminates stale entries found based on ' - '--storage-prune-days (-SPD)', - ), ( - 'clean', - 'Removes any persistent data created by Apprise', - )], + "storage", + "Access the persistent storage disk administration", + [ + ( + "list", + ( + "List all URL IDs associated with detected URL(s)." 
+ " This is also the default action run if nothing is" + " provided" + ), + ), + ( + "prune", + ( + "Eliminates stale entries found based on " + "--storage-prune-days (-SPD)" + ), + ), + ( + "clean", + "Removes any persistent data created by Apprise", + ), + ], )] # @@ -290,24 +331,29 @@ def format_help(self, ctx, formatter): # label padding (for alignment) action_label_width = 10 - space = ' ' - space_re = re.compile(r'\r*\n') + space = " " + space_re = re.compile(r"\r*\n") cols = 80 indent = 10 # Format each action and its subactions for action, description, sub_actions in actions: # Our action indent - ai = ' ' * action_indent + ai = " " * action_indent # Format the main action description - formatted_description = space_re.split(textwrap.fill( - description, width=(cols - indent - action_indent), - initial_indent=space * indent, - subsequent_indent=space * indent)) + formatted_description = space_re.split( + textwrap.fill( + description, + width=(cols - indent - action_indent), + initial_indent=space * indent, + subsequent_indent=space * indent, + ) + ) for no, line in enumerate(formatted_description): if not no: formatter.write_text( - f'{ai}{action:<{action_label_width}}{line}') + f"{ai}{action:<{action_label_width}}{line}" + ) else: # pragma: no cover # Note: no branch is set intentionally since this is not @@ -316,129 +362,279 @@ def format_help(self, ctx, formatter): # know it works because we repeat this process with # our sub-options below formatter.write_text( - f'{ai}{space:<{action_label_width}}{line}') + f"{ai}{space:<{action_label_width}}{line}" + ) # Format each subaction - ai = ' ' * (action_indent * 2) + ai = " " * (action_indent * 2) for action, description in sub_actions: - formatted_description = space_re.split(textwrap.fill( - description, width=(cols - indent - (action_indent * 3)), - initial_indent=space * (indent - action_indent), - subsequent_indent=space * (indent - action_indent))) + formatted_description = space_re.split( + 
textwrap.fill( + description, + width=(cols - indent - (action_indent * 3)), + initial_indent=space * (indent - action_indent), + subsequent_indent=space * (indent - action_indent), + ) + ) for no, line in enumerate(formatted_description): if not no: formatter.write_text( - f'{ai}{action:<{action_label_width}}{line}') + f"{ai}{action:<{action_label_width}}{line}" + ) else: formatter.write_text( - f'{ai}{space:<{action_label_width}}{line}') + f"{ai}{space:<{action_label_width}}{line}" + ) # Include any epilog or additional text self.format_epilog(ctx, formatter) @click.command(context_settings=CONTEXT_SETTINGS, cls=CustomHelpCommand) -@click.option('--body', '-b', default=None, type=str, - help='Specify the message body. If no body is specified then ' - 'content is read from .') -@click.option('--title', '-t', default=None, type=str, - help='Specify the message title. This field is complete ' - 'optional.') -@click.option('--plugin-path', '-P', default=None, type=str, multiple=True, - metavar='PATH', - help='Specify one or more plugin paths to scan.') -@click.option('--storage-path', '-S', default=DEFAULT_STORAGE_PATH, type=str, - metavar='PATH', - help='Specify the path to the persistent storage location ' - '(default={}).'.format(DEFAULT_STORAGE_PATH)) -@click.option('--storage-prune-days', '-SPD', - default=DEFAULT_STORAGE_PRUNE_DAYS, type=int, - help='Define the number of days the storage prune ' - 'should run using. Setting this to zero (0) will eliminate ' - 'all accumulated content. By default this value is {} days.' - .format(DEFAULT_STORAGE_PRUNE_DAYS)) -@click.option('--storage-uid-length', '-SUL', - default=DEFAULT_STORAGE_UID_LENGTH, type=int, - help='Define the number of unique characters to store persistent' - 'cache in. By default this value is {} characters.' 
- .format(DEFAULT_STORAGE_UID_LENGTH)) -@click.option('--storage-mode', '-SM', default=PERSISTENT_STORE_MODES[0], - type=str, metavar='MODE', - help='Specify the persistent storage operational mode ' - '(default={}). Possible values are "{}", and "{}".'.format( - PERSISTENT_STORE_MODES[0], '", "'.join( - PERSISTENT_STORE_MODES[:-1]), - PERSISTENT_STORE_MODES[-1])) -@click.option('--config', '-c', default=None, type=str, multiple=True, - metavar='CONFIG_URL', - help='Specify one or more configuration locations.') -@click.option('--attach', '-a', default=None, type=str, multiple=True, - metavar='ATTACHMENT_URL', - help='Specify one or more attachment.') -@click.option('--notification-type', '-n', default=NOTIFY_TYPES[0], type=str, - metavar='TYPE', - help='Specify the message type (default={}). ' - 'Possible values are "{}", and "{}".'.format( - NOTIFY_TYPES[0], '", "'.join(NOTIFY_TYPES[:-1]), - NOTIFY_TYPES[-1])) -@click.option('--input-format', '-i', default=NOTIFY_FORMATS[0], type=str, - metavar='FORMAT', - help='Specify the message input format (default={}). ' - 'Possible values are "{}", and "{}".'.format( - NOTIFY_FORMATS[0], '", "'.join(NOTIFY_FORMATS[:-1]), - NOTIFY_FORMATS[-1])) -@click.option('--theme', '-T', default='default', type=str, metavar='THEME', - help='Specify the default theme.') -@click.option('--tag', '-g', default=None, type=str, multiple=True, - metavar='TAG', help='Specify one or more tags to filter ' - 'which services to notify. Use multiple --tag (-g) entries to ' - '"OR" the tags together and comma separated to "AND" them. ' - 'If no tags are specified then all services are notified.') -@click.option('--disable-async', '-Da', is_flag=True, - help='Send all notifications sequentially') -@click.option('--dry-run', '-d', is_flag=True, - help='Perform a trial run but only prints the notification ' - 'services to-be triggered to stdout. 
Notifications are never ' - 'sent using this mode.') -@click.option('--details', '-l', is_flag=True, - help='Prints details about the current services supported by ' - 'Apprise.') -@click.option('--recursion-depth', '-R', default=DEFAULT_RECURSION_DEPTH, - type=int, - help='The number of recursive import entries that can be ' - 'loaded from within Apprise configuration. By default ' - 'this is set to {}.'.format(DEFAULT_RECURSION_DEPTH)) -@click.option('--verbose', '-v', count=True, - help='Makes the operation more talkative. Use multiple v to ' - 'increase the verbosity. I.e.: -vvvv') -@click.option('--interpret-escapes', '-e', is_flag=True, - help='Enable interpretation of backslash escapes') -@click.option('--interpret-emojis', '-j', is_flag=True, - help='Enable interpretation of :emoji: definitions') -@click.option('--debug', '-D', is_flag=True, help='Debug mode') -@click.option('--version', '-V', is_flag=True, - help='Display the apprise version and exit.') -@click.argument('urls', nargs=-1, - metavar='SERVER_URL [SERVER_URL2 [SERVER_URL3]]',) +@click.option( + "--body", + "-b", + default=None, + type=str, + help=( + "Specify the message body. If no body is specified then " + "content is read from ." + ), +) +@click.option( + "--title", + "-t", + default=None, + type=str, + help="Specify the message title. This field is completely optional.", +) +@click.option( + "--plugin-path", + "-P", + default=None, + type=str, + multiple=True, + metavar="PATH", + help="Specify one or more plugin paths to scan.", +) +@click.option( + "--storage-path", + "-S", + default=DEFAULT_STORAGE_PATH, + type=str, + metavar="PATH", + help=( + "Specify the path to the persistent storage location " + f"(default={DEFAULT_STORAGE_PATH})." + ), +) +@click.option( + "--storage-prune-days", + "-SPD", + default=DEFAULT_STORAGE_PRUNE_DAYS, + type=int, + help=( + "Define the number of days the storage prune should run using." + " Setting this to zero (0) will eliminate all accumulated content. 
By" + f" default this value is {DEFAULT_STORAGE_PRUNE_DAYS} days." + ), +) +@click.option( + "--storage-uid-length", + "-SUL", + default=DEFAULT_STORAGE_UID_LENGTH, + type=int, + help=( + "Define the number of unique characters to store persistent cache in." + f" By default this value is {DEFAULT_STORAGE_UID_LENGTH} characters." + ), +) +@click.option( + "--storage-mode", + "-SM", + default=DEFAULT_STORAGE_MODE.value, + type=str, + metavar="MODE", + help=( + "Specify the persistent storage operational mode " + f'(default={DEFAULT_STORAGE_MODE.value}). ' + 'Possible values are: "{}".'.format( + '", "'.join(mode.value for mode in PERSISTENT_STORE_MODE_CHOICES) + ) + ), +) +@click.option( + "--config", + "-c", + default=None, + type=str, + multiple=True, + metavar="CONFIG_URL", + help="Specify one or more configuration locations.", +) +@click.option( + "--attach", + "-a", + default=None, + type=str, + multiple=True, + metavar="ATTACHMENT_URL", + help="Specify one or more attachments.", +) +@click.option( + "--notification-type", + "-n", + default=DEFAULT_NOTIFY_TYPE.value, + type=str, + metavar="TYPE", + help=( + f"Specify the message type (default={DEFAULT_NOTIFY_TYPE.value}). " + 'Possible values are: "{}".'.format( + '", "'.join(nt.value for nt in NOTIFY_TYPE_CHOICES) + ) + ), +) +@click.option( + "--input-format", + "-i", + default=DEFAULT_NOTIFY_FORMAT.value, + type=str, + metavar="FORMAT", + help=( + f"Specify the message input format " + f"(default={DEFAULT_NOTIFY_FORMAT.value}). " + 'Possible values are: "{}".'.format( + '", "'.join(fmt.value for fmt in NOTIFY_FORMAT_CHOICES) + ) + ), +) +@click.option( + "--theme", + "-T", + default="default", + type=str, + metavar="THEME", + help="Specify the default theme.", +) +@click.option( + "--tag", + "-g", + default=None, + type=str, + multiple=True, + metavar="TAG", + help=( + "Specify one or more tags to filter which services to notify. Use " + "multiple --tag (-g) entries to match ANY tag. 
Use comma separators " + "to require ALL tags (strict match). Omit to notify untagged services " + 'only, or use "all" to notify everything.' + ), +) +@click.option( + "--disable-async", + "-Da", + is_flag=True, + help="Send all notifications sequentially", +) +@click.option( + "--dry-run", + "-d", + is_flag=True, + help=( + "Perform a trial run but only prints the notification " + "services to-be triggered to stdout. Notifications are never " + "sent using this mode." + ), +) +@click.option( + "--details", + "-l", + is_flag=True, + help="Prints details about the current services supported by Apprise.", +) +@click.option( + "--recursion-depth", + "-R", + default=DEFAULT_RECURSION_DEPTH, + type=int, + help=( + "The number of recursive import entries that can be " + "loaded from within Apprise configuration. By default " + f"this is set to {DEFAULT_RECURSION_DEPTH}." + ), +) +@click.option( + "--verbose", + "-v", + count=True, + help=( + "Makes the operation more talkative. Use multiple v to " + "increase the verbosity. 
I.e.: -vvvv" + ), +) +@click.option( + "--interpret-escapes", + "-e", + is_flag=True, + help="Enable interpretation of backslash escapes", +) +@click.option( + "--interpret-emojis", + "-j", + is_flag=True, + help="Enable interpretation of :emoji: definitions", +) +@click.option("--debug", "-D", is_flag=True, help="Debug mode") +@click.option( + "--version", + "-V", + is_flag=True, + help="Display the apprise version and exit.", +) +@click.argument( + "urls", + nargs=-1, + metavar="SERVER_URL [SERVER_URL2 [SERVER_URL3]]", +) @click.pass_context -def main(ctx, body, title, config, attach, urls, notification_type, theme, tag, - input_format, dry_run, recursion_depth, verbose, disable_async, - details, interpret_escapes, interpret_emojis, plugin_path, - storage_path, storage_mode, storage_prune_days, storage_uid_length, - debug, version): - """ - Send a notification to all of the specified servers identified by their +def main( + ctx, + body, + title, + config, + attach, + urls, + notification_type, + theme, + tag, + input_format, + dry_run, + recursion_depth, + verbose, + disable_async, + details, + interpret_escapes, + interpret_emojis, + plugin_path, + storage_path, + storage_mode, + storage_prune_days, + storage_uid_length, + debug, + version, +): + """Send a notification to all of the specified servers identified by their URLs the content provided within the title, body and notification-type. - For a list of all of the supported services and information on how to - use them, check out at https://github.com/caronc/apprise + For a list of all of the supported services and information on how to use + them, check out https://github.com/caronc/apprise """ # Note: Click ignores the return values of functions it wraps, If you # want to return a specific error code, you must call ctx.exit() # as you will see below. 
- debug = True if debug else False + debug = bool(debug) if debug: # Verbosity must be a minimum of 3 verbose = 3 if verbose < 3 else verbose @@ -467,12 +663,12 @@ def main(ctx, body, title, config, attach, urls, notification_type, theme, tag, logger.setLevel(logging.ERROR) # Format our logger - formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s') + formatter = logging.Formatter("%(asctime)s - %(levelname)s - %(message)s") ch.setFormatter(formatter) logger.addHandler(ch) # Update our asyncio logger - asyncio_logger = logging.getLogger('asyncio') + asyncio_logger = logging.getLogger("asyncio") for handler in logger.handlers: asyncio_logger.addHandler(handler) asyncio_logger.setLevel(logger.level) @@ -485,8 +681,9 @@ def main(ctx, body, title, config, attach, urls, notification_type, theme, tag, notification_type = notification_type.strip().lower() if notification_type not in NOTIFY_TYPES: click.echo( - 'The --notification-type (-n) value of {} is not supported.' - .format(notification_type)) + f"The --notification-type (-n) value of {notification_type} is not" + " supported." + ) click.echo("Try 'apprise --help' for more information.") # 2 is the same exit code returned by Click if there is a parameter # issue. For consistency, we also return a 2 @@ -495,8 +692,9 @@ def main(ctx, body, title, config, attach, urls, notification_type, theme, tag, input_format = input_format.strip().lower() if input_format not in NOTIFY_FORMATS: click.echo( - 'The --input-format (-i) value of {} is not supported.' - .format(input_format)) + f"The --input-format (-i) value of {input_format} is not" + " supported." + ) click.echo("Try 'apprise --help' for more information.") # 2 is the same exit code returned by Click if there is a parameter # issue. 
For consistency, we also return a 2 @@ -505,50 +703,56 @@ def main(ctx, body, title, config, attach, urls, notification_type, theme, tag, storage_mode = storage_mode.strip().lower() if storage_mode not in PERSISTENT_STORE_MODES: click.echo( - 'The --storage-mode (-SM) value of {} is not supported.' - .format(storage_mode)) + f"The --storage-mode (-SM) value of {storage_mode} is not" + " supported." + ) click.echo("Try 'apprise --help' for more information.") # 2 is the same exit code returned by Click if there is a parameter # issue. For consistency, we also return a 2 ctx.exit(2) # - # Apply Environment Over-rides if defined + # Apply Environment Overrides if defined # - _config_paths = DEFAULT_CONFIG_PATHS - if 'APPRISE_CONFIG' in os.environ: + config_paths = DEFAULT_CONFIG_PATHS + if "APPRISE_CONFIG" in os.environ: # Deprecate (this was from previous versions of Apprise <= 1.9.1) logger.deprecate( - 'APPRISE_CONFIG environment variable has been changed to ' - f'{DEFAULT_ENV_APPRISE_CONFIG_PATH}') + "APPRISE_CONFIG environment variable has been changed to " + f"{DEFAULT_ENV_APPRISE_CONFIG_PATH}" + ) logger.debug( - 'Loading provided APPRISE_CONFIG (deprecated) environment ' - 'variable') - _config_paths = (os.environ.get('APPRISE_CONFIG', '').strip(), ) + "Loading provided APPRISE_CONFIG (deprecated) environment variable" + ) + config_paths = (os.environ.get("APPRISE_CONFIG", "").strip(),) elif DEFAULT_ENV_APPRISE_CONFIG_PATH in os.environ: logger.debug( - f'Loading provided {DEFAULT_ENV_APPRISE_CONFIG_PATH} ' - 'environment variable') - _config_paths = re.split( - r'[\r\n;]+', os.environ.get( - DEFAULT_ENV_APPRISE_CONFIG_PATH).strip()) - - _plugin_paths = DEFAULT_PLUGIN_PATHS + f"Loading provided {DEFAULT_ENV_APPRISE_CONFIG_PATH} " + "environment variable" + ) + config_paths = re.split( + r"[\r\n;]+", + os.environ.get(DEFAULT_ENV_APPRISE_CONFIG_PATH).strip(), + ) + + plugin_paths_ = DEFAULT_PLUGIN_PATHS if DEFAULT_ENV_APPRISE_PLUGIN_PATH in os.environ: 
logger.debug( - f'Loading provided {DEFAULT_ENV_APPRISE_PLUGIN_PATH} environment ' - 'variable') - _plugin_paths = re.split( - r'[\r\n;]+', os.environ.get( - DEFAULT_ENV_APPRISE_PLUGIN_PATH).strip()) + f"Loading provided {DEFAULT_ENV_APPRISE_PLUGIN_PATH} environment " + "variable" + ) + plugin_paths_ = re.split( + r"[\r\n;]+", + os.environ.get(DEFAULT_ENV_APPRISE_PLUGIN_PATH).strip(), + ) if DEFAULT_ENV_APPRISE_STORAGE_PATH in os.environ: logger.debug( - f'Loading provided {DEFAULT_ENV_APPRISE_STORAGE_PATH} environment ' - 'variable') - storage_path = \ - os.environ.get(DEFAULT_ENV_APPRISE_STORAGE_PATH).strip() + f"Loading provided {DEFAULT_ENV_APPRISE_STORAGE_PATH} environment " + "variable" + ) + storage_path = os.environ.get(DEFAULT_ENV_APPRISE_STORAGE_PATH).strip() # # Continue with initialization process @@ -556,14 +760,17 @@ def main(ctx, body, title, config, attach, urls, notification_type, theme, tag, # Prepare a default set of plugin paths to scan; anything specified # on the CLI always trumps - plugin_paths = \ - [path for path in _plugin_paths if exists(path_decode(path))] \ - if not plugin_path else plugin_path + plugin_paths = ( + plugin_path + if plugin_path + else [path for path in plugin_paths_ if exists(path_decode(path))] + ) if storage_uid_length < 2: click.echo( - 'The --storage-uid-length (-SUL) value can not be lower ' - 'then two (2).') + "The --storage-uid-length (-SUL) value can not be lower " + "than two (2)." + ) click.echo("Try 'apprise --help' for more information.") # 2 is the same exit code returned by Click if there is a @@ -574,40 +781,32 @@ def main(ctx, body, title, config, attach, urls, notification_type, theme, tag, asset = AppriseAsset( # Our body format body_format=input_format, - # Interpret Escapes interpret_escapes=interpret_escapes, - # Interpret Emojis interpret_emojis=None if not interpret_emojis else True, - # Set the theme theme=theme, - # Async mode allows a user to send all of their notifications # asynchronously. 
This was made an option incase there are problems # in the future where it is better that everything runs sequentially/ # synchronously instead. async_mode=disable_async is not True, - # Load our plugins plugin_paths=plugin_paths, - # Load our persistent storage path storage_path=path_decode(storage_path), - # Our storage URL ID Length storage_idlen=storage_uid_length, - # Define if we flush to disk as soon as possible or not when required - storage_mode=storage_mode + storage_mode=storage_mode, ) # Create our Apprise object a = Apprise(asset=asset, debug=debug, location=ContentLocation.LOCAL) # Track if we are performing a storage action - storage_action = True if urls and 'storage'.startswith(urls[0]) else False + storage_action = bool(urls and "storage".startswith(urls[0])) if details: # Print details and exit @@ -615,74 +814,90 @@ def main(ctx, body, title, config, attach, urls, notification_type, theme, tag, # Sort our results: plugins = sorted( - results['schemas'], key=lambda i: str(i['service_name'])) + results["schemas"], key=lambda i: str(i["service_name"]) + ) for entry in plugins: - protocols = [] if not entry['protocols'] else \ - [p for p in entry['protocols'] - if isinstance(p, str)] + protocols = ( + [] + if not entry["protocols"] + else [p for p in entry["protocols"] if isinstance(p, str)] + ) protocols.extend( - [] if not entry['secure_protocols'] else - [p for p in entry['secure_protocols'] - if isinstance(p, str)]) + [] + if not entry["secure_protocols"] + else [ + p for p in entry["secure_protocols"] if isinstance(p, str) + ] + ) if len(protocols) == 1: # Simplify view by swapping {schema} with the single # protocol value # Convert tuple to list - entry['details']['templates'] = \ - list(entry['details']['templates']) - - for x in range(len(entry['details']['templates'])): - entry['details']['templates'][x] = \ - re.sub( - r'^[^}]+}://', - '{}://'.format(protocols[0]), - entry['details']['templates'][x]) - - fg = "green" if entry['enabled'] 
else "red" - if entry['category'] == 'custom': + entry["details"]["templates"] = list( + entry["details"]["templates"] + ) + + for x in range(len(entry["details"]["templates"])): + entry["details"]["templates"][x] = re.sub( + r"^[^}]+}://", + f"{protocols[0]}://", + entry["details"]["templates"][x], + ) + + fg = "green" if entry["enabled"] else "red" + if entry["category"] == "custom": # Identify these differently fg = "cyan" # Flip the enable switch so it forces the requirements # to be displayed - entry['enabled'] = False + entry["enabled"] = False - click.echo(click.style( - '{} {:<30} '.format( - '+' if entry['enabled'] else '-', - str(entry['service_name'])), fg=fg, bold=True), - nl=(not entry['enabled'] or len(protocols) == 1)) + click.echo( + click.style( + "{} {:<30} ".format( + "+" if entry["enabled"] else "-", + str(entry["service_name"]), + ), + fg=fg, + bold=True, + ), + nl=(not entry["enabled"] or len(protocols) == 1), + ) - if not entry['enabled']: - if entry['requirements']['details']: - click.echo( - ' ' + str(entry['requirements']['details'])) + if not entry["enabled"]: + if entry["requirements"]["details"]: + click.echo(" " + str(entry["requirements"]["details"])) - if entry['requirements']['packages_required']: - click.echo(' Python Packages Required:') - for req in entry['requirements']['packages_required']: - click.echo(' - ' + req) + if entry["requirements"]["packages_required"]: + click.echo(" Python Packages Required:") + for req in entry["requirements"]["packages_required"]: + click.echo(" - " + req) - if entry['requirements']['packages_recommended']: - click.echo(' Python Packages Recommended:') - for req in entry['requirements']['packages_recommended']: - click.echo(' - ' + req) + if entry["requirements"]["packages_recommended"]: + click.echo(" Python Packages Recommended:") + for req in entry["requirements"]["packages_recommended"]: + click.echo(" - " + req) # new line padding between entries - if entry['category'] == 'native': + if 
entry["category"] == "native": click.echo() continue if len(protocols) > 1: - click.echo('| Schema(s): {}'.format( - ', '.join(protocols), - )) + click.echo( + "| Schema(s): {}".format( + ", ".join(protocols), + ) + ) - prefix = ' - ' - click.echo('{}{}'.format( - prefix, - '\n{}'.format(prefix).join(entry['details']['templates']))) + prefix = " - " + click.echo( + "{}{}".format( + prefix, f"\n{prefix}".join(entry["details"]["templates"]) + ) + ) # new line padding between entries click.echo() @@ -700,7 +915,8 @@ def main(ctx, body, title, config, attach, urls, notification_type, theme, tag, if tag: # Ignore any tags specified logger.warning( - '--tag (-g) entries are ignored when using specified URLs') + "--tag (-g) entries are ignored when using specified URLs" + ) tag = None # Load our URLs (if any defined) @@ -710,23 +926,26 @@ def main(ctx, body, title, config, attach, urls, notification_type, theme, tag, if config: # Provide a warning to the end user if they specified both logger.warning( - 'You defined both URLs and a --config (-c) entry; ' - 'Only the URLs will be referenced.') + "You defined both URLs and a --config (-c) entry; " + "Only the URLs will be referenced." 
+ ) elif config: # We load our configuration file(s) now only if no URLs were specified # Specified config entries trump all - a.add(AppriseConfig( - paths=config, asset=asset, recursion=recursion_depth)) + a.add( + AppriseConfig(paths=config, asset=asset, recursion=recursion_depth) + ) - elif os.environ.get(DEFAULT_ENV_APPRISE_URLS, '').strip(): + elif os.environ.get(DEFAULT_ENV_APPRISE_URLS, "").strip(): logger.debug( - f'Loading provided {DEFAULT_ENV_APPRISE_URLS} environment ' - 'variable') + f"Loading provided {DEFAULT_ENV_APPRISE_URLS} environment variable" + ) if tag: # Ignore any tags specified logger.warning( - '--tag (-g) entries are ignored when using specified URLs') + "--tag (-g) entries are ignored when using specified URLs" + ) tag = None # Attempt to use our APPRISE_URLS environment variable (if populated) @@ -734,14 +953,19 @@ def main(ctx, body, title, config, attach, urls, notification_type, theme, tag, else: # Load default configuration - a.add(AppriseConfig( - paths=[f for f in _config_paths if isfile(path_decode(f))], - asset=asset, recursion=recursion_depth)) + a.add( + AppriseConfig( + paths=[f for f in config_paths if isfile(path_decode(f))], + asset=asset, + recursion=recursion_depth, + ) + ) if not dry_run and not (a or storage_action): click.echo( - 'You must specify at least one server URL or populated ' - 'configuration file.') + "You must specify at least one server URL or populated " + "configuration file." + ) click.echo("Try 'apprise --help' for more information.") ctx.exit(1) @@ -758,8 +982,9 @@ def main(ctx, body, title, config, attach, urls, notification_type, theme, tag, # if storage_prune_days < 0: click.echo( - 'The --storage-prune-days (-SPD) value can not be lower ' - 'then zero (0).') + "The --storage-prune-days (-SPD) value can not be lower " + "than zero (0)." 
+ ) click.echo("Try 'apprise --help' for more information.") # 2 is the same exit code returned by Click if there is a @@ -776,86 +1001,108 @@ def main(ctx, body, title, config, attach, urls, notification_type, theme, tag, action = PERSISTENT_STORAGE_MODES[0] if filter_uids: - _action = next( # pragma: no branch - (a for a in PERSISTENT_STORAGE_MODES - if a.startswith(filter_uids[0])), None) + action_ = next( # pragma: no branch + ( + a + for a in PERSISTENT_STORAGE_MODES + if a.startswith(filter_uids[0]) + ), + None, + ) - if _action: + if action_: # pop 'action' off the head of our list filter_uids = filter_uids[1:] - action = _action + action = action_ # Get our detected URL IDs uids = {} - for plugin in (a if not tags else a.find(tag=tags)): - _id = plugin.url_id() - if not _id: + for plugin in a if not tags else a.find(tag=tags): + id_ = plugin.url_id() + if not id_: continue if filter_uids and next( - (False for n in filter_uids if _id.startswith(n)), True): + (False for n in filter_uids if id_.startswith(n)), True + ): continue - if _id not in uids: - uids[_id] = { - 'plugins': [plugin], - 'state': PersistentStoreState.UNUSED, - 'size': 0, + if id_ not in uids: + uids[id_] = { + "plugins": [plugin], + "state": PersistentStoreState.UNUSED.value, + "size": 0, } else: - # It's possible to have more then one URL point to the same - # location (thus match against the same url id more then once - uids[_id]['plugins'].append(plugin) + # It's possible to have more than one URL point to the same + # location (thus match against the same url id more than once + uids[id_]["plugins"].append(plugin) if action == PersistentStorageMode.LIST: detected_uid = PersistentStore.disk_scan( # Use our asset path as it has already been properly parsed path=asset.storage_path, - # Provide filter if specified namespace=filter_uids, ) - for _id in detected_uid: - size, _ = dir_size(os.path.join(asset.storage_path, _id)) - if _id in uids: - uids[_id]['state'] = 
PersistentStoreState.ACTIVE - uids[_id]['size'] = size + for id_ in detected_uid: + size, _ = dir_size(os.path.join(asset.storage_path, id_)) + if id_ in uids: + uids[id_]["state"] = PersistentStoreState.ACTIVE.value + uids[id_]["size"] = size elif not tags: - uids[_id] = { - 'plugins': [], + uids[id_] = { + "plugins": [], # No cross reference (wasted space?) - 'state': PersistentStoreState.STALE, + "state": PersistentStoreState.STALE.value, # Acquire disk space - 'size': size, + "size": size, } for idx, (uid, meta) in enumerate(uids.items()): - fg = "green" \ - if meta['state'] == PersistentStoreState.ACTIVE else ( + fg = ( + "green" + if meta["state"] == PersistentStoreState.ACTIVE.value + else ( "red" - if meta['state'] == PersistentStoreState.STALE else - "white") + if meta["state"] == PersistentStoreState.STALE.value + else "white" + ) + ) if idx > 0: # New line click.echo() - click.echo("{: 4d}. ".format(idx + 1), nl=False) - click.echo(click.style("{:<52} {:<8} {}".format( - uid, bytes_to_str(meta['size']), meta['state']), - fg=fg, bold=True)) - - for entry in meta['plugins']: + click.echo(f"{idx + 1: 4d}. ", nl=False) + click.echo( + click.style( + "{:<52} {:<8} {}".format( + uid, bytes_to_str(meta["size"]), meta["state"] + ), + fg=fg, + bold=True, + ) + ) + + for entry in meta["plugins"]: url = entry.url(privacy=True) - click.echo("{:>7} {}".format( - '-', - url if len(url) <= (columns - 8) else '{}...'.format( - url[:columns - 11]))) + click.echo( + "{:>7} {}".format( + "-", + ( + url + if len(url) <= (columns - 8) + else f"{url[:columns - 11]}..." 
+ ), + ) + ) if entry.tags: - click.echo("{:>10}: {}".format( - 'tags', ', '.join(entry.tags))) + click.echo( + "{:>10}: {}".format("tags", ", ".join(entry.tags)) + ) else: # PersistentStorageMode.PRUNE or PersistentStorageMode.CLEAR if action == PersistentStorageMode.CLEAR: @@ -866,10 +1113,11 @@ def main(ctx, body, title, config, attach, urls, notification_type, theme, tag, # Use our asset path as it has already been properly parsed path=asset.storage_path, # Provide our namespaces if they exist - namespace=None if not filter_uids else filter_uids, + namespace=filter_uids if filter_uids else None, # Convert expiry from days to seconds expires=storage_prune_days * 60 * 60 * 24, - action=not dry_run) + action=not dry_run, + ) ctx.exit(0) # end if disk_prune() @@ -879,14 +1127,18 @@ def main(ctx, body, title, config, attach, urls, notification_type, theme, tag, if not dry_run: if body is None: - logger.trace('No --body (-b) specified; reading from stdin') + logger.trace("No --body (-b) specified; reading from stdin") # if no body was specified, then read from STDIN - body = click.get_text_stream('stdin').read() + body = click.get_text_stream("stdin").read() # now print it out result = a.notify( - body=body, title=title, notify_type=notification_type, tag=tags, - attach=attach) + body=body, + title=title, + notify_type=notification_type, + tag=tags, + attach=attach, + ) else: # Number of columns to assume in the terminal. In future, maybe this # can be detected and made dynamic. The actual column count is 80, but @@ -900,18 +1152,27 @@ def main(ctx, body, title, config, attach, urls, notification_type, theme, tag, for idx, server in enumerate(a.find(tag=tags)): url = server.url(privacy=True) - click.echo("{: 4d}. {}".format( - idx + 1, - url if len(url) <= (columns - 8) else '{}...'.format( - url[:columns - 9]))) + click.echo( + "{: 4d}. {}".format( + idx + 1, + ( + url + if len(url) <= (columns - 8) + else f"{url[:columns - 9]}..." 
+ ), + ) + ) # Share our URL ID - click.echo("{:>10}: {}".format( - 'uid', '- n/a -' if not server.url_id() - else server.url_id())) + click.echo( + "{:>10}: {}".format( + "uid", + "- n/a -" if not server.url_id() else server.url_id(), + ) + ) if server.tags: - click.echo("{:>10}: {}".format('tags', ', '.join(server.tags))) + click.echo("{:>10}: {}".format("tags", ", ".join(server.tags))) # Initialize a default response of nothing matched, otherwise # if we matched at least one entry, we can return True diff --git a/libs/apprise/common.py b/libs/apprise/common.py index 9b8e993c25..ec62eaf592 100644 --- a/libs/apprise/common.py +++ b/libs/apprise/common.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. -# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -26,207 +25,182 @@ # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. +from enum import Enum -class NotifyType: - """ - A simple mapping of notification types most commonly used with - all types of logging and notification services. - """ - INFO = 'info' - SUCCESS = 'success' - WARNING = 'warning' - FAILURE = 'failure' +class NotifyType(str, Enum): + """A simple mapping of notification types most commonly used with all types + of logging and notification services.""" -NOTIFY_TYPES = ( - NotifyType.INFO, - NotifyType.SUCCESS, - NotifyType.WARNING, - NotifyType.FAILURE, -) + INFO = "info" + SUCCESS = "success" + WARNING = "warning" + FAILURE = "failure" -class NotifyImageSize: - """ - A list of pre-defined image sizes to make it easier to work with defined - plugins. 
- """ - XY_32 = '32x32' - XY_72 = '72x72' - XY_128 = '128x128' - XY_256 = '256x256' +# Define our types so we can verify if we need to +NOTIFY_TYPES: frozenset[str] = frozenset(e.value for e in NotifyType) -NOTIFY_IMAGE_SIZES = ( - NotifyImageSize.XY_32, - NotifyImageSize.XY_72, - NotifyImageSize.XY_128, - NotifyImageSize.XY_256, -) +class NotifyImageSize(str, Enum): + """A list of pre-defined image sizes to make it easier to work with defined + plugins.""" + XY_32 = "32x32" + XY_72 = "72x72" + XY_128 = "128x128" + XY_256 = "256x256" -class NotifyFormat: - """ - A list of pre-defined text message formats that can be passed via the - apprise library. - """ - TEXT = 'text' - HTML = 'html' - MARKDOWN = 'markdown' +# Define our image sizes so we can verify if we need to +NOTIFY_IMAGE_SIZES: frozenset[str] = \ + frozenset(e.value for e in NotifyImageSize) -NOTIFY_FORMATS = ( - NotifyFormat.TEXT, - NotifyFormat.HTML, - NotifyFormat.MARKDOWN, -) +class NotifyFormat(str, Enum): + """A list of pre-defined text message formats that can be passed via the + apprise library.""" -class OverflowMode: - """ - A list of pre-defined modes of how to handle the text when it exceeds the - defined maximum message size. - """ + TEXT = "text" + HTML = "html" + MARKDOWN = "markdown" + + +# Define our formats so we can verify if we need to +NOTIFY_FORMATS: frozenset[str] = frozenset(e.value for e in NotifyFormat) + + +class OverflowMode(str, Enum): + """A list of pre-defined modes of how to handle the text when it exceeds + the defined maximum message size.""" # Send the data as is; untouched. Let the upstream server decide how the # content is handled. Some upstream services might gracefully handle this # with expected intentions; others might not. 
- UPSTREAM = 'upstream' + UPSTREAM = "upstream" # Always truncate the text when it exceeds the maximum message size and # send it anyway - TRUNCATE = 'truncate' + TRUNCATE = "truncate" # Split the message into multiple smaller messages that fit within the # limits of what is expected. The smaller messages are sent - SPLIT = 'split' + SPLIT = "split" # Define our modes so we can verify if we need to -OVERFLOW_MODES = ( - OverflowMode.UPSTREAM, - OverflowMode.TRUNCATE, - OverflowMode.SPLIT, -) +OVERFLOW_MODES: frozenset[str] = frozenset(e.value for e in OverflowMode) -class ConfigFormat: - """ - A list of pre-defined config formats that can be passed via the - apprise library. - """ +class ConfigFormat(str, Enum): + """A list of pre-defined config formats that can be passed via the apprise + library.""" # A text based configuration. This consists of a list of URLs delimited by # a new line. pound/hashtag (#) or semi-colon (;) can be used as comment # characters. - TEXT = 'text' + TEXT = "text" # YAML files allow a more rich of an experience when settig up your # apprise configuration files. - YAML = 'yaml' + YAML = "yaml" # Define our configuration formats mostly used for verification -CONFIG_FORMATS = ( - ConfigFormat.TEXT, - ConfigFormat.YAML, -) +CONFIG_FORMATS: frozenset[str] = frozenset(e.value for e in ConfigFormat) -class ContentIncludeMode: - """ - The different Content inclusion modes. All content based plugins will - have one of these associated with it. +class ContentIncludeMode(str, Enum): + """The different Content inclusion modes. + + All content based plugins will have one of these associated with it. """ + # - Content inclusion of same type only; hence a file:// can include # a file:// # - Cross file inclusion is not allowed unless insecure_includes (a flag) # is set to True. 
In these cases STRICT acts as type ALWAYS - STRICT = 'strict' + STRICT = "strict" # This content type can never be included - NEVER = 'never' + NEVER = "never" # This content can always be included - ALWAYS = 'always' + ALWAYS = "always" -CONTENT_INCLUDE_MODES = ( - ContentIncludeMode.STRICT, - ContentIncludeMode.NEVER, - ContentIncludeMode.ALWAYS, -) +# Define our file inclusion types so we can verify if we need to +CONTENT_INCLUDE_MODES: frozenset[str] = \ + frozenset(e.value for e in ContentIncludeMode) -class ContentLocation: - """ - This is primarily used for handling file attachments. The idea is - to track the source of the attachment itself. We don't want - remote calls to a server to access local attachments for example. +class ContentLocation(str, Enum): + """This is primarily used for handling file attachments. The idea is to + track the source of the attachment itself. We don't want remote calls to a + server to access local attachments for example. - By knowing the attachment type and cross-associating it with how - we plan on accessing the content, we can make a judgement call - (for security reasons) if we will allow it. + By knowing the attachment type and cross-associating it with how we plan on + accessing the content, we can make a judgement call (for security reasons) + if we will allow it. - Obviously local uses of apprise can access both local and remote - type files. + Obviously local uses of apprise can access both local and remote type + files. 
""" + # Content is located locally (on the same server as apprise) - LOCAL = 'local' + LOCAL = "local" # Content is located in a remote location - HOSTED = 'hosted' + HOSTED = "hosted" # Content is inaccessible - INACCESSIBLE = 'n/a' + INACCESSIBLE = "n/a" -CONTENT_LOCATIONS = ( - ContentLocation.LOCAL, - ContentLocation.HOSTED, - ContentLocation.INACCESSIBLE, -) +# Define our location types so we can verify if we need to +CONTENT_LOCATIONS: frozenset[str] = frozenset(e.value for e in ContentLocation) -class PersistentStoreMode: +class PersistentStoreMode(str, Enum): # Allow persistent storage; write on demand - AUTO = 'auto' + AUTO = "auto" # Always flush every change to disk after it's saved. This has higher i/o # but enforces disk reflects what was set immediately - FLUSH = 'flush' + FLUSH = "flush" # memory based store only - MEMORY = 'memory' + MEMORY = "memory" -PERSISTENT_STORE_MODES = ( - PersistentStoreMode.AUTO, - PersistentStoreMode.FLUSH, - PersistentStoreMode.MEMORY, -) +# Define our persistent storage modes so we can verify if we need to +PERSISTENT_STORE_MODES: frozenset[str] = \ + frozenset(e.value for e in PersistentStoreMode) -class PersistentStoreState: - """ - Defines the persistent states describing what has been cached - """ +class PersistentStoreState(str, Enum): + """Defines the persistent states describing what has been cached.""" + # Persistent Directory is actively cross-referenced against a matching URL - ACTIVE = 'active' + ACTIVE = "active" # Persistent Directory is no longer being used or has no cross-reference - STALE = 'stale' + STALE = "stale" # Persistent Directory is not utilizing any disk space at all, however # it potentially could if the plugin it successfully cross-references # is utilized - UNUSED = 'unused' + UNUSED = "unused" + +# Define our persistent storage states so we can verify if we need to +PERSISTENT_STORE_STATES: frozenset[str] = \ + frozenset(e.value for e in PersistentStoreState) # This is a reserved tag that 
is automatically assigned to every # Notification Plugin -MATCH_ALL_TAG = 'all' +MATCH_ALL_TAG = "all" # Will cause notification to trigger under any circumstance even if an # exclusive tagging was provided. -MATCH_ALWAYS_TAG = 'always' +MATCH_ALWAYS_TAG = "always" diff --git a/libs/apprise/common.pyi b/libs/apprise/common.pyi deleted file mode 100644 index 862fc4f272..0000000000 --- a/libs/apprise/common.pyi +++ /dev/null @@ -1,22 +0,0 @@ -import types -import typing as t - - -class NotifyType: - INFO: NotifyType - SUCCESS: NotifyType - WARNING: NotifyType - FAILURE: NotifyType - -class NotifyFormat: - TEXT: NotifyFormat - HTML: NotifyFormat - MARKDOWN: NotifyFormat - -class ContentLocation: - LOCAL: ContentLocation - HOSTED: ContentLocation - INACCESSIBLE: ContentLocation - - -NOTIFY_MODULE_MAP: t.Dict[str, t.Dict[str, t.Union[t.Type["NotifyBase"], types.ModuleType]]] diff --git a/libs/apprise/compat.py b/libs/apprise/compat.py new file mode 100644 index 0000000000..cbf343f23e --- /dev/null +++ b/libs/apprise/compat.py @@ -0,0 +1,50 @@ +# BSD 2-Clause License +# +# Apprise - Push Notification Library. +# Copyright (c) 2026, Chris Caron +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +# Added for Python 3.9 Compatibility + +from dataclasses import dataclass as _dataclass +from typing import Any, Callable, TypeVar + +_T = TypeVar("_T") + + +def dataclass_compat(*dargs: Any, **dkwargs: Any) -> Callable[[_T], _T]: + """ + dataclass() wrapper that drops unsupported kwargs on older Python. + + Python 3.9 does not support slots= in dataclasses.dataclass(). + """ + try: + return _dataclass(*dargs, **dkwargs) + + except TypeError: + # Only strip slots when it is the cause + if "slots" in dkwargs: + dkwargs.pop("slots", None) + return _dataclass(*dargs, **dkwargs) + raise diff --git a/libs/apprise/config/__init__.py b/libs/apprise/config/__init__.py index edcc550793..7069e7f94c 100644 --- a/libs/apprise/config/__init__.py +++ b/libs/apprise/config/__init__.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. -# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -27,14 +26,14 @@ # POSSIBILITY OF SUCH DAMAGE. 
# Used for testing -from .base import ConfigBase from ..manager_config import ConfigurationManager +from .base import ConfigBase # Initalize our Config Manager Singleton C_MGR = ConfigurationManager() __all__ = [ # Reference - 'ConfigBase', - 'ConfigurationManager', + "ConfigBase", + "ConfigurationManager", ] diff --git a/libs/apprise/config/base.py b/libs/apprise/config/base.py index 5aa64312b5..cfe306f9e5 100644 --- a/libs/apprise/config/base.py +++ b/libs/apprise/config/base.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. -# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -25,24 +24,27 @@ # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. +from __future__ import annotations +from collections import deque import os import re -import yaml import time -from .. import plugins -from .. import common +import yaml + +from .. 
import common, plugins from ..asset import AppriseAsset -from ..url import URLBase -from ..utils.parse import GET_SCHEMA_RE, parse_list, parse_bool, parse_urls -from ..utils.cwe312 import cwe312_url +from ..logger import logging from ..manager_config import ConfigurationManager from ..manager_plugins import NotificationManager +from ..url import URLBase +from ..utils.cwe312 import cwe312_url +from ..utils.parse import GET_SCHEMA_RE, parse_bool, parse_list, parse_urls +from ..utils.time import zoneinfo # Test whether token is valid or not -VALID_TOKEN = re.compile( - r'(?P[a-z0-9][a-z0-9_]+)', re.I) +VALID_TOKEN = re.compile(r"(?P[a-z0-9][a-z0-9_]+)", re.I) # Grant access to our Notification Manager Singleton N_MGR = NotificationManager() @@ -52,12 +54,10 @@ class ConfigBase(URLBase): - """ - This is the base class for all supported configuration sources - """ + """This is the base class for all supported configuration sources.""" # The Default Encoding to use if not otherwise detected - encoding = 'utf-8' + encoding = "utf-8" # The default expected configuration format unless otherwise # detected by the sub-modules @@ -79,11 +79,15 @@ class ConfigBase(URLBase): # the config path manages the handling of relative include config_path = os.getcwd() - def __init__(self, cache=True, recursion=0, insecure_includes=False, - **kwargs): - """ - Initialize some general logging and common server arguments that will - keep things consistent when working with the configurations that + def __init__( + self, + cache: bool | int = True, + recursion: int = 0, + insecure_includes: bool = False, + **kwargs: object, + ) -> None: + """Initialize some general logging and common server arguments that + will keep things consistent when working with the configurations that inherit this class. 
By default we cache our responses so that subsiquent calls does not @@ -137,51 +141,52 @@ def __init__(self, cache=True, recursion=0, insecure_includes=False, # Initialize our insecure_includes flag self.insecure_includes = insecure_includes - if 'encoding' in kwargs: + if "encoding" in kwargs: # Store the encoding - self.encoding = kwargs.get('encoding') + self.encoding = kwargs.get("encoding") - if 'format' in kwargs \ - and isinstance(kwargs['format'], str): - # Store the enforced config format - self.config_format = kwargs.get('format').lower() + fmt = kwargs.get("format") + if fmt: + try: + self.config_format = ( + fmt if isinstance(fmt, common.ConfigFormat) + else common.ConfigFormat(fmt.lower()) + ) - if self.config_format not in common.CONFIG_FORMATS: - # Simple error checking - err = 'An invalid config format ({}) was specified.'.format( - self.config_format) + except (AttributeError, ValueError): + err = f"An invalid config format ({fmt}) was specified." self.logger.warning(err) - raise TypeError(err) + raise TypeError(err) from None # Set our cache flag; it can be True or a (positive) integer try: self.cache = cache if isinstance(cache, bool) else int(cache) if self.cache < 0: - err = 'A negative cache value ({}) was specified.'.format( - cache) + err = f"A negative cache value ({cache}) was specified." self.logger.warning(err) raise TypeError(err) except (ValueError, TypeError): - err = 'An invalid cache value ({}) was specified.'.format(cache) + err = f"An invalid cache value ({cache}) was specified." self.logger.warning(err) - raise TypeError(err) + raise TypeError(err) from None return - def servers(self, asset=None, **kwargs): - """ - Performs reads loaded configuration and returns all of the services - that could be parsed and loaded. 
- - """ + def servers( + self, + asset: AppriseAsset | None = None, + **kwargs: object, + ) -> list[plugins.NotifyBase]: + """Performs reads loaded configuration and returns all of the services + that could be parsed and loaded.""" if not self.expired(): # We already have cached results to return; use them return self._cached_servers # Our cached response object - self._cached_servers = list() + self._cached_servers = [] # read() causes the child class to do whatever it takes for the # config plugin to load the data source and return unparsed content @@ -196,12 +201,14 @@ def servers(self, asset=None, **kwargs): # Our Configuration format uses a default if one wasn't one detected # or enfored. - config_format = \ - self.default_config_format \ - if self.config_format is None else self.config_format + config_format = ( + self.default_config_format + if self.config_format is None + else self.config_format + ) # Dynamically load our parse_ function based on our config format - fn = getattr(ConfigBase, 'config_parse_{}'.format(config_format)) + fn = getattr(ConfigBase, f"config_parse_{config_format.value}") # Initialize our asset object asset = asset if isinstance(asset, AppriseAsset) else self.asset @@ -209,6 +216,11 @@ def servers(self, asset=None, **kwargs): # Execute our config parse function which always returns a tuple # of our servers and our configuration servers, configs = fn(content=content, asset=asset) + + # Free memory + del content + + # Add entry to our server list self._cached_servers.extend(servers) # Configuration files were detected; recursively populate them @@ -221,27 +233,29 @@ def servers(self, asset=None, **kwargs): schema = GET_SCHEMA_RE.match(url) if schema is None: # Plan B is to assume we're dealing with a file - schema = 'file' + schema = "file" if not os.path.isabs(url): # We're dealing with a relative path; prepend # our current config path url = os.path.join(self.config_path, url) - url = '{}://{}'.format(schema, URLBase.quote(url)) + 
url = f"{schema}://{URLBase.quote(url)}" else: # Ensure our schema is always in lower case - schema = schema.group('schema').lower() + schema = schema.group("schema").lower() # Some basic validation if schema not in C_MGR: ConfigBase.logger.warning( - 'Unsupported include schema {}.'.format(schema)) + f"Unsupported include schema {schema}." + ) continue # CWE-312 (Secure Logging) Handling - loggable_url = url if not asset.secure_logging \ - else cwe312_url(url) + loggable_url = ( + url if not asset.secure_logging else cwe312_url(url) + ) # Parse our url details of the server object as dictionary # containing all of the information parsed from our URL @@ -249,94 +263,93 @@ def servers(self, asset=None, **kwargs): if not results: # Failed to parse the server URL self.logger.warning( - 'Unparseable include URL {}'.format(loggable_url)) + f"Unparseable include URL {loggable_url}" + ) continue # Handle cross inclusion based on allow_cross_includes rules - if (C_MGR[schema].allow_cross_includes == - common.ContentIncludeMode.STRICT - and schema not in self.schemas() - and not self.insecure_includes) or C_MGR[schema] \ - .allow_cross_includes == \ - common.ContentIncludeMode.NEVER: + if ( + C_MGR[schema].allow_cross_includes + == common.ContentIncludeMode.STRICT + and schema not in self.schemas() + and not self.insecure_includes + ) or C_MGR[ + schema + ].allow_cross_includes == common.ContentIncludeMode.NEVER: # Prevent the loading if insecure base protocols ConfigBase.logger.warning( - 'Including {}:// based configuration is prohibited. ' - 'Ignoring URL {}'.format(schema, loggable_url)) + f"Including {schema}:// based configuration is" + f" prohibited. 
Ignoring URL {loggable_url}" + ) continue # Prepare our Asset Object - results['asset'] = asset + results["asset"] = asset # No cache is required because we're just lumping this in # and associating it with the cache value we've already # declared (prior to our recursion) - results['cache'] = False + results["cache"] = False # Recursion can never be parsed from the URL; we decrement # it one level - results['recursion'] = self.recursion - 1 + results["recursion"] = self.recursion - 1 # Insecure Includes flag can never be parsed from the URL - results['insecure_includes'] = self.insecure_includes + results["insecure_includes"] = self.insecure_includes try: # Attempt to create an instance of our plugin using the # parsed URL information - cfg_plugin = C_MGR[results['schema']](**results) + cfg_plugin = C_MGR[results["schema"]](**results) except Exception as e: # the arguments are invalid or can not be used. self.logger.warning( - 'Could not load include URL: {}'.format(loggable_url)) - self.logger.debug('Loading Exception: {}'.format(str(e))) + f"Could not load include URL: {loggable_url}" + ) + self.logger.debug(f"Loading Exception: {e!s}") continue # if we reach here, we can now add this servers found # in this configuration file to our list - self._cached_servers.extend( - cfg_plugin.servers(asset=asset)) - - # We no longer need our configuration object - del cfg_plugin + self._cached_servers.extend(cfg_plugin.servers(asset=asset)) else: # CWE-312 (Secure Logging) Handling - loggable_url = url if not asset.secure_logging \ - else cwe312_url(url) + loggable_url = ( + url if not asset.secure_logging else cwe312_url(url) + ) self.logger.debug( - 'Recursion limit reached; ignoring Include URL: %s', - loggable_url) + "Recursion limit reached; ignoring Include URL: %s", + loggable_url, + ) if self._cached_servers: self.logger.info( - 'Loaded {} entries from {}'.format( - len(self._cached_servers), - self.url(privacy=asset.secure_logging))) + f"Loaded 
{len(self._cached_servers)} entries from" + f" {self.url(privacy=asset.secure_logging)}" + ) else: self.logger.warning( - 'Failed to load Apprise configuration from {}'.format( - self.url(privacy=asset.secure_logging))) + "Failed to load Apprise configuration from" + f" {self.url(privacy=asset.secure_logging)}" + ) # Set the time our content was cached at self._cached_time = time.time() return self._cached_servers - def read(self): - """ - This object should be implimented by the child classes - - """ + def read(self) -> str | None: + """This object should be implimented by the child classes.""" return None - def expired(self): - """ - Simply returns True if the configuration should be considered - as expired or False if content should be retrieved. - """ + def expired(self) -> bool: + """Simply returns True if the configuration should be considered as + expired or False if content should be retrieved.""" if isinstance(self._cached_servers, list) and self.cache: # We have enough reason to look further into our cached content # and verify it has not expired. @@ -356,7 +369,7 @@ def expired(self): return True @staticmethod - def __normalize_tag_groups(group_tags): + def __normalize_tag_groups(group_tags: dict[str, set[str]]) -> None: """ Used to normalize a tag assign map which looks like: { @@ -373,11 +386,10 @@ def __normalize_tag_groups(group_tags): """ # Prepare a key set list we can use - tag_groups = set([str(x) for x in group_tags.keys()]) + tag_groups = {str(x) for x in group_tags} def _expand(tags, ignore=None): - """ - Expands based on tag provided and returns a set + """Expands based on tag provided and returns a set. 
this also updates the group_tags while it goes """ @@ -414,7 +426,7 @@ def _expand(tags, ignore=None): # Go deeper (recursion) ignore.add(tag) - group_tags[gtag] = _expand(set([gtag]), ignore=ignore) + group_tags[gtag] = _expand({gtag}, ignore=ignore) results |= group_tags[gtag] # Pop ignore @@ -424,14 +436,18 @@ def _expand(tags, ignore=None): for tag in tag_groups: # Get our tags - group_tags[tag] |= _expand(set([tag])) + group_tags[tag] |= _expand({tag}) if not group_tags[tag]: ConfigBase.logger.warning( - 'The group {} has no tags assigned to it'.format(tag)) + f"The group {tag} has no tags assigned to it" + ) del group_tags[tag] @staticmethod - def parse_url(url, verify_host=True): + def parse_url( + url: str, + verify_host: bool = True, + ) -> dict[str, object] | None: """Parses the URL and returns it broken apart into a dictionary. This is very specific and customized for Apprise. @@ -456,38 +472,40 @@ def parse_url(url, verify_host=True): return results # Allow overriding the default config format - if 'format' in results['qsd']: - results['format'] = results['qsd'].get('format') - if results['format'] not in common.CONFIG_FORMATS: + if "format" in results["qsd"]: + results["format"] = results["qsd"].get("format") + if results["format"] not in common.CONFIG_FORMATS: URLBase.logger.warning( - 'Unsupported format specified {}'.format( - results['format'])) - del results['format'] + "Unsupported format specified {}".format(results["format"]) + ) + del results["format"] # Defines the encoding of the payload - if 'encoding' in results['qsd']: - results['encoding'] = results['qsd'].get('encoding') + if "encoding" in results["qsd"]: + results["encoding"] = results["qsd"].get("encoding") # Our cache value - if 'cache' in results['qsd']: + if "cache" in results["qsd"]: # First try to get it's integer value try: - results['cache'] = int(results['qsd']['cache']) + results["cache"] = int(results["qsd"]["cache"]) except (ValueError, TypeError): # No problem, it just 
isn't an integer; now treat it as a bool # instead: - results['cache'] = parse_bool(results['qsd']['cache']) + results["cache"] = parse_bool(results["qsd"]["cache"]) return results @staticmethod - def detect_config_format(content, **kwargs): - """ - Takes the specified content and attempts to detect the format type - - The function returns the actual format type if detected, otherwise - it returns None + def detect_config_format( + content: str, + **kwargs: object, + ) -> common.ConfigFormat | None: + """Takes the specified content and attempts to detect the format type. + + The function returns the actual format type if detected, otherwise it + returns None """ # Detect Format Logic: @@ -503,18 +521,19 @@ def detect_config_format(content, **kwargs): # Define what a valid line should look like valid_line_re = re.compile( - r'^\s*(?P([;#]+(?P.*))|' - r'(?P((?P[ \t,a-z0-9_-]+)=)?[a-z0-9]+://.*)|' - r'((?P[a-z0-9]+):.*))?$', re.I) + r"^\s*(?P([;#]+(?P.*))|" + r"(?P((?P[ \t,a-z0-9_-]+)=)?[a-z0-9]+://.*)|" + r"((?P[a-z0-9]+):.*))?$", + re.I, + ) try: # split our content up to read line by line - content = re.split(r'\r*\n', content) + content = re.split(r"\r*\n", content) except TypeError: # content was not expected string type - ConfigBase.logger.error( - 'Invalid Apprise configuration specified.') + ConfigBase.logger.error("Invalid Apprise configuration specified.") return None # By default set our return value to None since we don't know @@ -529,24 +548,25 @@ def detect_config_format(content, **kwargs): if not result: # Invalid syntax ConfigBase.logger.error( - 'Undetectable Apprise configuration found ' - 'based on line {}.'.format(line)) + "Undetectable Apprise configuration found " + f"based on line {line}." 
+ ) # Take an early exit return None # Attempt to detect configuration - if result.group('yaml'): + if result.group("yaml"): config_format = common.ConfigFormat.YAML ConfigBase.logger.debug( - 'Detected YAML configuration ' - 'based on line {}.'.format(line)) + f"Detected YAML configuration based on line {line}." + ) break - elif result.group('text'): + elif result.group("text"): config_format = common.ConfigFormat.TEXT ConfigBase.logger.debug( - 'Detected TEXT configuration ' - 'based on line {}.'.format(line)) + f"Detected TEXT configuration based on line {line}." + ) break # If we reach here, we have a comment entry @@ -556,11 +576,16 @@ def detect_config_format(content, **kwargs): return config_format @staticmethod - def config_parse(content, asset=None, config_format=None, **kwargs): - """ - Takes the specified config content and loads it based on the specified - config_format. If a format isn't specified, then it is auto detected. - + def config_parse( + content: str, + asset: AppriseAsset | None = None, + config_format: str | common.ConfigFormat | None = None, + **kwargs: object, + ) -> tuple[list[object], list[str]]: + """Takes the specified config content and loads it based on the + specified config_format. + + If a format isn't specified, then it is auto detected. 
""" if config_format is None: @@ -569,27 +594,35 @@ def config_parse(content, asset=None, config_format=None, **kwargs): if not config_format: # We couldn't detect configuration - ConfigBase.logger.error('Could not detect configuration') - return (list(), list()) + ConfigBase.logger.error("Could not detect configuration") + return ([], []) + + try: + config_format = ( + config_format if isinstance(config_format, common.ConfigFormat) + else common.ConfigFormat(config_format.lower()) + ) - if config_format not in common.CONFIG_FORMATS: - # Invalid configuration type specified + except (AttributeError, ValueError): ConfigBase.logger.error( - 'An invalid configuration format ({}) was specified'.format( - config_format)) - return (list(), list()) + f"An invalid configuration format ({config_format}) was" + " specified" + ) + return ([], []) # Dynamically load our parse_ function based on our config format - fn = getattr(ConfigBase, 'config_parse_{}'.format(config_format)) + fn = getattr(ConfigBase, f"config_parse_{config_format.value}") # Execute our config parse function which always returns a list return fn(content=content, asset=asset) @staticmethod - def config_parse_text(content, asset=None): - """ - Parse the specified content as though it were a simple text file only - containing a list of URLs. + def config_parse_text( + content: str, + asset: AppriseAsset | None = None, + ) -> tuple[list[object], list[str]]: + """Parse the specified content as though it were a simple text file + only containing a list of URLs. 
Return a tuple that looks like (servers, configs) where: - servers contains a list of loaded notification plugins @@ -617,14 +650,13 @@ def config_parse_text(content, asset=None): # Assign tag contents to a group identifier = - """ # A list of loaded Notification Services - servers = list() + servers = [] # A list of additional configuration files referenced using # the include keyword - configs = list() + configs = [] # Track all of the tags we want to assign later on group_tags = {} @@ -637,39 +669,44 @@ def config_parse_text(content, asset=None): # Define what a valid line should look like valid_line_re = re.compile( - r'^\s*(?P([;#]+(?P.*))|' - r'(\s*(?P[a-z0-9, \t_-]+)\s*=|=)?\s*' - r'((?P[a-z0-9]{1,12}://.*)|(?P[a-z0-9, \t_-]+))|' - r'include\s+(?P.+))?\s*$', re.I) + r"^\s*(?P([;#]+(?P.*))|" + r"(\s*(?P[a-z0-9, \t_-]+)\s*=|=)?\s*" + r"((?P[a-z0-9]{1,32}://.*)|(?P[a-z0-9, \t_-]+))|" + r"include\s+(?P.+))?\s*$", + re.I, + ) try: # split our content up to read line by line - content = re.split(r'\r*\n', content) + content = re.split(r"\r*\n", content) except TypeError: # content was not expected string type ConfigBase.logger.error( - 'Invalid Apprise TEXT based configuration specified.') - return (list(), list()) + "Invalid Apprise TEXT based configuration specified." + ) + return ([], []) for line, entry in enumerate(content, start=1): result = valid_line_re.match(entry) if not result: # Invalid syntax ConfigBase.logger.error( - 'Invalid Apprise TEXT configuration format found ' - '{} on line {}.'.format(entry, line)) + "Invalid Apprise TEXT configuration format found " + f"{entry} on line {line}." + ) # Assume this is a file we shouldn't be parsing. It's owner # can read the error printed to screen and take action # otherwise. 
- return (list(), list()) + return ([], []) # Retrieve our line - url, assign, config = \ - result.group('url'), \ - result.group('assign'), \ - result.group('config') + url, assign, config = ( + result.group("url"), + result.group("assign"), + result.group("config"), + ) if not (url or config or assign): # Comment/empty line; do nothing @@ -677,27 +714,27 @@ def config_parse_text(content, asset=None): if config: # CWE-312 (Secure Logging) Handling - loggable_url = config if not asset.secure_logging \ - else cwe312_url(config) + loggable_url = ( + config if not asset.secure_logging else cwe312_url(config) + ) - ConfigBase.logger.debug( - 'Include URL: {}'.format(loggable_url)) + ConfigBase.logger.debug(f"Include URL: {loggable_url}") # Store our include line configs.append(config.strip()) continue # CWE-312 (Secure Logging) Handling - loggable_url = url if not asset.secure_logging \ - else cwe312_url(url) + loggable_url = url if not asset.secure_logging else cwe312_url(url) if assign: - groups = set(parse_list(result.group('tags'), cast=str)) + groups = set(parse_list(result.group("tags"), cast=str)) if not groups: # no tags were assigned ConfigBase.logger.warning( - 'Unparseable tag assignment - no group(s) ' - 'on line {}'.format(line)) + "Unparseable tag assignment - no group(s) " + f"on line {line}" + ) continue # Get our tags @@ -705,8 +742,9 @@ def config_parse_text(content, asset=None): if not tags: # no tags were assigned ConfigBase.logger.warning( - 'Unparseable tag assignment - no tag(s) to assign ' - 'on line {}'.format(line)) + "Unparseable tag assignment - no tag(s) to assign " + f"on line {line}" + ) continue # Update our tag group map @@ -715,31 +753,32 @@ def config_parse_text(content, asset=None): group_tags[tag_group] = set() # ensure our tag group is never included in the assignment - group_tags[tag_group] |= tags - set([tag_group]) + group_tags[tag_group] |= tags - {tag_group} continue # Acquire our url tokens results = plugins.url_to_dict( - 
url, secure_logging=asset.secure_logging) + url, secure_logging=asset.secure_logging + ) if results is None: # Failed to parse the server URL ConfigBase.logger.warning( - 'Unparseable URL {} on line {}.'.format( - loggable_url, line)) + f"Unparseable URL {loggable_url} on line {line}." + ) continue # Build a list of tags to associate with the newly added # notifications if any were set - results['tag'] = set(parse_list(result.group('tags'), cast=str)) + results["tag"] = set(parse_list(result.group("tags"), cast=str)) # Set our Asset Object - results['asset'] = asset + results["asset"] = asset # Store our preloaded entries preloaded.append({ - 'results': results, - 'line': line, - 'loggable_url': loggable_url, + "results": results, + "line": line, + "loggable_url": loggable_url, }) # @@ -753,7 +792,7 @@ def config_parse_text(content, asset=None): # for entry in preloaded: # Point to our results entry for easier reference below - results = entry['results'] + results = entry["results"] # # Apply our tag groups if they're defined @@ -761,26 +800,30 @@ def config_parse_text(content, asset=None): for group, tags in group_tags.items(): # Detect if anything assigned to this tag also maps back to a # group. If so we want to add the group to our list - if next((True for tag in results['tag'] - if tag in tags), False): - results['tag'].add(group) + if next( + (True for tag in results["tag"] if tag in tags), False + ): + results["tag"].add(group) try: # Attempt to create an instance of our plugin using the # parsed URL information - plugin = N_MGR[results['schema']](**results) + plugin = N_MGR[results["schema"]](**results) # Create log entry of loaded URL ConfigBase.logger.debug( - 'Loaded URL: %s', plugin.url( - privacy=results['asset'].secure_logging)) + "Loaded URL: %s", + plugin.url(privacy=results["asset"].secure_logging), + ) except Exception as e: # the arguments are invalid or can not be used. 
ConfigBase.logger.warning( - 'Could not load URL {} on line {}.'.format( - entry['loggable_url'], entry['line'])) - ConfigBase.logger.debug('Loading Exception: %s' % str(e)) + "Could not load URL {} on line {}.".format( + entry["loggable_url"], entry["line"] + ) + ) + ConfigBase.logger.debug(f"Loading Exception: {e!s}") continue # if we reach here, we successfully loaded our data @@ -790,9 +833,11 @@ def config_parse_text(content, asset=None): return (servers, configs) @staticmethod - def config_parse_yaml(content, asset=None): - """ - Parse the specified content as though it were a yaml file + def config_parse_yaml( + content: str, + asset: AppriseAsset | None = None, + ) -> tuple[list[object], list[str]]: + """Parse the specified content as though it were a yaml file specifically formatted for Apprise. Return a tuple that looks like (servers, configs) where: @@ -801,15 +846,14 @@ def config_parse_yaml(content, asset=None): referenced. You may optionally associate an asset with the notification. 
- """ # A list of loaded Notification Services - servers = list() + servers = [] # A list of additional configuration files referenced using # the include keyword - configs = list() + configs = [] # Group Assignments group_tags = {} @@ -821,60 +865,81 @@ def config_parse_yaml(content, asset=None): # Load our data (safely) result = yaml.load(content, Loader=yaml.SafeLoader) - except (AttributeError, - yaml.parser.ParserError, - yaml.error.MarkedYAMLError) as e: + except ( + AttributeError, + yaml.parser.ParserError, + yaml.error.MarkedYAMLError, + ) as e: # Invalid content - ConfigBase.logger.error( - 'Invalid Apprise YAML data specified.') - ConfigBase.logger.debug( - 'YAML Exception:{}{}'.format(os.linesep, e)) - return (list(), list()) + ConfigBase.logger.error("Invalid Apprise YAML data specified.") + ConfigBase.logger.debug(f"YAML Exception:{os.linesep}{e}") + return ([], []) if not isinstance(result, dict): # Invalid content ConfigBase.logger.error( - 'Invalid Apprise YAML based configuration specified.') - return (list(), list()) + "Invalid Apprise YAML based configuration specified." + ) + return ([], []) # YAML Version - version = result.get('version', 1) + version = result.get("version", 1) if version != 1: # Invalid syntax ConfigBase.logger.error( - 'Invalid Apprise YAML version specified {}.'.format(version)) - return (list(), list()) + f"Invalid Apprise YAML version specified {version}." 
+ ) + return ([], []) # # global asset object # asset = asset if isinstance(asset, AppriseAsset) else AppriseAsset() - tokens = result.get('asset', None) + + # Prepare our default timezone + default_timezone = asset.tzinfo + + # Acquire our asset tokens + tokens = result.get("asset", None) if tokens and isinstance(tokens, dict): - for k, v in tokens.items(): + raw_tz = tokens.get("timezone", tokens.get("tz")) + if isinstance(raw_tz, str): + default_timezone = zoneinfo(re.sub(r"[^\w/-]+", "", raw_tz)) + if not default_timezone: + ConfigBase.logger.warning( + 'Ignored invalid timezone "%s"', raw_tz) + default_timezone = asset.tzinfo + else: + asset._tzinfo = default_timezone + + elif raw_tz is not None: + ConfigBase.logger.warning( + 'Ignored invalid timezone "%r"', raw_tz) - if k.startswith('_') or k.endswith('_'): + # Iterate over remaining tokens + for k, v in tokens.items(): + if k.startswith("_") or k.endswith("_"): # Entries are considered reserved if they start or end # with an underscore - ConfigBase.logger.warning( - 'Ignored asset key "{}".'.format(k)) + ConfigBase.logger.warning(f'Ignored asset key "{k}".') continue - if not (hasattr(asset, k) and - isinstance(getattr(asset, k), - (bool, str))): + if not ( + hasattr(asset, k) + and isinstance(getattr(asset, k), (bool, str)) + ): # We can't set a function or non-string set value - ConfigBase.logger.warning( - 'Invalid asset key "{}".'.format(k)) + ConfigBase.logger.warning(f'Invalid asset key "{k}".') continue if v is None: # Convert to an empty string - v = '' + v = "" - if (isinstance(v, (bool, str)) - and isinstance(getattr(asset, k), bool)): + if isinstance(v, (bool, str)) and isinstance( + getattr(asset, k), bool + ): # If the object in the Asset is a boolean, then # we want to convert the specified string to @@ -887,15 +952,14 @@ def config_parse_yaml(content, asset=None): else: # we must set strings with a string - ConfigBase.logger.warning( - 'Invalid asset value to "{}".'.format(k)) + 
ConfigBase.logger.warning(f'Invalid asset value to "{k}".') continue # # global tag root directive # global_tags = set() - tags = result.get('tag', None) + tags = result.get("tag", result.get("tags", None)) if tags and isinstance(tags, (list, tuple, str)): # Store any preset tags global_tags = set(parse_list(tags, cast=str)) @@ -903,23 +967,23 @@ def config_parse_yaml(content, asset=None): # # groups root directive # - groups = result.get('groups', None) + groups = result.get("groups", None) if isinstance(groups, dict): # # Dictionary # - for _groups, tags in groups.items(): - for group in parse_list(_groups, cast=str): + for groups_, tags in groups.items(): + for group in parse_list(groups_, cast=str): if isinstance(tags, (list, tuple)): - _tags = set() + tags_ = set() for e in tags: if isinstance(e, dict): - _tags |= set(e.keys()) + tags_ |= set(e.keys()) else: - _tags |= set(parse_list(e, cast=str)) + tags_ |= set(parse_list(e, cast=str)) # Final assignment - tags = _tags + tags = tags_ else: tags = set(parse_list(tags, cast=str)) @@ -939,22 +1003,22 @@ def config_parse_yaml(content, asset=None): for no, entry in enumerate(groups): if not isinstance(entry, dict): ConfigBase.logger.warning( - 'No assignment for group {}, entry #{}'.format( - entry, no + 1)) + f"No assignment for group {entry}, entry #{no + 1}" + ) continue - for _groups, tags in entry.items(): - for group in parse_list(_groups, cast=str): + for groups_, tags in entry.items(): + for group in parse_list(groups_, cast=str): if isinstance(tags, (list, tuple)): - _tags = set() + tags_ = set() for e in tags: if isinstance(e, dict): - _tags |= set(e.keys()) + tags_ |= set(e.keys()) else: - _tags |= set(parse_list(e, cast=str)) + tags_ |= set(parse_list(e, cast=str)) # Final assignment - tags = _tags + tags = tags_ else: tags = set(parse_list(tags, cast=str)) @@ -967,7 +1031,7 @@ def config_parse_yaml(content, asset=None): # include root directive # - includes = result.get('include', None) + includes = 
result.get("include", None) if isinstance(includes, str): # Support a single inline string or multiple ones separated by a # comma and/or space @@ -975,10 +1039,10 @@ def config_parse_yaml(content, asset=None): elif not isinstance(includes, (list, tuple)): # Not a problem; we simply have no includes - includes = list() + includes = [] # Iterate over each config URL - for no, url in enumerate(includes): + for _no, url in enumerate(includes): if isinstance(url, str): # Support a single inline string or multiple ones separated by @@ -987,26 +1051,25 @@ def config_parse_yaml(content, asset=None): elif isinstance(url, dict): # Store the url and ignore arguments associated - configs.extend(u for u in url.keys()) + configs.extend(u for u in url) # # urls root directive # - urls = result.get('urls', None) + urls = result.get("urls", None) if not isinstance(urls, (list, tuple)): # Not a problem; we simply have no urls - urls = list() + urls = [] # Iterate over each URL for no, url in enumerate(urls): # Our results object is what we use to instantiate our object if # we can. Reset it to None on each iteration - results = list() + results = [] # CWE-312 (Secure Logging) Handling - loggable_url = url if not asset.secure_logging \ - else cwe312_url(url) + loggable_url = url if not asset.secure_logging else cwe312_url(url) if isinstance(url, str): # We're just a simple URL string... @@ -1016,22 +1079,23 @@ def config_parse_yaml(content, asset=None): # config file at least has something to take action # with. 
ConfigBase.logger.warning( - 'Invalid URL {}, entry #{}'.format( - loggable_url, no + 1)) + f"Invalid URL {loggable_url}, entry #{no + 1}" + ) continue # We found a valid schema worthy of tracking; store it's # details: - _results = plugins.url_to_dict( - url, secure_logging=asset.secure_logging) - if _results is None: + results_ = plugins.url_to_dict( + url, secure_logging=asset.secure_logging + ) + if results_ is None: ConfigBase.logger.warning( - 'Unparseable URL {}, entry #{}'.format( - loggable_url, no + 1)) + f"Unparseable URL {loggable_url}, entry #{no + 1}" + ) continue # add our results to our global set - results.append(_results) + results.append(results_) elif isinstance(url, dict): # We are a url string with additional unescaped options. In @@ -1042,41 +1106,51 @@ def config_parse_yaml(content, asset=None): it = iter(url.items()) # Track the URL to-load - _url = None + url_ = None # Track last acquired schema schema = None - for key, tokens in it: + + for key, tokens_ in it: # Test our schema - _schema = GET_SCHEMA_RE.match(key) - if _schema is None: + schema_ = GET_SCHEMA_RE.match(key) + if schema_ is None: # Log invalid entries so that maintainer of config # config file at least has something to take action # with. 
ConfigBase.logger.warning( - 'Ignored entry {} found under urls, entry #{}' - .format(key, no + 1)) + f"Ignored entry {key} found under urls, entry" + f" #{no + 1}" + ) continue # Store our schema - schema = _schema.group('schema').lower() + schema = schema_.group("schema").lower() # Store our URL and Schema Regex - _url = key + url_ = key + + # Update our token assignment + tokens = tokens_ + + # We're done + break - if _url is None: + if url_ is None: # the loop above failed to match anything ConfigBase.logger.warning( - 'Unsupported URL, entry #{}'.format(no + 1)) + f"Unsupported URL, entry #{no + 1}" + ) continue - _results = plugins.url_to_dict( - _url, secure_logging=asset.secure_logging) - if _results is None: + results_ = plugins.url_to_dict( + url_, secure_logging=asset.secure_logging + ) + if results_ is None: # Setup dictionary - _results = { + results_ = { # Minimum requirements - 'schema': schema, + "schema": schema, } if isinstance(tokens, (list, tuple, set)): @@ -1085,20 +1159,21 @@ def config_parse_yaml(content, asset=None): for entries in tokens: # Copy ourselves a template of our parsed URL as a base # to work with - r = _results.copy() + r = results_.copy() # We are a url string with additional unescaped options if isinstance(entries, dict): - _url, tokens = next(iter(url.items())) + url_, tokens = next(iter(url.items())) # Tags you just can't over-ride - if 'schema' in entries: - del entries['schema'] + if "schema" in entries: + del entries["schema"] # support our special tokens (if they're present) if schema in N_MGR: entries = ConfigBase._special_token_handler( - schema, entries) + schema, entries + ) # Extend our dictionary with our new entries r.update(entries) @@ -1110,11 +1185,12 @@ def config_parse_yaml(content, asset=None): # support our special tokens (if they're present) if schema in N_MGR: tokens = ConfigBase._special_token_handler( - schema, tokens) + schema, tokens + ) # Copy ourselves a template of our parsed URL as a base to # 
work with - r = _results.copy() + r = results_.copy() # add our result set r.update(tokens) @@ -1124,71 +1200,97 @@ def config_parse_yaml(content, asset=None): else: # add our results to our global set - results.append(_results) + results.append(results_) else: # Unsupported ConfigBase.logger.warning( - 'Unsupported Apprise YAML entry #{}'.format(no + 1)) + f"Unsupported Apprise YAML entry #{no + 1}" + ) continue # Track our entries entry = 0 + # Prepare our results for post-processing + results = deque(results) + while len(results): # Increment our entry count entry += 1 # Grab our first item - _results = results.pop(0) + results_ = results.popleft() - if _results['schema'] not in N_MGR: + if results_["schema"] not in N_MGR: # the arguments are invalid or can not be used. ConfigBase.logger.warning( - 'An invalid Apprise schema ({}) in YAML configuration ' - 'entry #{}, item #{}' - .format(_results['schema'], no + 1, entry)) + "An invalid Apprise schema ({}) in YAML configuration " + "entry #{}, item #{}".format( + results_["schema"], no + 1, entry + ) + ) continue # tag is a special keyword that is managed by Apprise object. 
# The below ensures our tags are set correctly - if 'tag' in _results: + if "tag" in results_: + # Tidy our list up + results_["tag"] = ( + set(parse_list(results_["tag"], cast=str)) + | global_tags + ) + if "tags" in results_: + ConfigBase.logger.warning(( + "URL #{}: {} contains both 'tag' and 'tags' " + "keyword").format(no + 1, url)) + del results_["tags"] + + elif "tags" in results_: # Tidy our list up - _results['tag'] = set( - parse_list(_results['tag'], cast=str)) | global_tags + results_["tag"] = ( + set(parse_list(results_["tags"], cast=str)) + | global_tags + ) + # Should not carry forward + del results_["tags"] else: # Just use the global settings - _results['tag'] = global_tags + results_["tag"] = global_tags - for key in list(_results.keys()): + for key in list(results_.keys()): # Strip out any tokens we know that we can't accept and # warn the user match = VALID_TOKEN.match(key) if not match: ConfigBase.logger.warning( - 'Ignoring invalid token ({}) found in YAML ' - 'configuration entry #{}, item #{}' - .format(key, no + 1, entry)) - del _results[key] - - ConfigBase.logger.trace( - 'URL #{}: {} unpacked as:{}{}' - .format(no + 1, url, os.linesep, os.linesep.join( - ['{}="{}"'.format(k, a) - for k, a in _results.items()]))) + f"Ignoring invalid token ({key}) found in YAML " + f"configuration entry #{no + 1}, item #{entry}" + ) + del results_[key] + + if ConfigBase.logger.isEnabledFor(logging.TRACE): + ConfigBase.logger.trace( + "URL #%d: %s unpacked as:%s%s", + no + 1, + url, + os.linesep, + os.linesep.join( + [f'{k}="{a}"' for k, a in results_.items()]), + ) # Prepare our Asset Object - _results['asset'] = asset + results_["asset"] = asset # Handle post processing of result set - _results = URLBase.post_process_parse_url_results(_results) + results_ = URLBase.post_process_parse_url_results(results_) # Store our preloaded entries preloaded.append({ - 'results': _results, - 'entry': no + 1, - 'item': entry, + "results": results_, + "entry": no + 1, + 
"item": entry, }) # @@ -1202,7 +1304,7 @@ def config_parse_yaml(content, asset=None): # for entry in preloaded: # Point to our results entry for easier reference below - results = entry['results'] + results = entry["results"] # # Apply our tag groups if they're defined @@ -1210,38 +1312,41 @@ def config_parse_yaml(content, asset=None): for group, tags in group_tags.items(): # Detect if anything assigned to this tag also maps back to a # group. If so we want to add the group to our list - if next((True for tag in results['tag'] - if tag in tags), False): - results['tag'].add(group) + if next( + (True for tag in results["tag"] if tag in tags), False + ): + results["tag"].add(group) # Now we generate our plugin try: # Attempt to create an instance of our plugin using the # parsed URL information - plugin = N_MGR[results['schema']](**results) + plugin = N_MGR[results["schema"]](**results) # Create log entry of loaded URL ConfigBase.logger.debug( - 'Loaded URL: %s', plugin.url( - privacy=results['asset'].secure_logging)) + "Loaded URL: %s", + plugin.url(privacy=results["asset"].secure_logging), + ) except Exception as e: # the arguments are invalid or can not be used. ConfigBase.logger.warning( - 'Could not load Apprise YAML configuration ' - 'entry #{}, item #{}' - .format(entry['entry'], entry['item'])) - ConfigBase.logger.debug('Loading Exception: %s' % str(e)) + "Could not load Apprise YAML configuration " + "entry #{}, item #{}".format(entry["entry"], entry["item"]) + ) + ConfigBase.logger.debug(f"Loading Exception: {e!s}") continue # if we reach here, we successfully loaded our data servers.append(plugin) + preloaded.clear() return (servers, configs) - def pop(self, index=-1): - """ - Removes an indexed Notification Service from the stack and returns it. + def pop(self, index: int = -1) -> object: + """Removes an indexed Notification Service from the stack and returns + it. By default, the last element of the list is removed. 
""" @@ -1253,10 +1358,17 @@ def pop(self, index=-1): # Pop the element off of the stack return self._cached_servers.pop(index) + def clear_cache(self) -> None: + """Cleans cache""" + self._cached_servers = None + self._cached_time = None + @staticmethod - def _special_token_handler(schema, tokens): - """ - This function takes a list of tokens and updates them to no longer + def _special_token_handler( + schema: str, + tokens: dict[str, object], + ) -> dict[str, object]: + """This function takes a list of tokens and updates them to no longer include any special tokens such as +,-, and : - schema must be a valid schema of a supported plugin type @@ -1274,12 +1386,14 @@ def _special_token_handler(schema, tokens): for kw, meta in N_MGR[schema].template_kwargs.items(): # Determine our prefix: - prefix = meta.get('prefix', '+') + prefix = meta.get("prefix", "+") # Detect any matches - matches = \ - {k[1:]: str(v) for k, v in tokens.items() - if k.startswith(prefix)} + matches = { + k[1:]: str(v) + for k, v in tokens.items() + if k.startswith(prefix) + } if not matches: # we're done with this entry @@ -1287,11 +1401,12 @@ def _special_token_handler(schema, tokens): if not isinstance(tokens.get(kw), dict): # Invalid; correct it - tokens[kw] = dict() + tokens[kw] = {} # strip out processed tokens - tokens = {k: v for k, v in tokens.items() - if not k.startswith(prefix)} + tokens = { + k: v for k, v in tokens.items() if not k.startswith(prefix) + } # Update our entries tokens[kw].update(matches) @@ -1318,38 +1433,36 @@ def _special_token_handler(schema, tokens): for key in list(tokens.keys()): - if key not in class_templates['args']: + if key not in class_templates["args"]: # No need to handle non-arg entries continue # get our `map_to` and/or 'alias_of' value (if it exists) - map_to = class_templates['args'][key].get( - 'alias_of', class_templates['args'][key].get('map_to', '')) + map_to = class_templates["args"][key].get( + "alias_of", 
class_templates["args"][key].get("map_to", "") + ) if map_to == key: # We're already good as we are now continue - if map_to in class_templates['tokens']: - meta = class_templates['tokens'][map_to] + if map_to in class_templates["tokens"]: + meta = class_templates["tokens"][map_to] else: - meta = class_templates['args'].get( - map_to, class_templates['args'][key]) + meta = class_templates["args"].get( + map_to, class_templates["args"][key] + ) # Perform a translation/mapping if our code reaches here value = tokens[key] del tokens[key] # Detect if we're dealign with a list or not - is_list = re.search( - r'^list:.*', - meta.get('type'), - re.IGNORECASE) + is_list = re.search(r"^list:.*", meta.get("type"), re.IGNORECASE) if map_to not in tokens: - tokens[map_to] = [] if is_list \ - else meta.get('default') + tokens[map_to] = [] if is_list else meta.get("default") elif is_list and not isinstance(tokens.get(map_to), list): # Convert ourselves to a list if we aren't already @@ -1357,10 +1470,8 @@ def _special_token_handler(schema, tokens): # Type Conversion if re.search( - r'^(choice:)?string', - meta.get('type'), - re.IGNORECASE) \ - and not isinstance(value, str): + r"^(choice:)?string", meta.get("type"), re.IGNORECASE + ) and not isinstance(value, str): # Ensure our format is as expected value = str(value) @@ -1368,7 +1479,7 @@ def _special_token_handler(schema, tokens): # Apply any further translations if required (absolute map) # This is the case when an arg maps to a token which further # maps to a different function arg on the class constructor - abs_map = meta.get('map_to', map_to) + abs_map = meta.get("map_to", map_to) # Set our token as how it was provided by the configuration if isinstance(tokens.get(map_to), list): @@ -1380,44 +1491,38 @@ def _special_token_handler(schema, tokens): # Return our tokens return tokens - def __getitem__(self, index): - """ - Returns the indexed server entry associated with the loaded - notification servers - """ + def 
__getitem__(self, index: int) -> object: + """Returns the indexed server entry associated with the loaded + notification servers.""" if not isinstance(self._cached_servers, list): # Generate ourselves a list of content we can pull from self.servers() return self._cached_servers[index] - def __iter__(self): - """ - Returns an iterator to our server list - """ + def __iter__(self) -> object: + """Returns an iterator to our server list.""" if not isinstance(self._cached_servers, list): # Generate ourselves a list of content we can pull from self.servers() return iter(self._cached_servers) - def __len__(self): - """ - Returns the total number of servers loaded - """ + def __len__(self) -> int: + """Returns the total number of servers loaded.""" if not isinstance(self._cached_servers, list): # Generate ourselves a list of content we can pull from self.servers() return len(self._cached_servers) - def __bool__(self): - """ - Allows the Apprise object to be wrapped in an 'if statement'. + def __bool__(self) -> bool: + """Allows the Apprise object to be wrapped in an 'if statement'. + True is returned if our content was downloaded correctly. """ if not isinstance(self._cached_servers, list): # Generate ourselves a list of content we can pull from self.servers() - return True if self._cached_servers else False + return bool(self._cached_servers) diff --git a/libs/apprise/config/base.pyi b/libs/apprise/config/base.pyi deleted file mode 100644 index abff1204d8..0000000000 --- a/libs/apprise/config/base.pyi +++ /dev/null @@ -1,3 +0,0 @@ -from .. import URLBase - -class ConfigBase(URLBase): ... \ No newline at end of file diff --git a/libs/apprise/config/file.py b/libs/apprise/config/file.py index 3edd6d9a9c..a689466698 100644 --- a/libs/apprise/config/file.py +++ b/libs/apprise/config/file.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. 
-# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -26,36 +25,32 @@ # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. -import re import os -from .base import ConfigBase -from ..utils.disk import path_decode -from ..common import ConfigFormat -from ..common import ContentIncludeMode +import re + +from ..common import ConfigFormat, ContentIncludeMode from ..locale import gettext_lazy as _ +from ..utils.disk import path_decode +from .base import ConfigBase class ConfigFile(ConfigBase): - """ - A wrapper for File based configuration sources - """ + """A wrapper for File based configuration sources.""" # The default descriptive name associated with the service - service_name = _('Local File') + service_name = _("Local File") # The default protocol - protocol = 'file' + protocol = "file" # Configuration file inclusion can only be of the same type allow_cross_includes = ContentIncludeMode.STRICT def __init__(self, path, **kwargs): - """ - Initialize File Object + """Initialize File Object. headers can be a dictionary of key/value pairs that you want to additionally include as part of the server headers to post with - """ super().__init__(**kwargs) @@ -71,61 +66,60 @@ def __init__(self, path, **kwargs): return def url(self, privacy=False, *args, **kwargs): - """ - Returns the URL built dynamically based on specified arguments. 
- """ + """Returns the URL built dynamically based on specified arguments.""" # Prepare our cache value if isinstance(self.cache, bool) or not self.cache: - cache = 'yes' if self.cache else 'no' + cache = "yes" if self.cache else "no" else: cache = int(self.cache) # Define any URL parameters params = { - 'encoding': self.encoding, - 'cache': cache, + "encoding": self.encoding, + "cache": cache, } if self.config_format: # A format was enforced; make sure it's passed back with the url - params['format'] = self.config_format + params["format"] = self.config_format - return 'file://{path}{params}'.format( + return "file://{path}{params}".format( path=self.quote(self.__original_path), - params='?{}'.format(self.urlencode(params)) if params else '', + params=f"?{self.urlencode(params)}" if params else "", ) def read(self, **kwargs): - """ - Perform retrieval of the configuration based on the specified request - """ + """Perform retrieval of the configuration based on the specified + request.""" response = None try: - if self.max_buffer_size > 0 and \ - os.path.getsize(self.path) > self.max_buffer_size: + if ( + self.max_buffer_size > 0 + and os.path.getsize(self.path) > self.max_buffer_size + ): # Content exceeds maximum buffer size self.logger.error( - 'File size exceeds maximum allowable buffer length' - ' ({}KB).'.format(int(self.max_buffer_size / 1024))) + "File size exceeds maximum allowable buffer length" + f" ({int(self.max_buffer_size / 1024)}KB)." 
+ ) return None except OSError: # getsize() can throw this acception if the file is missing # and or simply isn't accessible - self.logger.error( - 'File is not accessible: {}'.format(self.path)) + self.logger.error(f"File is not accessible: {self.path}") return None # Always call throttle before any server i/o is made self.throttle() try: - with open(self.path, "rt", encoding=self.encoding) as f: + with open(self.path, encoding=self.encoding) as f: # Store our content for parsing response = f.read() @@ -135,24 +129,26 @@ def read(self, **kwargs): # understand the encoding of.. self.logger.error( - 'File not using expected encoding ({}) : {}'.format( - self.encoding, self.path)) + f"File not using expected encoding ({self.encoding}) :" + f" {self.path}" + ) return None - except (IOError, OSError): + except OSError: # IOError is present for backwards compatibility with Python # versions older then 3.3. >= 3.3 throw OSError now. # Could not open and/or read the file; this is not a problem since # we scan a lot of default paths. 
- self.logger.error( - 'File can not be opened for read: {}'.format(self.path)) + self.logger.error(f"File can not be opened for read: {self.path}") return None # Detect config format based on file extension if it isn't already # enforced - if self.config_format is None and \ - re.match(r'^.*\.ya?ml\s*$', self.path, re.I) is not None: + if ( + self.config_format is None + and re.match(r"^.*\.ya?ml\s*$", self.path, re.I) is not None + ): # YAML Filename Detected self.default_config_format = ConfigFormat.YAML @@ -162,20 +158,17 @@ def read(self, **kwargs): @staticmethod def parse_url(url): - """ - Parses the URL so that we can handle all different file paths - and return it as our path object - - """ + """Parses the URL so that we can handle all different file paths and + return it as our path object.""" results = ConfigBase.parse_url(url, verify_host=False) if not results: # We're done early; it's not a good URL return results - match = re.match(r'[a-z0-9]+://(?P[^?]+)(\?.*)?', url, re.I) + match = re.match(r"[a-z0-9]+://(?P[^?]+)(\?.*)?", url, re.I) if not match: return None - results['path'] = ConfigFile.unquote(match.group('path')) + results["path"] = ConfigFile.unquote(match.group("path")) return results diff --git a/libs/apprise/config/http.py b/libs/apprise/config/http.py index b9c83510aa..a062136ff2 100644 --- a/libs/apprise/config/http.py +++ b/libs/apprise/config/http.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. -# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -27,39 +26,38 @@ # POSSIBILITY OF SUCH DAMAGE. 
import re + import requests -from .base import ConfigBase -from ..common import ConfigFormat -from ..common import ContentIncludeMode -from ..url import PrivacyMode + +from ..common import ConfigFormat, ContentIncludeMode from ..locale import gettext_lazy as _ +from ..url import PrivacyMode +from .base import ConfigBase # Support YAML formats # text/yaml # text/x-yaml # application/yaml # application/x-yaml -MIME_IS_YAML = re.compile('(text|application)/(x-)?yaml', re.I) +MIME_IS_YAML = re.compile(r"(text|application)/(x-)?yaml", re.I) # Support TEXT formats # text/plain # text/html -MIME_IS_TEXT = re.compile('text/(plain|html)', re.I) +MIME_IS_TEXT = re.compile(r"text/(plain|html)", re.I) class ConfigHTTP(ConfigBase): - """ - A wrapper for HTTP based configuration sources - """ + """A wrapper for HTTP based configuration sources.""" # The default descriptive name associated with the service - service_name = _('Web Based') + service_name = _("Web Based") # The default protocol - protocol = 'http' + protocol = "http" # The default secure protocol - secure_protocol = 'https' + secure_protocol = "https" # If an HTTP error occurs, define the number of characters you still want # to read back. This is useful for debugging purposes, but nothing else. @@ -71,20 +69,18 @@ class ConfigHTTP(ConfigBase): allow_cross_includes = ContentIncludeMode.ALWAYS def __init__(self, headers=None, **kwargs): - """ - Initialize HTTP Object + """Initialize HTTP Object. 
headers can be a dictionary of key/value pairs that you want to additionally include as part of the server headers to post with - """ super().__init__(**kwargs) - self.schema = 'https' if self.secure else 'http' + self.schema = "https" if self.secure else "http" - self.fullpath = kwargs.get('fullpath') + self.fullpath = kwargs.get("fullpath") if not isinstance(self.fullpath, str): - self.fullpath = '/' + self.fullpath = "/" self.headers = {} if headers: @@ -94,21 +90,19 @@ def __init__(self, headers=None, **kwargs): return def url(self, privacy=False, *args, **kwargs): - """ - Returns the URL built dynamically based on specified arguments. - """ + """Returns the URL built dynamically based on specified arguments.""" # Prepare our cache value if isinstance(self.cache, bool) or not self.cache: - cache = 'yes' if self.cache else 'no' + cache = "yes" if self.cache else "no" else: cache = int(self.cache) # Define any arguments set params = { - 'encoding': self.encoding, - 'cache': cache, + "encoding": self.encoding, + "cache": cache, } # Extend our parameters @@ -116,44 +110,46 @@ def url(self, privacy=False, *args, **kwargs): if self.config_format: # A format was enforced; make sure it's passed back with the url - params['format'] = self.config_format + params["format"] = self.config_format # Append our headers into our args - params.update({'+{}'.format(k): v for k, v in self.headers.items()}) + params.update({f"+{k}": v for k, v in self.headers.items()}) # Determine Authentication - auth = '' + auth = "" if self.user and self.password: - auth = '{user}:{password}@'.format( - user=self.quote(self.user, safe=''), + auth = "{user}:{password}@".format( + user=self.quote(self.user, safe=""), password=self.pprint( - self.password, privacy, mode=PrivacyMode.Secret, safe=''), + self.password, privacy, mode=PrivacyMode.Secret, safe="" + ), ) elif self.user: - auth = '{user}@'.format( - user=self.quote(self.user, safe=''), + auth = "{user}@".format( + 
user=self.quote(self.user, safe=""), ) default_port = 443 if self.secure else 80 - - return '{schema}://{auth}{hostname}{port}{fullpath}/?{params}'.format( + return "{schema}://{auth}{hostname}{port}{fullpath}/?{params}".format( schema=self.secure_protocol if self.secure else self.protocol, auth=auth, - hostname=self.quote(self.host, safe=''), - port='' if self.port is None or self.port == default_port - else ':{}'.format(self.port), - fullpath=self.quote(self.fullpath, safe='/'), + hostname=self.quote(self.host, safe=""), + port=( + "" + if self.port is None or self.port == default_port + else f":{self.port}" + ), + fullpath=self.quote(self.fullpath, safe="/"), params=self.urlencode(params), ) def read(self, **kwargs): - """ - Perform retrieval of the configuration based on the specified request - """ + """Perform retrieval of the configuration based on the specified + request.""" # prepare XML Object headers = { - 'User-Agent': self.app_id, + "User-Agent": self.app_id, } # Apply any/all header over-rides defined @@ -163,15 +159,15 @@ def read(self, **kwargs): if self.user: auth = (self.user, self.password) - url = '%s://%s' % (self.schema, self.host) + url = f"{self.schema}://{self.host}" if isinstance(self.port, int): - url += ':%d' % self.port + url += f":{self.port}" url += self.fullpath - self.logger.debug('HTTP POST URL: %s (cert_verify=%r)' % ( - url, self.verify_certificate, - )) + self.logger.debug( + f"HTTP POST URL: {url} (cert_verify={self.verify_certificate!r})" + ) # Prepare our response object response = None @@ -185,44 +181,49 @@ def read(self, **kwargs): try: # Make our request with requests.post( - url, - headers=headers, - auth=auth, - verify=self.verify_certificate, - timeout=self.request_timeout, - stream=True) as r: + url, + headers=headers, + auth=auth, + verify=self.verify_certificate, + timeout=self.request_timeout, + stream=True, + ) as r: # Handle Errors r.raise_for_status() # Get our file-size (if known) try: - file_size = 
int(r.headers.get('Content-Length', '0')) + file_size = int(r.headers.get("Content-Length", "0")) except (TypeError, ValueError): # Handle edge case where Content-Length is a bad value file_size = 0 # Store our response - if self.max_buffer_size > 0 \ - and file_size > self.max_buffer_size: + if ( + self.max_buffer_size > 0 + and file_size > self.max_buffer_size + ): # Provide warning of data truncation self.logger.error( - 'HTTP config response exceeds maximum buffer length ' - '({}KB);'.format(int(self.max_buffer_size / 1024))) + "HTTP config response exceeds maximum buffer length " + f"({int(self.max_buffer_size / 1024)}KB);" + ) # Return None - buffer execeeded return None # Store our result (but no more than our buffer length) - response = r.text[:self.max_buffer_size + 1] + response = r.text[: self.max_buffer_size + 1] # Verify that our content did not exceed the buffer size: if len(response) > self.max_buffer_size: # Provide warning of data truncation self.logger.error( - 'HTTP config response exceeds maximum buffer length ' - '({}KB);'.format(int(self.max_buffer_size / 1024))) + "HTTP config response exceeds maximum buffer length " + f"({int(self.max_buffer_size / 1024)}KB);" + ) # Return None - buffer execeeded return None @@ -230,7 +231,8 @@ def read(self, **kwargs): # Detect config format based on mime if the format isn't # already enforced content_type = r.headers.get( - 'Content-Type', 'application/octet-stream') + "Content-Type", "application/octet-stream" + ) if self.config_format is None and content_type: if MIME_IS_YAML.match(content_type) is not None: @@ -244,9 +246,10 @@ def read(self, **kwargs): except requests.RequestException as e: self.logger.error( - 'A Connection error occurred retrieving HTTP ' - 'configuration from %s.' % self.host) - self.logger.debug('Socket Exception: %s' % str(e)) + "A Connection error occurred retrieving HTTP " + f"configuration from {self.host}." 
+ ) + self.logger.debug(f"Socket Exception: {e!s}") # Return None (signifying a failure) return None @@ -256,11 +259,8 @@ def read(self, **kwargs): @staticmethod def parse_url(url): - """ - Parses the URL and returns enough arguments that can allow - us to re-instantiate this object. - - """ + """Parses the URL and returns enough arguments that can allow us to re- + instantiate this object.""" results = ConfigBase.parse_url(url) if not results: @@ -269,7 +269,7 @@ def parse_url(url): # Add our headers that the user can potentially over-ride if they wish # to to our returned result set - results['headers'] = results['qsd-'] - results['headers'].update(results['qsd+']) + results["headers"] = results["qsd-"] + results["headers"].update(results["qsd+"]) return results diff --git a/libs/apprise/config/memory.py b/libs/apprise/config/memory.py index b2d70096e4..8c391820b7 100644 --- a/libs/apprise/config/memory.py +++ b/libs/apprise/config/memory.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. -# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -26,28 +25,25 @@ # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. -from .base import ConfigBase from ..locale import gettext_lazy as _ +from .base import ConfigBase class ConfigMemory(ConfigBase): - """ - For information that was loaded from memory and does not - persist anywhere. 
- """ + """For information that was loaded from memory and does not persist + anywhere.""" # The default descriptive name associated with the service - service_name = _('Memory') + service_name = _("Memory") # The default protocol - protocol = 'memory' + protocol = "memory" def __init__(self, content, **kwargs): - """ - Initialize Memory Object + """Initialize Memory Object. - Memory objects just store the raw configuration in memory. There is - no external reference point. It's always considered cached. + Memory objects just store the raw configuration in memory. There is no + external reference point. It's always considered cached. """ super().__init__(**kwargs) @@ -56,30 +52,24 @@ def __init__(self, content, **kwargs): if self.config_format is None: # Detect our format if possible - self.config_format = \ - ConfigMemory.detect_config_format(self.content) + self.config_format = ConfigMemory.detect_config_format( + self.content + ) return def url(self, privacy=False, *args, **kwargs): - """ - Returns the URL built dynamically based on specified arguments. - """ + """Returns the URL built dynamically based on specified arguments.""" - return 'memory://' + return "memory://" def read(self, **kwargs): - """ - Simply return content stored into memory - """ + """Simply return content stored into memory.""" return self.content @staticmethod def parse_url(url): - """ - Memory objects have no parseable URL - - """ + """Memory objects have no parseable URL.""" # These URLs can not be parsed return None diff --git a/libs/apprise/conversion.py b/libs/apprise/conversion.py index 7f691eae44..e4930f61d6 100644 --- a/libs/apprise/conversion.py +++ b/libs/apprise/conversion.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. 
-# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -26,18 +25,18 @@ # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. +from html.parser import HTMLParser import re + from markdown import markdown + from .common import NotifyFormat from .url import URLBase -from html.parser import HTMLParser - def convert_between(from_format, to_format, content): - """ - Converts between different suported formats. If no conversion exists, - or the selected one fails, the original text will be returned. + """Converts between different suported formats. If no conversion exists, or + the selected one fails, the original text will be returned. This function returns the content translated (if required) """ @@ -55,26 +54,22 @@ def convert_between(from_format, to_format, content): def markdown_to_html(content): - """ - Converts specified content from markdown to HTML. - """ - return markdown(content, extensions=[ - 'markdown.extensions.nl2br', 'markdown.extensions.tables']) + """Converts specified content from markdown to HTML.""" + return markdown( + content, + extensions=["markdown.extensions.nl2br", "markdown.extensions.tables"], + ) def text_to_html(content): - """ - Converts specified content from plain text to HTML. - """ + """Converts specified content from plain text to HTML.""" # First eliminate any carriage returns return URLBase.escape_html(content, convert_new_lines=True) def html_to_text(content): - """ - Converts a content from HTML to plain text. 
- """ + """Converts a content from HTML to plain text.""" parser = HTMLConverter() parser.feed(content) @@ -82,20 +77,46 @@ def html_to_text(content): return parser.converted -class HTMLConverter(HTMLParser, object): +class HTMLConverter(HTMLParser): """An HTML to plain text converter tuned for email messages.""" # The following tags must start on a new line - BLOCK_TAGS = ('p', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', - 'div', 'td', 'th', 'code', 'pre', 'label', 'li',) + BLOCK_TAGS = ( + "p", + "h1", + "h2", + "h3", + "h4", + "h5", + "h6", + "div", + "td", + "th", + "code", + "pre", + "label", + "li", + ) # the folowing tags ignore any internal text IGNORE_TAGS = ( - 'form', 'input', 'textarea', 'select', 'ul', 'ol', 'style', 'link', - 'meta', 'title', 'html', 'head', 'script') + "form", + "input", + "textarea", + "select", + "ul", + "ol", + "style", + "link", + "meta", + "title", + "html", + "head", + "script", + ) # Condense Whitespace - WS_TRIM = re.compile(r'[\s]+', re.DOTALL | re.MULTILINE) + WS_TRIM = re.compile(r"[\s]+", re.DOTALL | re.MULTILINE) # Sentinel value for block tag boundaries, which may be consolidated into a # single line break. @@ -115,12 +136,11 @@ def __init__(self, **kwargs): self.converted = "" def close(self): - string = ''.join(self._finalize(self._result)) + string = "".join(self._finalize(self._result)) self.converted = string.strip() def _finalize(self, result): - """ - Combines and strips consecutive strings, then converts consecutive + """Combines and strips consecutive strings, then converts consecutive block ends into singleton newlines. [ {be} " Hello " {be} {be} " World!" ] -> "\nHello\nWorld!" @@ -136,7 +156,7 @@ def _finalize(self, result): continue # First block end; yield the current string, plus a newline. - yield accum.strip() + '\n' + yield accum.strip() + "\n" accum = None # Multiple consecutive strings; combine them. 
@@ -152,48 +172,40 @@ def _finalize(self, result): yield accum.strip() def handle_data(self, data, *args, **kwargs): - """ - Store our data if it is not on the ignore list - """ + """Store our data if it is not on the ignore list.""" # initialize our previous flag if self._do_store: # Tidy our whitespace - content = self.WS_TRIM.sub(' ', data) + content = self.WS_TRIM.sub(" ", data) self._result.append(content) def handle_starttag(self, tag, attrs): - """ - Process our starting HTML Tag - """ + """Process our starting HTML Tag.""" # Toggle initial states self._do_store = tag not in self.IGNORE_TAGS if tag in self.BLOCK_TAGS: self._result.append(self.BLOCK_END) - if tag == 'li': - self._result.append('- ') + if tag == "li": + self._result.append("- ") - elif tag == 'br': - self._result.append('\n') + elif tag == "br": + self._result.append("\n") - elif tag == 'hr': + elif tag == "hr": if self._result and isinstance(self._result[-1], str): - self._result[-1] = self._result[-1].rstrip(' ') - else: - pass + self._result[-1] = self._result[-1].rstrip(" ") - self._result.append('\n---\n') + self._result.append("\n---\n") - elif tag == 'blockquote': - self._result.append(' >') + elif tag == "blockquote": + self._result.append(" >") def handle_endtag(self, tag): - """ - Edge case handling of open/close tags - """ + """Edge case handling of open/close tags.""" self._do_store = True if tag in self.BLOCK_TAGS: diff --git a/libs/apprise/decorators/__init__.py b/libs/apprise/decorators/__init__.py index bba1b3b064..9d57312c22 100644 --- a/libs/apprise/decorators/__init__.py +++ b/libs/apprise/decorators/__init__.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. 
-# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -28,7 +27,4 @@ from .notify import notify - -__all__ = [ - 'notify' -] +__all__ = ["notify"] diff --git a/libs/apprise/decorators/base.py b/libs/apprise/decorators/base.py index 82b90f18d0..1fcdb15f2f 100644 --- a/libs/apprise/decorators/base.py +++ b/libs/apprise/decorators/base.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. -# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -27,31 +26,32 @@ # POSSIBILITY OF SUCH DAMAGE.USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. -from ..plugins.base import NotifyBase -from ..manager_plugins import NotificationManager -from ..utils.parse import URL_DETAILS_RE, parse_url, url_assembly -from ..utils.logic import dict_full_update +import inspect + from .. import common from ..logger import logger -import inspect +from ..manager_plugins import NotificationManager +from ..plugins.base import NotifyBase +from ..utils.logic import dict_full_update +from ..utils.parse import URL_DETAILS_RE, parse_url, url_assembly # Grant access to our Notification Manager Singleton N_MGR = NotificationManager() class CustomNotifyPlugin(NotifyBase): - """ - Apprise Custom Plugin Hook + """Apprise Custom Plugin Hook. 
This gets initialized based on @notify decorator definitions - """ - # Our Custom notification - service_url = 'https://github.com/caronc/apprise/wiki/Custom_Notification' + + # Our Custom notification; identify the URL users can go to learn + # more about the service this wrapper supports: + service_url = "https://appriseit.com/library/extending/decorator/" # Over-ride our category since this inheritance of the NotifyBase class # should be treated differently. - category = 'custom' + category = "custom" # Support Attachments attachment_support = True @@ -60,60 +60,58 @@ class CustomNotifyPlugin(NotifyBase): storage_mode = common.PersistentStoreMode.AUTO # Define object templates - templates = ( - '{schema}://', - ) + templates = ("{schema}://",) @staticmethod def parse_url(url): - """ - Parses the URL and returns arguments retrieved - - """ + """Parses the URL and returns arguments retrieved.""" return parse_url(url, verify_host=False, simple=True) def url(self, privacy=False, *args, **kwargs): - """ - General URL assembly - """ - return '{schema}://'.format(schema=self.secure_protocol) + """General URL assembly.""" + return f"{self.secure_protocol}://" @staticmethod def instantiate_plugin(url, send_func, name=None): - """ - The function used to add a new notification plugin based on the schema - parsed from the provided URL into our supported matrix structure. - """ + """The function used to add a new notification plugin based on the + schema parsed from the provided URL into our supported matrix + structure.""" if not isinstance(url, str): - msg = 'An invalid custom notify url/schema ({}) provided in ' \ - 'function {}.'.format(url, send_func.__name__) + msg = ( + f"An invalid custom notify url/schema ({url}) provided in " + f"function {send_func.__name__}." 
+ ) logger.warning(msg) return None # Validate that our schema is okay re_match = URL_DETAILS_RE.match(url) if not re_match: - msg = 'An invalid custom notify url/schema ({}) provided in ' \ - 'function {}.'.format(url, send_func.__name__) + msg = ( + f"An invalid custom notify url/schema ({url}) provided in " + f"function {send_func.__name__}." + ) logger.warning(msg) return None # Acquire our schema - schema = re_match.group('schema').lower() + schema = re_match.group("schema").lower() - if not re_match.group('base'): - url = '{}://'.format(schema) + if not re_match.group("base"): + url = f"{schema}://" # Keep a default set of arguments to apply to all called references base_args = parse_url( - url, default_schema=schema, verify_host=False, simple=True) + url, default_schema=schema, verify_host=False, simple=True + ) if schema in N_MGR: # we're already handling this object - msg = 'The schema ({}) is already defined and could not be ' \ - 'loaded from custom notify function {}.' \ - .format(url, send_func.__name__) + msg = ( + f"The schema ({url}) is already defined and could not be " + f"loaded from custom notify function {send_func.__name__}." 
+ ) logger.warning(msg) return None @@ -124,15 +122,18 @@ def instantiate_plugin(url, send_func, name=None): class CustomNotifyPluginWrapper(CustomNotifyPlugin): # Our Service Name - service_name = name if isinstance(name, str) \ - and name else 'Custom - {}'.format(schema) + service_name = ( + name + if isinstance(name, str) and name + else f"Custom - {schema}" + ) # Store our matched schema secure_protocol = schema requirements = { # Define our required packaging in order to work - 'details': "Source: {}".format(inspect.getfile(send_func)) + "details": f"Source: {inspect.getfile(send_func)}" } # Assign our send() function @@ -142,18 +143,14 @@ class CustomNotifyPluginWrapper(CustomNotifyPlugin): _base_args = base_args def __init__(self, **kwargs): - """ - Our initialization - - """ + """Our initialization.""" # init parent super().__init__(**kwargs) self._default_args = {} # Some variables do not need to be set - if 'secure' in kwargs: - del kwargs['secure'] + kwargs.pop("secure", None) # Apply our updates based on what was parsed dict_full_update(self._default_args, self._base_args) @@ -161,52 +158,54 @@ def __init__(self, **kwargs): # Update our arguments (applying them to what we originally) # initialized as - self._default_args['url'] = url_assembly(**self._default_args) - - def send(self, body, title='', notify_type=common.NotifyType.INFO, - *args, **kwargs): - """ - Our send() call which triggers our hook - """ + self._default_args["url"] = url_assembly(**self._default_args) + + def send( + self, + body, + title="", + notify_type=common.NotifyType.INFO, + *args, + **kwargs, + ): + """Our send() call which triggers our hook.""" response = False try: # Enforce a boolean response result = self.__send( - body, title, notify_type, *args, - meta=self._default_args, **kwargs) - - if result is None: - # The wrapper did not define a return (or returned - # None) - # this is treated as a successful return as it is - # assumed the developer did not care about the 
result - # of the call. - response = True - - else: - # Perform boolean check (allowing obects to also be - # returned and check against the __bool__ call - response = True if result else False + body, + title, + notify_type.value, + *args, + meta=self._default_args, + **kwargs, + ) + + # None and True are both considered successful + # False however is passed along further upstream + response = True if result is None else bool(result) except Exception as e: # Unhandled Exception self.logger.warning( - 'An exception occured sending a %s notification.', - N_MGR[self.secure_protocol].service_name) + "An exception occured sending a %s notification.", + N_MGR[self.secure_protocol].service_name, + ) self.logger.debug( - '%s Exception: %s', - N_MGR[self.secure_protocol], str(e)) + "%s Exception: %s", N_MGR[self.secure_protocol], e) return False if response: self.logger.info( - 'Sent %s notification.', - N_MGR[self.secure_protocol].service_name) + "Sent %s notification.", + N_MGR[self.secure_protocol].service_name, + ) else: self.logger.warning( - 'Failed to send %s notification.', - N_MGR[self.secure_protocol].service_name) + "Failed to send %s notification.", + N_MGR[self.secure_protocol].service_name, + ) return response # Store our plugin into our core map file diff --git a/libs/apprise/decorators/notify.py b/libs/apprise/decorators/notify.py index 41e49de855..882e19e827 100644 --- a/libs/apprise/decorators/notify.py +++ b/libs/apprise/decorators/notify.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. -# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -113,14 +112,14 @@ def your_action(body, title, *args, **kwargs) treated as as success (True). 
""" + def wrapper(func): - """ - Instantiate our custom (notification) plugin - """ + """Instantiate our custom (notification) plugin.""" # Generate CustomNotifyPlugin.instantiate_plugin( - url=on, send_func=func, name=name) + url=on, send_func=func, name=name + ) return func diff --git a/libs/apprise/emojis.py b/libs/apprise/emojis.py index 6b9f2dec01..cf6c4ae543 100644 --- a/libs/apprise/emojis.py +++ b/libs/apprise/emojis.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. -# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -28,10 +27,11 @@ import re import time + from .logger import logger # All Emoji's are wrapped in this character -DELIM = ':' +DELIM = ":" # the map simply contains the emoji that should be mapped to the regular # expression it should be swapped on. 
@@ -40,2208 +40,2110 @@ # # Face Smiling # - DELIM + r'grinning' + DELIM: '😄', - DELIM + r'smile' + DELIM: '😄', - DELIM + r'(laughing|satisfied)' + DELIM: '😆', - DELIM + r'rofl' + DELIM: '🤣', - DELIM + r'slightly_smiling_face' + DELIM: '🙂', - DELIM + r'wink' + DELIM: '😉', - DELIM + r'innocent' + DELIM: '😇', - DELIM + r'smiley' + DELIM: '😃', - DELIM + r'grin' + DELIM: '😃', - DELIM + r'sweat_smile' + DELIM: '😅', - DELIM + r'joy' + DELIM: '😂', - DELIM + r'upside_down_face' + DELIM: '🙃', - DELIM + r'blush' + DELIM: '😊', - + DELIM + r"grinning" + DELIM: "😄", + DELIM + r"smile" + DELIM: "😄", + DELIM + r"(laughing|satisfied)" + DELIM: "😆", + DELIM + r"rofl" + DELIM: "🤣", + DELIM + r"slightly_smiling_face" + DELIM: "🙂", + DELIM + r"wink" + DELIM: "😉", + DELIM + r"innocent" + DELIM: "😇", + DELIM + r"smiley" + DELIM: "😃", + DELIM + r"grin" + DELIM: "😃", + DELIM + r"sweat_smile" + DELIM: "😅", + DELIM + r"joy" + DELIM: "😂", + DELIM + r"upside_down_face" + DELIM: "🙃", + DELIM + r"blush" + DELIM: "😊", # # Face Affection # - DELIM + r'smiling_face_with_three_hearts' + DELIM: '🥰', - DELIM + r'star_struck' + DELIM: '🤩', - DELIM + r'kissing' + DELIM: '😗', - DELIM + r'kissing_closed_eyes' + DELIM: '😚', - DELIM + r'smiling_face_with_tear' + DELIM: '🥲', - DELIM + r'heart_eyes' + DELIM: 'ðŸ˜', - DELIM + r'kissing_heart' + DELIM: '😘', - DELIM + r'relaxed' + DELIM: '☺ï¸', - DELIM + r'kissing_smiling_eyes' + DELIM: '😙', - + DELIM + r"smiling_face_with_three_hearts" + DELIM: "🥰", + DELIM + r"star_struck" + DELIM: "🤩", + DELIM + r"kissing" + DELIM: "😗", + DELIM + r"kissing_closed_eyes" + DELIM: "😚", + DELIM + r"smiling_face_with_tear" + DELIM: "🥲", + DELIM + r"heart_eyes" + DELIM: "ðŸ˜", + DELIM + r"kissing_heart" + DELIM: "😘", + DELIM + r"relaxed" + DELIM: "☺ï¸", + DELIM + r"kissing_smiling_eyes" + DELIM: "😙", # # Face Tongue # - DELIM + r'yum' + DELIM: '😋', - DELIM + r'stuck_out_tongue_winking_eye' + DELIM: '😜', - DELIM + r'stuck_out_tongue_closed_eyes' + DELIM: 'ðŸ˜', - DELIM + 
r'stuck_out_tongue' + DELIM: '😛', - DELIM + r'zany_face' + DELIM: '🤪', - DELIM + r'money_mouth_face' + DELIM: '🤑', - + DELIM + r"yum" + DELIM: "😋", + DELIM + r"stuck_out_tongue_winking_eye" + DELIM: "😜", + DELIM + r"stuck_out_tongue_closed_eyes" + DELIM: "ðŸ˜", + DELIM + r"stuck_out_tongue" + DELIM: "😛", + DELIM + r"zany_face" + DELIM: "🤪", + DELIM + r"money_mouth_face" + DELIM: "🤑", # # Face Hand # - DELIM + r'hugs' + DELIM: '🤗', - DELIM + r'shushing_face' + DELIM: '🤫', - DELIM + r'hand_over_mouth' + DELIM: '🤭', - DELIM + r'thinking' + DELIM: '🤔', - + DELIM + r"hugs" + DELIM: "🤗", + DELIM + r"shushing_face" + DELIM: "🤫", + DELIM + r"hand_over_mouth" + DELIM: "🤭", + DELIM + r"thinking" + DELIM: "🤔", # # Face Neutral Skeptical # - DELIM + r'zipper_mouth_face' + DELIM: 'ðŸ¤', - DELIM + r'neutral_face' + DELIM: 'ðŸ˜', - DELIM + r'no_mouth' + DELIM: '😶', - DELIM + r'smirk' + DELIM: 'ðŸ˜', - DELIM + r'roll_eyes' + DELIM: '🙄', - DELIM + r'face_exhaling' + DELIM: '😮â€ðŸ’¨', - DELIM + r'raised_eyebrow' + DELIM: '🤨', - DELIM + r'expressionless' + DELIM: '😑', - DELIM + r'face_in_clouds' + DELIM: '😶â€ðŸŒ«ï¸', - DELIM + r'unamused' + DELIM: '😒', - DELIM + r'grimacing' + DELIM: '😬', - DELIM + r'lying_face' + DELIM: '🤥', - + DELIM + r"zipper_mouth_face" + DELIM: "ðŸ¤", + DELIM + r"neutral_face" + DELIM: "ðŸ˜", + DELIM + r"no_mouth" + DELIM: "😶", + DELIM + r"smirk" + DELIM: "ðŸ˜", + DELIM + r"roll_eyes" + DELIM: "🙄", + DELIM + r"face_exhaling" + DELIM: "😮â€ðŸ’¨", + DELIM + r"raised_eyebrow" + DELIM: "🤨", + DELIM + r"expressionless" + DELIM: "😑", + DELIM + r"face_in_clouds" + DELIM: "😶â€ðŸŒ«ï¸", + DELIM + r"unamused" + DELIM: "😒", + DELIM + r"grimacing" + DELIM: "😬", + DELIM + r"lying_face" + DELIM: "🤥", # # Face Sleepy # - DELIM + r'relieved' + DELIM: '😌', - DELIM + r'sleepy' + DELIM: '😪', - DELIM + r'sleeping' + DELIM: '😴', - DELIM + r'pensive' + DELIM: '😔', - DELIM + r'drooling_face' + DELIM: '🤤', - + DELIM + r"relieved" + DELIM: "😌", + DELIM + r"sleepy" + DELIM: "😪", + DELIM + 
r"sleeping" + DELIM: "😴", + DELIM + r"pensive" + DELIM: "😔", + DELIM + r"drooling_face" + DELIM: "🤤", # # Face Unwell # - DELIM + r'mask' + DELIM: '😷', - DELIM + r'face_with_head_bandage' + DELIM: '🤕', - DELIM + r'vomiting_face' + DELIM: '🤮', - DELIM + r'hot_face' + DELIM: '🥵', - DELIM + r'woozy_face' + DELIM: '🥴', - DELIM + r'face_with_spiral_eyes' + DELIM: '😵â€ðŸ’«', - DELIM + r'face_with_thermometer' + DELIM: '🤒', - DELIM + r'nauseated_face' + DELIM: '🤢', - DELIM + r'sneezing_face' + DELIM: '🤧', - DELIM + r'cold_face' + DELIM: '🥶', - DELIM + r'dizzy_face' + DELIM: '😵', - DELIM + r'exploding_head' + DELIM: '🤯', - + DELIM + r"mask" + DELIM: "😷", + DELIM + r"face_with_head_bandage" + DELIM: "🤕", + DELIM + r"vomiting_face" + DELIM: "🤮", + DELIM + r"hot_face" + DELIM: "🥵", + DELIM + r"woozy_face" + DELIM: "🥴", + DELIM + r"face_with_spiral_eyes" + DELIM: "😵â€ðŸ’«", + DELIM + r"face_with_thermometer" + DELIM: "🤒", + DELIM + r"nauseated_face" + DELIM: "🤢", + DELIM + r"sneezing_face" + DELIM: "🤧", + DELIM + r"cold_face" + DELIM: "🥶", + DELIM + r"dizzy_face" + DELIM: "😵", + DELIM + r"exploding_head" + DELIM: "🤯", # # Face Hat # - DELIM + r'cowboy_hat_face' + DELIM: '🤠', - DELIM + r'disguised_face' + DELIM: '🥸', - DELIM + r'partying_face' + DELIM: '🥳', - + DELIM + r"cowboy_hat_face" + DELIM: "🤠", + DELIM + r"disguised_face" + DELIM: "🥸", + DELIM + r"partying_face" + DELIM: "🥳", # # Face Glasses # - DELIM + r'sunglasses' + DELIM: '😎', - DELIM + r'monocle_face' + DELIM: 'ðŸ§', - DELIM + r'nerd_face' + DELIM: '🤓', - + DELIM + r"sunglasses" + DELIM: "😎", + DELIM + r"monocle_face" + DELIM: "ðŸ§", + DELIM + r"nerd_face" + DELIM: "🤓", # # Face Concerned # - DELIM + r'confused' + DELIM: '😕', - DELIM + r'slightly_frowning_face' + DELIM: 'ðŸ™', - DELIM + r'open_mouth' + DELIM: '😮', - DELIM + r'astonished' + DELIM: '😲', - DELIM + r'pleading_face' + DELIM: '🥺', - DELIM + r'anguished' + DELIM: '😧', - DELIM + r'cold_sweat' + DELIM: '😰', - DELIM + r'cry' + DELIM: '😢', - DELIM + r'scream' 
+ DELIM: '😱', - DELIM + r'persevere' + DELIM: '😣', - DELIM + r'sweat' + DELIM: '😓', - DELIM + r'tired_face' + DELIM: '😫', - DELIM + r'worried' + DELIM: '😟', - DELIM + r'frowning_face' + DELIM: '☹ï¸', - DELIM + r'hushed' + DELIM: '😯', - DELIM + r'flushed' + DELIM: '😳', - DELIM + r'frowning' + DELIM: '😦', - DELIM + r'fearful' + DELIM: '😨', - DELIM + r'disappointed_relieved' + DELIM: '😥', - DELIM + r'sob' + DELIM: '😭', - DELIM + r'confounded' + DELIM: '😖', - DELIM + r'disappointed' + DELIM: '😞', - DELIM + r'weary' + DELIM: '😩', - DELIM + r'yawning_face' + DELIM: '🥱', - + DELIM + r"confused" + DELIM: "😕", + DELIM + r"slightly_frowning_face" + DELIM: "ðŸ™", + DELIM + r"open_mouth" + DELIM: "😮", + DELIM + r"astonished" + DELIM: "😲", + DELIM + r"pleading_face" + DELIM: "🥺", + DELIM + r"anguished" + DELIM: "😧", + DELIM + r"cold_sweat" + DELIM: "😰", + DELIM + r"cry" + DELIM: "😢", + DELIM + r"scream" + DELIM: "😱", + DELIM + r"persevere" + DELIM: "😣", + DELIM + r"sweat" + DELIM: "😓", + DELIM + r"tired_face" + DELIM: "😫", + DELIM + r"worried" + DELIM: "😟", + DELIM + r"frowning_face" + DELIM: "☹ï¸", + DELIM + r"hushed" + DELIM: "😯", + DELIM + r"flushed" + DELIM: "😳", + DELIM + r"frowning" + DELIM: "😦", + DELIM + r"fearful" + DELIM: "😨", + DELIM + r"disappointed_relieved" + DELIM: "😥", + DELIM + r"sob" + DELIM: "😭", + DELIM + r"confounded" + DELIM: "😖", + DELIM + r"disappointed" + DELIM: "😞", + DELIM + r"weary" + DELIM: "😩", + DELIM + r"yawning_face" + DELIM: "🥱", # # Face Negative # - DELIM + r'triumph' + DELIM: '😤', - DELIM + r'angry' + DELIM: '😠', - DELIM + r'smiling_imp' + DELIM: '😈', - DELIM + r'skull' + DELIM: '💀', - DELIM + r'(pout|rage)' + DELIM: '😡', - DELIM + r'cursing_face' + DELIM: '🤬', - DELIM + r'imp' + DELIM: '👿', - DELIM + r'skull_and_crossbones' + DELIM: '☠ï¸', - + DELIM + r"triumph" + DELIM: "😤", + DELIM + r"angry" + DELIM: "😠", + DELIM + r"smiling_imp" + DELIM: "😈", + DELIM + r"skull" + DELIM: "💀", + DELIM + r"(pout|rage)" + DELIM: "😡", + DELIM + 
r"cursing_face" + DELIM: "🤬", + DELIM + r"imp" + DELIM: "👿", + DELIM + r"skull_and_crossbones" + DELIM: "☠ï¸", # # Face Costume # - DELIM + r'(hankey|poop|shit)' + DELIM: '💩', - DELIM + r'japanese_ogre' + DELIM: '👹', - DELIM + r'ghost' + DELIM: '👻', - DELIM + r'space_invader' + DELIM: '👾', - DELIM + r'clown_face' + DELIM: '🤡', - DELIM + r'japanese_goblin' + DELIM: '👺', - DELIM + r'alien' + DELIM: '👽', - DELIM + r'robot' + DELIM: '🤖', - + DELIM + r"(hankey|poop|shit)" + DELIM: "💩", + DELIM + r"japanese_ogre" + DELIM: "👹", + DELIM + r"ghost" + DELIM: "👻", + DELIM + r"space_invader" + DELIM: "👾", + DELIM + r"clown_face" + DELIM: "🤡", + DELIM + r"japanese_goblin" + DELIM: "👺", + DELIM + r"alien" + DELIM: "👽", + DELIM + r"robot" + DELIM: "🤖", # # Cat Face # - DELIM + r'smiley_cat' + DELIM: '😺', - DELIM + r'joy_cat' + DELIM: '😹', - DELIM + r'smirk_cat' + DELIM: '😼', - DELIM + r'scream_cat' + DELIM: '🙀', - DELIM + r'pouting_cat' + DELIM: '😾', - DELIM + r'smile_cat' + DELIM: '😸', - DELIM + r'heart_eyes_cat' + DELIM: '😻', - DELIM + r'kissing_cat' + DELIM: '😽', - DELIM + r'crying_cat_face' + DELIM: '😿', - + DELIM + r"smiley_cat" + DELIM: "😺", + DELIM + r"joy_cat" + DELIM: "😹", + DELIM + r"smirk_cat" + DELIM: "😼", + DELIM + r"scream_cat" + DELIM: "🙀", + DELIM + r"pouting_cat" + DELIM: "😾", + DELIM + r"smile_cat" + DELIM: "😸", + DELIM + r"heart_eyes_cat" + DELIM: "😻", + DELIM + r"kissing_cat" + DELIM: "😽", + DELIM + r"crying_cat_face" + DELIM: "😿", # # Monkey Face # - DELIM + r'see_no_evil' + DELIM: '🙈', - DELIM + r'speak_no_evil' + DELIM: '🙊', - DELIM + r'hear_no_evil' + DELIM: '🙉', - + DELIM + r"see_no_evil" + DELIM: "🙈", + DELIM + r"speak_no_evil" + DELIM: "🙊", + DELIM + r"hear_no_evil" + DELIM: "🙉", # # Heart # - DELIM + r'love_letter' + DELIM: '💌', - DELIM + r'gift_heart' + DELIM: 'ðŸ’', - DELIM + r'heartpulse' + DELIM: '💗', - DELIM + r'revolving_hearts' + DELIM: '💞', - DELIM + r'heart_decoration' + DELIM: '💟', - DELIM + r'broken_heart' + DELIM: '💔', - DELIM + 
r'mending_heart' + DELIM: 'â¤ï¸â€ðŸ©¹', - DELIM + r'orange_heart' + DELIM: '🧡', - DELIM + r'green_heart' + DELIM: '💚', - DELIM + r'purple_heart' + DELIM: '💜', - DELIM + r'black_heart' + DELIM: '🖤', - DELIM + r'cupid' + DELIM: '💘', - DELIM + r'sparkling_heart' + DELIM: '💖', - DELIM + r'heartbeat' + DELIM: '💓', - DELIM + r'two_hearts' + DELIM: '💕', - DELIM + r'heavy_heart_exclamation' + DELIM: 'â£ï¸', - DELIM + r'heart_on_fire' + DELIM: 'â¤ï¸â€ðŸ”¥', - DELIM + r'heart' + DELIM: 'â¤ï¸', - DELIM + r'yellow_heart' + DELIM: '💛', - DELIM + r'blue_heart' + DELIM: '💙', - DELIM + r'brown_heart' + DELIM: '🤎', - DELIM + r'white_heart' + DELIM: 'ðŸ¤', - + DELIM + r"love_letter" + DELIM: "💌", + DELIM + r"gift_heart" + DELIM: "ðŸ’", + DELIM + r"heartpulse" + DELIM: "💗", + DELIM + r"revolving_hearts" + DELIM: "💞", + DELIM + r"heart_decoration" + DELIM: "💟", + DELIM + r"broken_heart" + DELIM: "💔", + DELIM + r"mending_heart" + DELIM: "â¤ï¸â€ðŸ©¹", + DELIM + r"orange_heart" + DELIM: "🧡", + DELIM + r"green_heart" + DELIM: "💚", + DELIM + r"purple_heart" + DELIM: "💜", + DELIM + r"black_heart" + DELIM: "🖤", + DELIM + r"cupid" + DELIM: "💘", + DELIM + r"sparkling_heart" + DELIM: "💖", + DELIM + r"heartbeat" + DELIM: "💓", + DELIM + r"two_hearts" + DELIM: "💕", + DELIM + r"heavy_heart_exclamation" + DELIM: "â£ï¸", + DELIM + r"heart_on_fire" + DELIM: "â¤ï¸â€ðŸ”¥", + DELIM + r"heart" + DELIM: "â¤ï¸", + DELIM + r"yellow_heart" + DELIM: "💛", + DELIM + r"blue_heart" + DELIM: "💙", + DELIM + r"brown_heart" + DELIM: "🤎", + DELIM + r"white_heart" + DELIM: "ðŸ¤", # # Emotion # - DELIM + r'kiss' + DELIM: '💋', - DELIM + r'anger' + DELIM: '💢', - DELIM + r'dizzy' + DELIM: '💫', - DELIM + r'dash' + DELIM: '💨', - DELIM + r'speech_balloon' + DELIM: '💬', - DELIM + r'left_speech_bubble' + DELIM: '🗨ï¸', - DELIM + r'thought_balloon' + DELIM: '💭', - DELIM + r'100' + DELIM: '💯', - DELIM + r'(boom|collision)' + DELIM: '💥', - DELIM + r'sweat_drops' + DELIM: '💦', - DELIM + r'hole' + DELIM: '🕳ï¸', - DELIM + 
r'eye_speech_bubble' + DELIM: 'ðŸ‘ï¸â€ðŸ—¨ï¸', - DELIM + r'right_anger_bubble' + DELIM: '🗯ï¸', - DELIM + r'zzz' + DELIM: '💤', - + DELIM + r"kiss" + DELIM: "💋", + DELIM + r"anger" + DELIM: "💢", + DELIM + r"dizzy" + DELIM: "💫", + DELIM + r"dash" + DELIM: "💨", + DELIM + r"speech_balloon" + DELIM: "💬", + DELIM + r"left_speech_bubble" + DELIM: "🗨ï¸", + DELIM + r"thought_balloon" + DELIM: "💭", + DELIM + r"100" + DELIM: "💯", + DELIM + r"(boom|collision)" + DELIM: "💥", + DELIM + r"sweat_drops" + DELIM: "💦", + DELIM + r"hole" + DELIM: "🕳ï¸", + DELIM + r"eye_speech_bubble" + DELIM: "ðŸ‘ï¸â€ðŸ—¨ï¸", + DELIM + r"right_anger_bubble" + DELIM: "🗯ï¸", + DELIM + r"zzz" + DELIM: "💤", # # Hand Fingers Open # - DELIM + r'wave' + DELIM: '👋', - DELIM + r'raised_hand_with_fingers_splayed' + DELIM: 'ðŸ–ï¸', - DELIM + r'vulcan_salute' + DELIM: '🖖', - DELIM + r'raised_back_of_hand' + DELIM: '🤚', - DELIM + r'(raised_)?hand' + DELIM: '✋', - + DELIM + r"wave" + DELIM: "👋", + DELIM + r"raised_hand_with_fingers_splayed" + DELIM: "ðŸ–ï¸", + DELIM + r"vulcan_salute" + DELIM: "🖖", + DELIM + r"raised_back_of_hand" + DELIM: "🤚", + DELIM + r"(raised_)?hand" + DELIM: "✋", # # Hand Fingers Partial # - DELIM + r'ok_hand' + DELIM: '👌', - DELIM + r'pinched_fingers' + DELIM: '🤌', - DELIM + r'pinching_hand' + DELIM: 'ðŸ¤', - DELIM + r'v' + DELIM: '✌ï¸', - DELIM + r'crossed_fingers' + DELIM: '🤞', - DELIM + r'love_you_gesture' + DELIM: '🤟', - DELIM + r'metal' + DELIM: '🤘', - DELIM + r'call_me_hand' + DELIM: '🤙', - + DELIM + r"ok_hand" + DELIM: "👌", + DELIM + r"pinched_fingers" + DELIM: "🤌", + DELIM + r"pinching_hand" + DELIM: "ðŸ¤", + DELIM + r"v" + DELIM: "✌ï¸", + DELIM + r"crossed_fingers" + DELIM: "🤞", + DELIM + r"love_you_gesture" + DELIM: "🤟", + DELIM + r"metal" + DELIM: "🤘", + DELIM + r"call_me_hand" + DELIM: "🤙", # # Hand Single Finger # - DELIM + r'point_left' + DELIM: '👈', - DELIM + r'point_right' + DELIM: '👉', - DELIM + r'point_up_2' + DELIM: '👆', - DELIM + r'(fu|middle_finger)' + DELIM: '🖕', - DELIM 
+ r'point_down' + DELIM: '👇', - DELIM + r'point_up' + DELIM: 'â˜ï¸', - + DELIM + r"point_left" + DELIM: "👈", + DELIM + r"point_right" + DELIM: "👉", + DELIM + r"point_up_2" + DELIM: "👆", + DELIM + r"(fu|middle_finger)" + DELIM: "🖕", + DELIM + r"point_down" + DELIM: "👇", + DELIM + r"point_up" + DELIM: "â˜ï¸", # # Hand Fingers Closed # - DELIM + r'(\+1|thumbsup)' + DELIM: 'ðŸ‘', - DELIM + r'(-1|thumbsdown)' + DELIM: '👎', - DELIM + r'fist' + DELIM: '✊', - DELIM + r'(fist_(raised|oncoming)|(face)?punch)' + DELIM: '👊', - DELIM + r'fist_left' + DELIM: '🤛', - DELIM + r'fist_right' + DELIM: '🤜', - + DELIM + r"(\+1|thumbsup)" + DELIM: "ðŸ‘", + DELIM + r"(-1|thumbsdown)" + DELIM: "👎", + DELIM + r"fist" + DELIM: "✊", + DELIM + r"(fist_(raised|oncoming)|(face)?punch)" + DELIM: "👊", + DELIM + r"fist_left" + DELIM: "🤛", + DELIM + r"fist_right" + DELIM: "🤜", # # Hands # - DELIM + r'clap' + DELIM: 'ðŸ‘', - DELIM + r'raised_hands' + DELIM: '🙌', - DELIM + r'open_hands' + DELIM: 'ðŸ‘', - DELIM + r'palms_up_together' + DELIM: '🤲', - DELIM + r'handshake' + DELIM: 'ðŸ¤', - DELIM + r'pray' + DELIM: 'ðŸ™', - + DELIM + r"clap" + DELIM: "ðŸ‘", + DELIM + r"raised_hands" + DELIM: "🙌", + DELIM + r"open_hands" + DELIM: "ðŸ‘", + DELIM + r"palms_up_together" + DELIM: "🤲", + DELIM + r"handshake" + DELIM: "ðŸ¤", + DELIM + r"pray" + DELIM: "ðŸ™", # # Hand Prop # - DELIM + r'writing_hand' + DELIM: 'âœï¸', - DELIM + r'nail_care' + DELIM: '💅', - DELIM + r'selfie' + DELIM: '🤳', - + DELIM + r"writing_hand" + DELIM: "âœï¸", + DELIM + r"nail_care" + DELIM: "💅", + DELIM + r"selfie" + DELIM: "🤳", # # Body Parts # - DELIM + r'muscle' + DELIM: '💪', - DELIM + r'mechanical_arm' + DELIM: '🦾', - DELIM + r'mechanical_leg' + DELIM: '🦿', - DELIM + r'leg' + DELIM: '🦵', - DELIM + r'foot' + DELIM: '🦶', - DELIM + r'ear' + DELIM: '👂', - DELIM + r'ear_with_hearing_aid' + DELIM: '🦻', - DELIM + r'nose' + DELIM: '👃', - DELIM + r'brain' + DELIM: '🧠', - DELIM + r'anatomical_heart' + DELIM: '🫀', - DELIM + r'lungs' + DELIM: 'ðŸ«', 
- DELIM + r'tooth' + DELIM: '🦷', - DELIM + r'bone' + DELIM: '🦴', - DELIM + r'eyes' + DELIM: '👀', - DELIM + r'eye' + DELIM: 'ðŸ‘ï¸', - DELIM + r'tongue' + DELIM: '👅', - DELIM + r'lips' + DELIM: '👄', - + DELIM + r"muscle" + DELIM: "💪", + DELIM + r"mechanical_arm" + DELIM: "🦾", + DELIM + r"mechanical_leg" + DELIM: "🦿", + DELIM + r"leg" + DELIM: "🦵", + DELIM + r"foot" + DELIM: "🦶", + DELIM + r"ear" + DELIM: "👂", + DELIM + r"ear_with_hearing_aid" + DELIM: "🦻", + DELIM + r"nose" + DELIM: "👃", + DELIM + r"brain" + DELIM: "🧠", + DELIM + r"anatomical_heart" + DELIM: "🫀", + DELIM + r"lungs" + DELIM: "ðŸ«", + DELIM + r"tooth" + DELIM: "🦷", + DELIM + r"bone" + DELIM: "🦴", + DELIM + r"eyes" + DELIM: "👀", + DELIM + r"eye" + DELIM: "ðŸ‘ï¸", + DELIM + r"tongue" + DELIM: "👅", + DELIM + r"lips" + DELIM: "👄", # # Person # - DELIM + r'baby' + DELIM: '👶', - DELIM + r'child' + DELIM: '🧒', - DELIM + r'boy' + DELIM: '👦', - DELIM + r'girl' + DELIM: '👧', - DELIM + r'adult' + DELIM: '🧑', - DELIM + r'blond_haired_person' + DELIM: '👱', - DELIM + r'man' + DELIM: '👨', - DELIM + r'bearded_person' + DELIM: '🧔', - DELIM + r'man_beard' + DELIM: '🧔â€â™‚ï¸', - DELIM + r'woman_beard' + DELIM: '🧔â€â™€ï¸', - DELIM + r'red_haired_man' + DELIM: '👨â€ðŸ¦°', - DELIM + r'curly_haired_man' + DELIM: '👨â€ðŸ¦±', - DELIM + r'white_haired_man' + DELIM: '👨â€ðŸ¦³', - DELIM + r'bald_man' + DELIM: '👨â€ðŸ¦²', - DELIM + r'woman' + DELIM: '👩', - DELIM + r'red_haired_woman' + DELIM: '👩â€ðŸ¦°', - DELIM + r'person_red_hair' + DELIM: '🧑â€ðŸ¦°', - DELIM + r'curly_haired_woman' + DELIM: '👩â€ðŸ¦±', - DELIM + r'person_curly_hair' + DELIM: '🧑â€ðŸ¦±', - DELIM + r'white_haired_woman' + DELIM: '👩â€ðŸ¦³', - DELIM + r'person_white_hair' + DELIM: '🧑â€ðŸ¦³', - DELIM + r'bald_woman' + DELIM: '👩â€ðŸ¦²', - DELIM + r'person_bald' + DELIM: '🧑â€ðŸ¦²', - DELIM + r'blond_(haired_)?woman' + DELIM: '👱â€â™€ï¸', - DELIM + r'blond_haired_man' + DELIM: '👱â€â™‚ï¸', - DELIM + r'older_adult' + DELIM: '🧓', - DELIM + r'older_man' + DELIM: '👴', - DELIM + 
r'older_woman' + DELIM: '👵', - + DELIM + r"baby" + DELIM: "👶", + DELIM + r"child" + DELIM: "🧒", + DELIM + r"boy" + DELIM: "👦", + DELIM + r"girl" + DELIM: "👧", + DELIM + r"adult" + DELIM: "🧑", + DELIM + r"blond_haired_person" + DELIM: "👱", + DELIM + r"man" + DELIM: "👨", + DELIM + r"bearded_person" + DELIM: "🧔", + DELIM + r"man_beard" + DELIM: "🧔â€â™‚ï¸", + DELIM + r"woman_beard" + DELIM: "🧔â€â™€ï¸", + DELIM + r"red_haired_man" + DELIM: "👨â€ðŸ¦°", + DELIM + r"curly_haired_man" + DELIM: "👨â€ðŸ¦±", + DELIM + r"white_haired_man" + DELIM: "👨â€ðŸ¦³", + DELIM + r"bald_man" + DELIM: "👨â€ðŸ¦²", + DELIM + r"woman" + DELIM: "👩", + DELIM + r"red_haired_woman" + DELIM: "👩â€ðŸ¦°", + DELIM + r"person_red_hair" + DELIM: "🧑â€ðŸ¦°", + DELIM + r"curly_haired_woman" + DELIM: "👩â€ðŸ¦±", + DELIM + r"person_curly_hair" + DELIM: "🧑â€ðŸ¦±", + DELIM + r"white_haired_woman" + DELIM: "👩â€ðŸ¦³", + DELIM + r"person_white_hair" + DELIM: "🧑â€ðŸ¦³", + DELIM + r"bald_woman" + DELIM: "👩â€ðŸ¦²", + DELIM + r"person_bald" + DELIM: "🧑â€ðŸ¦²", + DELIM + r"blond_(haired_)?woman" + DELIM: "👱â€â™€ï¸", + DELIM + r"blond_haired_man" + DELIM: "👱â€â™‚ï¸", + DELIM + r"older_adult" + DELIM: "🧓", + DELIM + r"older_man" + DELIM: "👴", + DELIM + r"older_woman" + DELIM: "👵", # # Person Gesture # - DELIM + r'frowning_person' + DELIM: 'ðŸ™', - DELIM + r'frowning_man' + DELIM: 'ðŸ™â€â™‚ï¸', - DELIM + r'frowning_woman' + DELIM: 'ðŸ™â€â™€ï¸', - DELIM + r'pouting_face' + DELIM: '🙎', - DELIM + r'pouting_man' + DELIM: '🙎â€â™‚ï¸', - DELIM + r'pouting_woman' + DELIM: '🙎â€â™€ï¸', - DELIM + r'no_good' + DELIM: '🙅', - DELIM + r'(ng|no_good)_man' + DELIM: '🙅â€â™‚ï¸', - DELIM + r'(ng_woman|no_good_woman)' + DELIM: '🙅â€â™€ï¸', - DELIM + r'ok_person' + DELIM: '🙆', - DELIM + r'ok_man' + DELIM: '🙆â€â™‚ï¸', - DELIM + r'ok_woman' + DELIM: '🙆â€â™€ï¸', - DELIM + r'(information_desk|tipping_hand_)person' + DELIM: 'ðŸ’', - DELIM + r'(sassy_man|tipping_hand_man)' + DELIM: 'ðŸ’â€â™‚ï¸', - DELIM + r'(sassy_woman|tipping_hand_woman)' + DELIM: 
'ðŸ’â€â™€ï¸', - DELIM + r'raising_hand' + DELIM: '🙋', - DELIM + r'raising_hand_man' + DELIM: '🙋â€â™‚ï¸', - DELIM + r'raising_hand_woman' + DELIM: '🙋â€â™€ï¸', - DELIM + r'deaf_person' + DELIM: 'ðŸ§', - DELIM + r'deaf_man' + DELIM: 'ðŸ§â€â™‚ï¸', - DELIM + r'deaf_woman' + DELIM: 'ðŸ§â€â™€ï¸', - DELIM + r'bow' + DELIM: '🙇', - DELIM + r'bowing_man' + DELIM: '🙇â€â™‚ï¸', - DELIM + r'bowing_woman' + DELIM: '🙇â€â™€ï¸', - DELIM + r'facepalm' + DELIM: '🤦', - DELIM + r'man_facepalming' + DELIM: '🤦â€â™‚ï¸', - DELIM + r'woman_facepalming' + DELIM: '🤦â€â™€ï¸', - DELIM + r'shrug' + DELIM: '🤷', - DELIM + r'man_shrugging' + DELIM: '🤷â€â™‚ï¸', - DELIM + r'woman_shrugging' + DELIM: '🤷â€â™€ï¸', - + DELIM + r"frowning_person" + DELIM: "ðŸ™", + DELIM + r"frowning_man" + DELIM: "ðŸ™â€â™‚ï¸", + DELIM + r"frowning_woman" + DELIM: "ðŸ™â€â™€ï¸", + DELIM + r"pouting_face" + DELIM: "🙎", + DELIM + r"pouting_man" + DELIM: "🙎â€â™‚ï¸", + DELIM + r"pouting_woman" + DELIM: "🙎â€â™€ï¸", + DELIM + r"no_good" + DELIM: "🙅", + DELIM + r"(ng|no_good)_man" + DELIM: "🙅â€â™‚ï¸", + DELIM + r"(ng_woman|no_good_woman)" + DELIM: "🙅â€â™€ï¸", + DELIM + r"ok_person" + DELIM: "🙆", + DELIM + r"ok_man" + DELIM: "🙆â€â™‚ï¸", + DELIM + r"ok_woman" + DELIM: "🙆â€â™€ï¸", + DELIM + r"(information_desk|tipping_hand_)person" + DELIM: "ðŸ’", + DELIM + r"(sassy_man|tipping_hand_man)" + DELIM: "ðŸ’â€â™‚ï¸", + DELIM + r"(sassy_woman|tipping_hand_woman)" + DELIM: "ðŸ’â€â™€ï¸", + DELIM + r"raising_hand" + DELIM: "🙋", + DELIM + r"raising_hand_man" + DELIM: "🙋â€â™‚ï¸", + DELIM + r"raising_hand_woman" + DELIM: "🙋â€â™€ï¸", + DELIM + r"deaf_person" + DELIM: "ðŸ§", + DELIM + r"deaf_man" + DELIM: "ðŸ§â€â™‚ï¸", + DELIM + r"deaf_woman" + DELIM: "ðŸ§â€â™€ï¸", + DELIM + r"bow" + DELIM: "🙇", + DELIM + r"bowing_man" + DELIM: "🙇â€â™‚ï¸", + DELIM + r"bowing_woman" + DELIM: "🙇â€â™€ï¸", + DELIM + r"facepalm" + DELIM: "🤦", + DELIM + r"man_facepalming" + DELIM: "🤦â€â™‚ï¸", + DELIM + r"woman_facepalming" + DELIM: "🤦â€â™€ï¸", + DELIM + r"shrug" + DELIM: 
"🤷", + DELIM + r"man_shrugging" + DELIM: "🤷â€â™‚ï¸", + DELIM + r"woman_shrugging" + DELIM: "🤷â€â™€ï¸", # # Person Role # - DELIM + r'health_worker' + DELIM: '🧑â€âš•ï¸', - DELIM + r'man_health_worker' + DELIM: '👨â€âš•ï¸', - DELIM + r'woman_health_worker' + DELIM: '👩â€âš•ï¸', - DELIM + r'student' + DELIM: '🧑â€ðŸŽ“', - DELIM + r'man_student' + DELIM: '👨â€ðŸŽ“', - DELIM + r'woman_student' + DELIM: '👩â€ðŸŽ“', - DELIM + r'teacher' + DELIM: '🧑â€ðŸ«', - DELIM + r'man_teacher' + DELIM: '👨â€ðŸ«', - DELIM + r'woman_teacher' + DELIM: '👩â€ðŸ«', - DELIM + r'judge' + DELIM: '🧑â€âš–ï¸', - DELIM + r'man_judge' + DELIM: '👨â€âš–ï¸', - DELIM + r'woman_judge' + DELIM: '👩â€âš–ï¸', - DELIM + r'farmer' + DELIM: '🧑â€ðŸŒ¾', - DELIM + r'man_farmer' + DELIM: '👨â€ðŸŒ¾', - DELIM + r'woman_farmer' + DELIM: '👩â€ðŸŒ¾', - DELIM + r'cook' + DELIM: '🧑â€ðŸ³', - DELIM + r'man_cook' + DELIM: '👨â€ðŸ³', - DELIM + r'woman_cook' + DELIM: '👩â€ðŸ³', - DELIM + r'mechanic' + DELIM: '🧑â€ðŸ”§', - DELIM + r'man_mechanic' + DELIM: '👨â€ðŸ”§', - DELIM + r'woman_mechanic' + DELIM: '👩â€ðŸ”§', - DELIM + r'factory_worker' + DELIM: '🧑â€ðŸ­', - DELIM + r'man_factory_worker' + DELIM: '👨â€ðŸ­', - DELIM + r'woman_factory_worker' + DELIM: '👩â€ðŸ­', - DELIM + r'office_worker' + DELIM: '🧑â€ðŸ’¼', - DELIM + r'man_office_worker' + DELIM: '👨â€ðŸ’¼', - DELIM + r'woman_office_worker' + DELIM: '👩â€ðŸ’¼', - DELIM + r'scientist' + DELIM: '🧑â€ðŸ”¬', - DELIM + r'man_scientist' + DELIM: '👨â€ðŸ”¬', - DELIM + r'woman_scientist' + DELIM: '👩â€ðŸ”¬', - DELIM + r'technologist' + DELIM: '🧑â€ðŸ’»', - DELIM + r'man_technologist' + DELIM: '👨â€ðŸ’»', - DELIM + r'woman_technologist' + DELIM: '👩â€ðŸ’»', - DELIM + r'singer' + DELIM: '🧑â€ðŸŽ¤', - DELIM + r'man_singer' + DELIM: '👨â€ðŸŽ¤', - DELIM + r'woman_singer' + DELIM: '👩â€ðŸŽ¤', - DELIM + r'artist' + DELIM: '🧑â€ðŸŽ¨', - DELIM + r'man_artist' + DELIM: '👨â€ðŸŽ¨', - DELIM + r'woman_artist' + DELIM: '👩â€ðŸŽ¨', - DELIM + r'pilot' + DELIM: '🧑â€âœˆï¸', - DELIM + r'man_pilot' + DELIM: '👨â€âœˆï¸', - DELIM + 
r'woman_pilot' + DELIM: '👩â€âœˆï¸', - DELIM + r'astronaut' + DELIM: '🧑â€ðŸš€', - DELIM + r'man_astronaut' + DELIM: '👨â€ðŸš€', - DELIM + r'woman_astronaut' + DELIM: '👩â€ðŸš€', - DELIM + r'firefighter' + DELIM: '🧑â€ðŸš’', - DELIM + r'man_firefighter' + DELIM: '👨â€ðŸš’', - DELIM + r'woman_firefighter' + DELIM: '👩â€ðŸš’', - DELIM + r'cop' + DELIM: '👮', - DELIM + r'police(_officer|man)' + DELIM: '👮â€â™‚ï¸', - DELIM + r'policewoman' + DELIM: '👮â€â™€ï¸', - DELIM + r'detective' + DELIM: '🕵ï¸', - DELIM + r'male_detective' + DELIM: '🕵ï¸â€â™‚ï¸', - DELIM + r'female_detective' + DELIM: '🕵ï¸â€â™€ï¸', - DELIM + r'guard' + DELIM: '💂', - DELIM + r'guardsman' + DELIM: '💂â€â™‚ï¸', - DELIM + r'guardswoman' + DELIM: '💂â€â™€ï¸', - DELIM + r'ninja' + DELIM: '🥷', - DELIM + r'construction_worker' + DELIM: '👷', - DELIM + r'construction_worker_man' + DELIM: '👷â€â™‚ï¸', - DELIM + r'construction_worker_woman' + DELIM: '👷â€â™€ï¸', - DELIM + r'prince' + DELIM: '🤴', - DELIM + r'princess' + DELIM: '👸', - DELIM + r'person_with_turban' + DELIM: '👳', - DELIM + r'man_with_turban' + DELIM: '👳â€â™‚ï¸', - DELIM + r'woman_with_turban' + DELIM: '👳â€â™€ï¸', - DELIM + r'man_with_gua_pi_mao' + DELIM: '👲', - DELIM + r'woman_with_headscarf' + DELIM: '🧕', - DELIM + r'person_in_tuxedo' + DELIM: '🤵', - DELIM + r'man_in_tuxedo' + DELIM: '🤵â€â™‚ï¸', - DELIM + r'woman_in_tuxedo' + DELIM: '🤵â€â™€ï¸', - DELIM + r'person_with_veil' + DELIM: '👰', - DELIM + r'man_with_veil' + DELIM: '👰â€â™‚ï¸', - DELIM + r'(bride|woman)_with_veil' + DELIM: '👰â€â™€ï¸', - DELIM + r'pregnant_woman' + DELIM: '🤰', - DELIM + r'breast_feeding' + DELIM: '🤱', - DELIM + r'woman_feeding_baby' + DELIM: '👩â€ðŸ¼', - DELIM + r'man_feeding_baby' + DELIM: '👨â€ðŸ¼', - DELIM + r'person_feeding_baby' + DELIM: '🧑â€ðŸ¼', - + DELIM + r"health_worker" + DELIM: "🧑â€âš•ï¸", + DELIM + r"man_health_worker" + DELIM: "👨â€âš•ï¸", + DELIM + r"woman_health_worker" + DELIM: "👩â€âš•ï¸", + DELIM + r"student" + DELIM: "🧑â€ðŸŽ“", + DELIM + r"man_student" + DELIM: "👨â€ðŸŽ“", 
+ DELIM + r"woman_student" + DELIM: "👩â€ðŸŽ“", + DELIM + r"teacher" + DELIM: "🧑â€ðŸ«", + DELIM + r"man_teacher" + DELIM: "👨â€ðŸ«", + DELIM + r"woman_teacher" + DELIM: "👩â€ðŸ«", + DELIM + r"judge" + DELIM: "🧑â€âš–ï¸", + DELIM + r"man_judge" + DELIM: "👨â€âš–ï¸", + DELIM + r"woman_judge" + DELIM: "👩â€âš–ï¸", + DELIM + r"farmer" + DELIM: "🧑â€ðŸŒ¾", + DELIM + r"man_farmer" + DELIM: "👨â€ðŸŒ¾", + DELIM + r"woman_farmer" + DELIM: "👩â€ðŸŒ¾", + DELIM + r"cook" + DELIM: "🧑â€ðŸ³", + DELIM + r"man_cook" + DELIM: "👨â€ðŸ³", + DELIM + r"woman_cook" + DELIM: "👩â€ðŸ³", + DELIM + r"mechanic" + DELIM: "🧑â€ðŸ”§", + DELIM + r"man_mechanic" + DELIM: "👨â€ðŸ”§", + DELIM + r"woman_mechanic" + DELIM: "👩â€ðŸ”§", + DELIM + r"factory_worker" + DELIM: "🧑â€ðŸ­", + DELIM + r"man_factory_worker" + DELIM: "👨â€ðŸ­", + DELIM + r"woman_factory_worker" + DELIM: "👩â€ðŸ­", + DELIM + r"office_worker" + DELIM: "🧑â€ðŸ’¼", + DELIM + r"man_office_worker" + DELIM: "👨â€ðŸ’¼", + DELIM + r"woman_office_worker" + DELIM: "👩â€ðŸ’¼", + DELIM + r"scientist" + DELIM: "🧑â€ðŸ”¬", + DELIM + r"man_scientist" + DELIM: "👨â€ðŸ”¬", + DELIM + r"woman_scientist" + DELIM: "👩â€ðŸ”¬", + DELIM + r"technologist" + DELIM: "🧑â€ðŸ’»", + DELIM + r"man_technologist" + DELIM: "👨â€ðŸ’»", + DELIM + r"woman_technologist" + DELIM: "👩â€ðŸ’»", + DELIM + r"singer" + DELIM: "🧑â€ðŸŽ¤", + DELIM + r"man_singer" + DELIM: "👨â€ðŸŽ¤", + DELIM + r"woman_singer" + DELIM: "👩â€ðŸŽ¤", + DELIM + r"artist" + DELIM: "🧑â€ðŸŽ¨", + DELIM + r"man_artist" + DELIM: "👨â€ðŸŽ¨", + DELIM + r"woman_artist" + DELIM: "👩â€ðŸŽ¨", + DELIM + r"pilot" + DELIM: "🧑â€âœˆï¸", + DELIM + r"man_pilot" + DELIM: "👨â€âœˆï¸", + DELIM + r"woman_pilot" + DELIM: "👩â€âœˆï¸", + DELIM + r"astronaut" + DELIM: "🧑â€ðŸš€", + DELIM + r"man_astronaut" + DELIM: "👨â€ðŸš€", + DELIM + r"woman_astronaut" + DELIM: "👩â€ðŸš€", + DELIM + r"firefighter" + DELIM: "🧑â€ðŸš’", + DELIM + r"man_firefighter" + DELIM: "👨â€ðŸš’", + DELIM + r"woman_firefighter" + DELIM: "👩â€ðŸš’", + DELIM + r"cop" + DELIM: "👮", + DELIM + 
r"police(_officer|man)" + DELIM: "👮â€â™‚ï¸", + DELIM + r"policewoman" + DELIM: "👮â€â™€ï¸", + DELIM + r"detective" + DELIM: "🕵ï¸", + DELIM + r"male_detective" + DELIM: "🕵ï¸â€â™‚ï¸", + DELIM + r"female_detective" + DELIM: "🕵ï¸â€â™€ï¸", + DELIM + r"guard" + DELIM: "💂", + DELIM + r"guardsman" + DELIM: "💂â€â™‚ï¸", + DELIM + r"guardswoman" + DELIM: "💂â€â™€ï¸", + DELIM + r"ninja" + DELIM: "🥷", + DELIM + r"construction_worker" + DELIM: "👷", + DELIM + r"construction_worker_man" + DELIM: "👷â€â™‚ï¸", + DELIM + r"construction_worker_woman" + DELIM: "👷â€â™€ï¸", + DELIM + r"prince" + DELIM: "🤴", + DELIM + r"princess" + DELIM: "👸", + DELIM + r"person_with_turban" + DELIM: "👳", + DELIM + r"man_with_turban" + DELIM: "👳â€â™‚ï¸", + DELIM + r"woman_with_turban" + DELIM: "👳â€â™€ï¸", + DELIM + r"man_with_gua_pi_mao" + DELIM: "👲", + DELIM + r"woman_with_headscarf" + DELIM: "🧕", + DELIM + r"person_in_tuxedo" + DELIM: "🤵", + DELIM + r"man_in_tuxedo" + DELIM: "🤵â€â™‚ï¸", + DELIM + r"woman_in_tuxedo" + DELIM: "🤵â€â™€ï¸", + DELIM + r"person_with_veil" + DELIM: "👰", + DELIM + r"man_with_veil" + DELIM: "👰â€â™‚ï¸", + DELIM + r"(bride|woman)_with_veil" + DELIM: "👰â€â™€ï¸", + DELIM + r"pregnant_woman" + DELIM: "🤰", + DELIM + r"breast_feeding" + DELIM: "🤱", + DELIM + r"woman_feeding_baby" + DELIM: "👩â€ðŸ¼", + DELIM + r"man_feeding_baby" + DELIM: "👨â€ðŸ¼", + DELIM + r"person_feeding_baby" + DELIM: "🧑â€ðŸ¼", # # Person Fantasy # - DELIM + r'angel' + DELIM: '👼', - DELIM + r'santa' + DELIM: '🎅', - DELIM + r'mrs_claus' + DELIM: '🤶', - DELIM + r'mx_claus' + DELIM: '🧑â€ðŸŽ„', - DELIM + r'superhero' + DELIM: '🦸', - DELIM + r'superhero_man' + DELIM: '🦸â€â™‚ï¸', - DELIM + r'superhero_woman' + DELIM: '🦸â€â™€ï¸', - DELIM + r'supervillain' + DELIM: '🦹', - DELIM + r'supervillain_man' + DELIM: '🦹â€â™‚ï¸', - DELIM + r'supervillain_woman' + DELIM: '🦹â€â™€ï¸', - DELIM + r'mage' + DELIM: '🧙', - DELIM + r'mage_man' + DELIM: '🧙â€â™‚ï¸', - DELIM + r'mage_woman' + DELIM: '🧙â€â™€ï¸', - DELIM + r'fairy' + DELIM: '🧚', - 
DELIM + r'fairy_man' + DELIM: '🧚â€â™‚ï¸', - DELIM + r'fairy_woman' + DELIM: '🧚â€â™€ï¸', - DELIM + r'vampire' + DELIM: '🧛', - DELIM + r'vampire_man' + DELIM: '🧛â€â™‚ï¸', - DELIM + r'vampire_woman' + DELIM: '🧛â€â™€ï¸', - DELIM + r'merperson' + DELIM: '🧜', - DELIM + r'merman' + DELIM: '🧜â€â™‚ï¸', - DELIM + r'mermaid' + DELIM: '🧜â€â™€ï¸', - DELIM + r'elf' + DELIM: 'ðŸ§', - DELIM + r'elf_man' + DELIM: 'ðŸ§â€â™‚ï¸', - DELIM + r'elf_woman' + DELIM: 'ðŸ§â€â™€ï¸', - DELIM + r'genie' + DELIM: '🧞', - DELIM + r'genie_man' + DELIM: '🧞â€â™‚ï¸', - DELIM + r'genie_woman' + DELIM: '🧞â€â™€ï¸', - DELIM + r'zombie' + DELIM: '🧟', - DELIM + r'zombie_man' + DELIM: '🧟â€â™‚ï¸', - DELIM + r'zombie_woman' + DELIM: '🧟â€â™€ï¸', - + DELIM + r"angel" + DELIM: "👼", + DELIM + r"santa" + DELIM: "🎅", + DELIM + r"mrs_claus" + DELIM: "🤶", + DELIM + r"mx_claus" + DELIM: "🧑â€ðŸŽ„", + DELIM + r"superhero" + DELIM: "🦸", + DELIM + r"superhero_man" + DELIM: "🦸â€â™‚ï¸", + DELIM + r"superhero_woman" + DELIM: "🦸â€â™€ï¸", + DELIM + r"supervillain" + DELIM: "🦹", + DELIM + r"supervillain_man" + DELIM: "🦹â€â™‚ï¸", + DELIM + r"supervillain_woman" + DELIM: "🦹â€â™€ï¸", + DELIM + r"mage" + DELIM: "🧙", + DELIM + r"mage_man" + DELIM: "🧙â€â™‚ï¸", + DELIM + r"mage_woman" + DELIM: "🧙â€â™€ï¸", + DELIM + r"fairy" + DELIM: "🧚", + DELIM + r"fairy_man" + DELIM: "🧚â€â™‚ï¸", + DELIM + r"fairy_woman" + DELIM: "🧚â€â™€ï¸", + DELIM + r"vampire" + DELIM: "🧛", + DELIM + r"vampire_man" + DELIM: "🧛â€â™‚ï¸", + DELIM + r"vampire_woman" + DELIM: "🧛â€â™€ï¸", + DELIM + r"merperson" + DELIM: "🧜", + DELIM + r"merman" + DELIM: "🧜â€â™‚ï¸", + DELIM + r"mermaid" + DELIM: "🧜â€â™€ï¸", + DELIM + r"elf" + DELIM: "ðŸ§", + DELIM + r"elf_man" + DELIM: "ðŸ§â€â™‚ï¸", + DELIM + r"elf_woman" + DELIM: "ðŸ§â€â™€ï¸", + DELIM + r"genie" + DELIM: "🧞", + DELIM + r"genie_man" + DELIM: "🧞â€â™‚ï¸", + DELIM + r"genie_woman" + DELIM: "🧞â€â™€ï¸", + DELIM + r"zombie" + DELIM: "🧟", + DELIM + r"zombie_man" + DELIM: "🧟â€â™‚ï¸", + DELIM + r"zombie_woman" + DELIM: "🧟â€â™€ï¸", 
# # Person Activity # - DELIM + r'massage' + DELIM: '💆', - DELIM + r'massage_man' + DELIM: '💆â€â™‚ï¸', - DELIM + r'massage_woman' + DELIM: '💆â€â™€ï¸', - DELIM + r'haircut' + DELIM: '💇', - DELIM + r'haircut_man' + DELIM: '💇â€â™‚ï¸', - DELIM + r'haircut_woman' + DELIM: '💇â€â™€ï¸', - DELIM + r'walking' + DELIM: '🚶', - DELIM + r'walking_man' + DELIM: '🚶â€â™‚ï¸', - DELIM + r'walking_woman' + DELIM: '🚶â€â™€ï¸', - DELIM + r'standing_person' + DELIM: 'ðŸ§', - DELIM + r'standing_man' + DELIM: 'ðŸ§â€â™‚ï¸', - DELIM + r'standing_woman' + DELIM: 'ðŸ§â€â™€ï¸', - DELIM + r'kneeling_person' + DELIM: '🧎', - DELIM + r'kneeling_man' + DELIM: '🧎â€â™‚ï¸', - DELIM + r'kneeling_woman' + DELIM: '🧎â€â™€ï¸', - DELIM + r'person_with_probing_cane' + DELIM: '🧑â€ðŸ¦¯', - DELIM + r'man_with_probing_cane' + DELIM: '👨â€ðŸ¦¯', - DELIM + r'woman_with_probing_cane' + DELIM: '👩â€ðŸ¦¯', - DELIM + r'person_in_motorized_wheelchair' + DELIM: '🧑â€ðŸ¦¼', - DELIM + r'man_in_motorized_wheelchair' + DELIM: '👨â€ðŸ¦¼', - DELIM + r'woman_in_motorized_wheelchair' + DELIM: '👩â€ðŸ¦¼', - DELIM + r'person_in_manual_wheelchair' + DELIM: '🧑â€ðŸ¦½', - DELIM + r'man_in_manual_wheelchair' + DELIM: '👨â€ðŸ¦½', - DELIM + r'woman_in_manual_wheelchair' + DELIM: '👩â€ðŸ¦½', - DELIM + r'runn(er|ing)' + DELIM: 'ðŸƒ', - DELIM + r'running_man' + DELIM: 'ðŸƒâ€â™‚ï¸', - DELIM + r'running_woman' + DELIM: 'ðŸƒâ€â™€ï¸', - DELIM + r'(dancer|woman_dancing)' + DELIM: '💃', - DELIM + r'man_dancing' + DELIM: '🕺', - DELIM + r'business_suit_levitating' + DELIM: '🕴ï¸', - DELIM + r'dancers' + DELIM: '👯', - DELIM + r'dancing_men' + DELIM: '👯â€â™‚ï¸', - DELIM + r'dancing_women' + DELIM: '👯â€â™€ï¸', - DELIM + r'sauna_person' + DELIM: '🧖', - DELIM + r'sauna_man' + DELIM: '🧖â€â™‚ï¸', - DELIM + r'sauna_woman' + DELIM: '🧖â€â™€ï¸', - DELIM + r'climbing' + DELIM: '🧗', - DELIM + r'climbing_man' + DELIM: '🧗â€â™‚ï¸', - DELIM + r'climbing_woman' + DELIM: '🧗â€â™€ï¸', - + DELIM + r"massage" + DELIM: "💆", + DELIM + r"massage_man" + DELIM: "💆â€â™‚ï¸", + DELIM + 
r"massage_woman" + DELIM: "💆â€â™€ï¸", + DELIM + r"haircut" + DELIM: "💇", + DELIM + r"haircut_man" + DELIM: "💇â€â™‚ï¸", + DELIM + r"haircut_woman" + DELIM: "💇â€â™€ï¸", + DELIM + r"walking" + DELIM: "🚶", + DELIM + r"walking_man" + DELIM: "🚶â€â™‚ï¸", + DELIM + r"walking_woman" + DELIM: "🚶â€â™€ï¸", + DELIM + r"standing_person" + DELIM: "ðŸ§", + DELIM + r"standing_man" + DELIM: "ðŸ§â€â™‚ï¸", + DELIM + r"standing_woman" + DELIM: "ðŸ§â€â™€ï¸", + DELIM + r"kneeling_person" + DELIM: "🧎", + DELIM + r"kneeling_man" + DELIM: "🧎â€â™‚ï¸", + DELIM + r"kneeling_woman" + DELIM: "🧎â€â™€ï¸", + DELIM + r"person_with_probing_cane" + DELIM: "🧑â€ðŸ¦¯", + DELIM + r"man_with_probing_cane" + DELIM: "👨â€ðŸ¦¯", + DELIM + r"woman_with_probing_cane" + DELIM: "👩â€ðŸ¦¯", + DELIM + r"person_in_motorized_wheelchair" + DELIM: "🧑â€ðŸ¦¼", + DELIM + r"man_in_motorized_wheelchair" + DELIM: "👨â€ðŸ¦¼", + DELIM + r"woman_in_motorized_wheelchair" + DELIM: "👩â€ðŸ¦¼", + DELIM + r"person_in_manual_wheelchair" + DELIM: "🧑â€ðŸ¦½", + DELIM + r"man_in_manual_wheelchair" + DELIM: "👨â€ðŸ¦½", + DELIM + r"woman_in_manual_wheelchair" + DELIM: "👩â€ðŸ¦½", + DELIM + r"runn(er|ing)" + DELIM: "ðŸƒ", + DELIM + r"running_man" + DELIM: "ðŸƒâ€â™‚ï¸", + DELIM + r"running_woman" + DELIM: "ðŸƒâ€â™€ï¸", + DELIM + r"(dancer|woman_dancing)" + DELIM: "💃", + DELIM + r"man_dancing" + DELIM: "🕺", + DELIM + r"business_suit_levitating" + DELIM: "🕴ï¸", + DELIM + r"dancers" + DELIM: "👯", + DELIM + r"dancing_men" + DELIM: "👯â€â™‚ï¸", + DELIM + r"dancing_women" + DELIM: "👯â€â™€ï¸", + DELIM + r"sauna_person" + DELIM: "🧖", + DELIM + r"sauna_man" + DELIM: "🧖â€â™‚ï¸", + DELIM + r"sauna_woman" + DELIM: "🧖â€â™€ï¸", + DELIM + r"climbing" + DELIM: "🧗", + DELIM + r"climbing_man" + DELIM: "🧗â€â™‚ï¸", + DELIM + r"climbing_woman" + DELIM: "🧗â€â™€ï¸", # # Person Sport # - DELIM + r'person_fencing' + DELIM: '🤺', - DELIM + r'horse_racing' + DELIM: 'ðŸ‡', - DELIM + r'skier' + DELIM: 'â›·ï¸', - DELIM + r'snowboarder' + DELIM: 'ðŸ‚', - DELIM + r'golfing' + 
DELIM: 'ðŸŒï¸', - DELIM + r'golfing_man' + DELIM: 'ðŸŒï¸â€â™‚ï¸', - DELIM + r'golfing_woman' + DELIM: 'ðŸŒï¸â€â™€ï¸', - DELIM + r'surfer' + DELIM: 'ðŸ„', - DELIM + r'surfing_man' + DELIM: 'ðŸ„â€â™‚ï¸', - DELIM + r'surfing_woman' + DELIM: 'ðŸ„â€â™€ï¸', - DELIM + r'rowboat' + DELIM: '🚣', - DELIM + r'rowing_man' + DELIM: '🚣â€â™‚ï¸', - DELIM + r'rowing_woman' + DELIM: '🚣â€â™€ï¸', - DELIM + r'swimmer' + DELIM: 'ðŸŠ', - DELIM + r'swimming_man' + DELIM: 'ðŸŠâ€â™‚ï¸', - DELIM + r'swimming_woman' + DELIM: 'ðŸŠâ€â™€ï¸', - DELIM + r'bouncing_ball_person' + DELIM: '⛹ï¸', - DELIM + r'(basketball|bouncing_ball)_man' + DELIM: '⛹ï¸â€â™‚ï¸', - DELIM + r'(basketball|bouncing_ball)_woman' + DELIM: '⛹ï¸â€â™€ï¸', - DELIM + r'weight_lifting' + DELIM: 'ðŸ‹ï¸', - DELIM + r'weight_lifting_man' + DELIM: 'ðŸ‹ï¸â€â™‚ï¸', - DELIM + r'weight_lifting_woman' + DELIM: 'ðŸ‹ï¸â€â™€ï¸', - DELIM + r'bicyclist' + DELIM: '🚴', - DELIM + r'biking_man' + DELIM: '🚴â€â™‚ï¸', - DELIM + r'biking_woman' + DELIM: '🚴â€â™€ï¸', - DELIM + r'mountain_bicyclist' + DELIM: '🚵', - DELIM + r'mountain_biking_man' + DELIM: '🚵â€â™‚ï¸', - DELIM + r'mountain_biking_woman' + DELIM: '🚵â€â™€ï¸', - DELIM + r'cartwheeling' + DELIM: '🤸', - DELIM + r'man_cartwheeling' + DELIM: '🤸â€â™‚ï¸', - DELIM + r'woman_cartwheeling' + DELIM: '🤸â€â™€ï¸', - DELIM + r'wrestling' + DELIM: '🤼', - DELIM + r'men_wrestling' + DELIM: '🤼â€â™‚ï¸', - DELIM + r'women_wrestling' + DELIM: '🤼â€â™€ï¸', - DELIM + r'water_polo' + DELIM: '🤽', - DELIM + r'man_playing_water_polo' + DELIM: '🤽â€â™‚ï¸', - DELIM + r'woman_playing_water_polo' + DELIM: '🤽â€â™€ï¸', - DELIM + r'handball_person' + DELIM: '🤾', - DELIM + r'man_playing_handball' + DELIM: '🤾â€â™‚ï¸', - DELIM + r'woman_playing_handball' + DELIM: '🤾â€â™€ï¸', - DELIM + r'juggling_person' + DELIM: '🤹', - DELIM + r'man_juggling' + DELIM: '🤹â€â™‚ï¸', - DELIM + r'woman_juggling' + DELIM: '🤹â€â™€ï¸', - + DELIM + r"person_fencing" + DELIM: "🤺", + DELIM + r"horse_racing" + DELIM: "ðŸ‡", + DELIM + r"skier" + DELIM: "â›·ï¸", 
+ DELIM + r"snowboarder" + DELIM: "ðŸ‚", + DELIM + r"golfing" + DELIM: "ðŸŒï¸", + DELIM + r"golfing_man" + DELIM: "ðŸŒï¸â€â™‚ï¸", + DELIM + r"golfing_woman" + DELIM: "ðŸŒï¸â€â™€ï¸", + DELIM + r"surfer" + DELIM: "ðŸ„", + DELIM + r"surfing_man" + DELIM: "ðŸ„â€â™‚ï¸", + DELIM + r"surfing_woman" + DELIM: "ðŸ„â€â™€ï¸", + DELIM + r"rowboat" + DELIM: "🚣", + DELIM + r"rowing_man" + DELIM: "🚣â€â™‚ï¸", + DELIM + r"rowing_woman" + DELIM: "🚣â€â™€ï¸", + DELIM + r"swimmer" + DELIM: "ðŸŠ", + DELIM + r"swimming_man" + DELIM: "ðŸŠâ€â™‚ï¸", + DELIM + r"swimming_woman" + DELIM: "ðŸŠâ€â™€ï¸", + DELIM + r"bouncing_ball_person" + DELIM: "⛹ï¸", + DELIM + r"(basketball|bouncing_ball)_man" + DELIM: "⛹ï¸â€â™‚ï¸", + DELIM + r"(basketball|bouncing_ball)_woman" + DELIM: "⛹ï¸â€â™€ï¸", + DELIM + r"weight_lifting" + DELIM: "ðŸ‹ï¸", + DELIM + r"weight_lifting_man" + DELIM: "ðŸ‹ï¸â€â™‚ï¸", + DELIM + r"weight_lifting_woman" + DELIM: "ðŸ‹ï¸â€â™€ï¸", + DELIM + r"bicyclist" + DELIM: "🚴", + DELIM + r"biking_man" + DELIM: "🚴â€â™‚ï¸", + DELIM + r"biking_woman" + DELIM: "🚴â€â™€ï¸", + DELIM + r"mountain_bicyclist" + DELIM: "🚵", + DELIM + r"mountain_biking_man" + DELIM: "🚵â€â™‚ï¸", + DELIM + r"mountain_biking_woman" + DELIM: "🚵â€â™€ï¸", + DELIM + r"cartwheeling" + DELIM: "🤸", + DELIM + r"man_cartwheeling" + DELIM: "🤸â€â™‚ï¸", + DELIM + r"woman_cartwheeling" + DELIM: "🤸â€â™€ï¸", + DELIM + r"wrestling" + DELIM: "🤼", + DELIM + r"men_wrestling" + DELIM: "🤼â€â™‚ï¸", + DELIM + r"women_wrestling" + DELIM: "🤼â€â™€ï¸", + DELIM + r"water_polo" + DELIM: "🤽", + DELIM + r"man_playing_water_polo" + DELIM: "🤽â€â™‚ï¸", + DELIM + r"woman_playing_water_polo" + DELIM: "🤽â€â™€ï¸", + DELIM + r"handball_person" + DELIM: "🤾", + DELIM + r"man_playing_handball" + DELIM: "🤾â€â™‚ï¸", + DELIM + r"woman_playing_handball" + DELIM: "🤾â€â™€ï¸", + DELIM + r"juggling_person" + DELIM: "🤹", + DELIM + r"man_juggling" + DELIM: "🤹â€â™‚ï¸", + DELIM + r"woman_juggling" + DELIM: "🤹â€â™€ï¸", # # Person Resting # - DELIM + r'lotus_position' + DELIM: 
'🧘', - DELIM + r'lotus_position_man' + DELIM: '🧘â€â™‚ï¸', - DELIM + r'lotus_position_woman' + DELIM: '🧘â€â™€ï¸', - DELIM + r'bath' + DELIM: '🛀', - DELIM + r'sleeping_bed' + DELIM: '🛌', - + DELIM + r"lotus_position" + DELIM: "🧘", + DELIM + r"lotus_position_man" + DELIM: "🧘â€â™‚ï¸", + DELIM + r"lotus_position_woman" + DELIM: "🧘â€â™€ï¸", + DELIM + r"bath" + DELIM: "🛀", + DELIM + r"sleeping_bed" + DELIM: "🛌", # # Family # - DELIM + r'people_holding_hands' + DELIM: '🧑â€ðŸ¤â€ðŸ§‘', - DELIM + r'two_women_holding_hands' + DELIM: '👭', - DELIM + r'couple' + DELIM: '👫', - DELIM + r'two_men_holding_hands' + DELIM: '👬', - DELIM + r'couplekiss' + DELIM: 'ðŸ’', - DELIM + r'couplekiss_man_woman' + DELIM: '👩â€â¤ï¸â€ðŸ’‹â€ðŸ‘¨', - DELIM + r'couplekiss_man_man' + DELIM: '👨â€â¤ï¸â€ðŸ’‹â€ðŸ‘¨', - DELIM + r'couplekiss_woman_woman' + DELIM: '👩â€â¤ï¸â€ðŸ’‹â€ðŸ‘©', - DELIM + r'couple_with_heart' + DELIM: '💑', - DELIM + r'couple_with_heart_woman_man' + DELIM: '👩â€â¤ï¸â€ðŸ‘¨', - DELIM + r'couple_with_heart_man_man' + DELIM: '👨â€â¤ï¸â€ðŸ‘¨', - DELIM + r'couple_with_heart_woman_woman' + DELIM: '👩â€â¤ï¸â€ðŸ‘©', - DELIM + r'family_man_woman_boy' + DELIM: '👨â€ðŸ‘©â€ðŸ‘¦', - DELIM + r'family_man_woman_girl' + DELIM: '👨â€ðŸ‘©â€ðŸ‘§', - DELIM + r'family_man_woman_girl_boy' + DELIM: '👨â€ðŸ‘©â€ðŸ‘§â€ðŸ‘¦', - DELIM + r'family_man_woman_boy_boy' + DELIM: '👨â€ðŸ‘©â€ðŸ‘¦â€ðŸ‘¦', - DELIM + r'family_man_woman_girl_girl' + DELIM: '👨â€ðŸ‘©â€ðŸ‘§â€ðŸ‘§', - DELIM + r'family_man_man_boy' + DELIM: '👨â€ðŸ‘¨â€ðŸ‘¦', - DELIM + r'family_man_man_girl' + DELIM: '👨â€ðŸ‘¨â€ðŸ‘§', - DELIM + r'family_man_man_girl_boy' + DELIM: '👨â€ðŸ‘¨â€ðŸ‘§â€ðŸ‘¦', - DELIM + r'family_man_man_boy_boy' + DELIM: '👨â€ðŸ‘¨â€ðŸ‘¦â€ðŸ‘¦', - DELIM + r'family_man_man_girl_girl' + DELIM: '👨â€ðŸ‘¨â€ðŸ‘§â€ðŸ‘§', - DELIM + r'family_woman_woman_boy' + DELIM: '👩â€ðŸ‘©â€ðŸ‘¦', - DELIM + r'family_woman_woman_girl' + DELIM: '👩â€ðŸ‘©â€ðŸ‘§', - DELIM + r'family_woman_woman_girl_boy' + DELIM: '👩â€ðŸ‘©â€ðŸ‘§â€ðŸ‘¦', - DELIM + r'family_woman_woman_boy_boy' + 
DELIM: '👩â€ðŸ‘©â€ðŸ‘¦â€ðŸ‘¦', - DELIM + r'family_woman_woman_girl_girl' + DELIM: '👩â€ðŸ‘©â€ðŸ‘§â€ðŸ‘§', - DELIM + r'family_man_boy' + DELIM: '👨â€ðŸ‘¦', - DELIM + r'family_man_boy_boy' + DELIM: '👨â€ðŸ‘¦â€ðŸ‘¦', - DELIM + r'family_man_girl' + DELIM: '👨â€ðŸ‘§', - DELIM + r'family_man_girl_boy' + DELIM: '👨â€ðŸ‘§â€ðŸ‘¦', - DELIM + r'family_man_girl_girl' + DELIM: '👨â€ðŸ‘§â€ðŸ‘§', - DELIM + r'family_woman_boy' + DELIM: '👩â€ðŸ‘¦', - DELIM + r'family_woman_boy_boy' + DELIM: '👩â€ðŸ‘¦â€ðŸ‘¦', - DELIM + r'family_woman_girl' + DELIM: '👩â€ðŸ‘§', - DELIM + r'family_woman_girl_boy' + DELIM: '👩â€ðŸ‘§â€ðŸ‘¦', - DELIM + r'family_woman_girl_girl' + DELIM: '👩â€ðŸ‘§â€ðŸ‘§', - + DELIM + r"people_holding_hands" + DELIM: "🧑â€ðŸ¤â€ðŸ§‘", + DELIM + r"two_women_holding_hands" + DELIM: "👭", + DELIM + r"couple" + DELIM: "👫", + DELIM + r"two_men_holding_hands" + DELIM: "👬", + DELIM + r"couplekiss" + DELIM: "ðŸ’", + DELIM + r"couplekiss_man_woman" + DELIM: "👩â€â¤ï¸â€ðŸ’‹â€ðŸ‘¨", + DELIM + r"couplekiss_man_man" + DELIM: "👨â€â¤ï¸â€ðŸ’‹â€ðŸ‘¨", + DELIM + r"couplekiss_woman_woman" + DELIM: "👩â€â¤ï¸â€ðŸ’‹â€ðŸ‘©", + DELIM + r"couple_with_heart" + DELIM: "💑", + DELIM + r"couple_with_heart_woman_man" + DELIM: "👩â€â¤ï¸â€ðŸ‘¨", + DELIM + r"couple_with_heart_man_man" + DELIM: "👨â€â¤ï¸â€ðŸ‘¨", + DELIM + r"couple_with_heart_woman_woman" + DELIM: "👩â€â¤ï¸â€ðŸ‘©", + DELIM + r"family_man_woman_boy" + DELIM: "👨â€ðŸ‘©â€ðŸ‘¦", + DELIM + r"family_man_woman_girl" + DELIM: "👨â€ðŸ‘©â€ðŸ‘§", + DELIM + r"family_man_woman_girl_boy" + DELIM: "👨â€ðŸ‘©â€ðŸ‘§â€ðŸ‘¦", + DELIM + r"family_man_woman_boy_boy" + DELIM: "👨â€ðŸ‘©â€ðŸ‘¦â€ðŸ‘¦", + DELIM + r"family_man_woman_girl_girl" + DELIM: "👨â€ðŸ‘©â€ðŸ‘§â€ðŸ‘§", + DELIM + r"family_man_man_boy" + DELIM: "👨â€ðŸ‘¨â€ðŸ‘¦", + DELIM + r"family_man_man_girl" + DELIM: "👨â€ðŸ‘¨â€ðŸ‘§", + DELIM + r"family_man_man_girl_boy" + DELIM: "👨â€ðŸ‘¨â€ðŸ‘§â€ðŸ‘¦", + DELIM + r"family_man_man_boy_boy" + DELIM: "👨â€ðŸ‘¨â€ðŸ‘¦â€ðŸ‘¦", + DELIM + r"family_man_man_girl_girl" + DELIM: "👨â€ðŸ‘¨â€ðŸ‘§â€ðŸ‘§", 
+ DELIM + r"family_woman_woman_boy" + DELIM: "👩â€ðŸ‘©â€ðŸ‘¦", + DELIM + r"family_woman_woman_girl" + DELIM: "👩â€ðŸ‘©â€ðŸ‘§", + DELIM + r"family_woman_woman_girl_boy" + DELIM: "👩â€ðŸ‘©â€ðŸ‘§â€ðŸ‘¦", + DELIM + r"family_woman_woman_boy_boy" + DELIM: "👩â€ðŸ‘©â€ðŸ‘¦â€ðŸ‘¦", + DELIM + r"family_woman_woman_girl_girl" + DELIM: "👩â€ðŸ‘©â€ðŸ‘§â€ðŸ‘§", + DELIM + r"family_man_boy" + DELIM: "👨â€ðŸ‘¦", + DELIM + r"family_man_boy_boy" + DELIM: "👨â€ðŸ‘¦â€ðŸ‘¦", + DELIM + r"family_man_girl" + DELIM: "👨â€ðŸ‘§", + DELIM + r"family_man_girl_boy" + DELIM: "👨â€ðŸ‘§â€ðŸ‘¦", + DELIM + r"family_man_girl_girl" + DELIM: "👨â€ðŸ‘§â€ðŸ‘§", + DELIM + r"family_woman_boy" + DELIM: "👩â€ðŸ‘¦", + DELIM + r"family_woman_boy_boy" + DELIM: "👩â€ðŸ‘¦â€ðŸ‘¦", + DELIM + r"family_woman_girl" + DELIM: "👩â€ðŸ‘§", + DELIM + r"family_woman_girl_boy" + DELIM: "👩â€ðŸ‘§â€ðŸ‘¦", + DELIM + r"family_woman_girl_girl" + DELIM: "👩â€ðŸ‘§â€ðŸ‘§", # # Person Symbol # - DELIM + r'speaking_head' + DELIM: '🗣ï¸', - DELIM + r'bust_in_silhouette' + DELIM: '👤', - DELIM + r'busts_in_silhouette' + DELIM: '👥', - DELIM + r'people_hugging' + DELIM: '🫂', - DELIM + r'family' + DELIM: '👪', - DELIM + r'footprints' + DELIM: '👣', - + DELIM + r"speaking_head" + DELIM: "🗣ï¸", + DELIM + r"bust_in_silhouette" + DELIM: "👤", + DELIM + r"busts_in_silhouette" + DELIM: "👥", + DELIM + r"people_hugging" + DELIM: "🫂", + DELIM + r"family" + DELIM: "👪", + DELIM + r"footprints" + DELIM: "👣", # # Animal Mammal # - DELIM + r'monkey_face' + DELIM: 'ðŸµ', - DELIM + r'monkey' + DELIM: 'ðŸ’', - DELIM + r'gorilla' + DELIM: 'ðŸ¦', - DELIM + r'orangutan' + DELIM: '🦧', - DELIM + r'dog' + DELIM: 'ðŸ¶', - DELIM + r'dog2' + DELIM: 'ðŸ•', - DELIM + r'guide_dog' + DELIM: '🦮', - DELIM + r'service_dog' + DELIM: 'ðŸ•â€ðŸ¦º', - DELIM + r'poodle' + DELIM: 'ðŸ©', - DELIM + r'wolf' + DELIM: 'ðŸº', - DELIM + r'fox_face' + DELIM: '🦊', - DELIM + r'raccoon' + DELIM: 'ðŸ¦', - DELIM + r'cat' + DELIM: 'ðŸ±', - DELIM + r'cat2' + DELIM: 'ðŸˆ', - DELIM + r'black_cat' + DELIM: 'ðŸˆâ€â¬›', 
- DELIM + r'lion' + DELIM: 'ðŸ¦', - DELIM + r'tiger' + DELIM: 'ðŸ¯', - DELIM + r'tiger2' + DELIM: 'ðŸ…', - DELIM + r'leopard' + DELIM: 'ðŸ†', - DELIM + r'horse' + DELIM: 'ðŸ´', - DELIM + r'racehorse' + DELIM: 'ðŸŽ', - DELIM + r'unicorn' + DELIM: '🦄', - DELIM + r'zebra' + DELIM: '🦓', - DELIM + r'deer' + DELIM: '🦌', - DELIM + r'bison' + DELIM: '🦬', - DELIM + r'cow' + DELIM: 'ðŸ®', - DELIM + r'ox' + DELIM: 'ðŸ‚', - DELIM + r'water_buffalo' + DELIM: 'ðŸƒ', - DELIM + r'cow2' + DELIM: 'ðŸ„', - DELIM + r'pig' + DELIM: 'ðŸ·', - DELIM + r'pig2' + DELIM: 'ðŸ–', - DELIM + r'boar' + DELIM: 'ðŸ—', - DELIM + r'pig_nose' + DELIM: 'ðŸ½', - DELIM + r'ram' + DELIM: 'ðŸ', - DELIM + r'sheep' + DELIM: 'ðŸ‘', - DELIM + r'goat' + DELIM: 'ðŸ', - DELIM + r'dromedary_camel' + DELIM: 'ðŸª', - DELIM + r'camel' + DELIM: 'ðŸ«', - DELIM + r'llama' + DELIM: '🦙', - DELIM + r'giraffe' + DELIM: '🦒', - DELIM + r'elephant' + DELIM: 'ðŸ˜', - DELIM + r'mammoth' + DELIM: '🦣', - DELIM + r'rhinoceros' + DELIM: 'ðŸ¦', - DELIM + r'hippopotamus' + DELIM: '🦛', - DELIM + r'mouse' + DELIM: 'ðŸ­', - DELIM + r'mouse2' + DELIM: 'ðŸ', - DELIM + r'rat' + DELIM: 'ðŸ€', - DELIM + r'hamster' + DELIM: 'ðŸ¹', - DELIM + r'rabbit' + DELIM: 'ðŸ°', - DELIM + r'rabbit2' + DELIM: 'ðŸ‡', - DELIM + r'chipmunk' + DELIM: 'ðŸ¿ï¸', - DELIM + r'beaver' + DELIM: '🦫', - DELIM + r'hedgehog' + DELIM: '🦔', - DELIM + r'bat' + DELIM: '🦇', - DELIM + r'bear' + DELIM: 'ðŸ»', - DELIM + r'polar_bear' + DELIM: 'ðŸ»â€â„ï¸', - DELIM + r'koala' + DELIM: 'ðŸ¨', - DELIM + r'panda_face' + DELIM: 'ðŸ¼', - DELIM + r'sloth' + DELIM: '🦥', - DELIM + r'otter' + DELIM: '🦦', - DELIM + r'skunk' + DELIM: '🦨', - DELIM + r'kangaroo' + DELIM: '🦘', - DELIM + r'badger' + DELIM: '🦡', - DELIM + r'(feet|paw_prints)' + DELIM: 'ðŸ¾', - + DELIM + r"monkey_face" + DELIM: "ðŸµ", + DELIM + r"monkey" + DELIM: "ðŸ’", + DELIM + r"gorilla" + DELIM: "ðŸ¦", + DELIM + r"orangutan" + DELIM: "🦧", + DELIM + r"dog" + DELIM: "ðŸ¶", + DELIM + r"dog2" + DELIM: "ðŸ•", + DELIM + r"guide_dog" 
+ DELIM: "🦮", + DELIM + r"service_dog" + DELIM: "ðŸ•â€ðŸ¦º", + DELIM + r"poodle" + DELIM: "ðŸ©", + DELIM + r"wolf" + DELIM: "ðŸº", + DELIM + r"fox_face" + DELIM: "🦊", + DELIM + r"raccoon" + DELIM: "ðŸ¦", + DELIM + r"cat" + DELIM: "ðŸ±", + DELIM + r"cat2" + DELIM: "ðŸˆ", + DELIM + r"black_cat" + DELIM: "ðŸˆâ€â¬›", + DELIM + r"lion" + DELIM: "ðŸ¦", + DELIM + r"tiger" + DELIM: "ðŸ¯", + DELIM + r"tiger2" + DELIM: "ðŸ…", + DELIM + r"leopard" + DELIM: "ðŸ†", + DELIM + r"horse" + DELIM: "ðŸ´", + DELIM + r"racehorse" + DELIM: "ðŸŽ", + DELIM + r"unicorn" + DELIM: "🦄", + DELIM + r"zebra" + DELIM: "🦓", + DELIM + r"deer" + DELIM: "🦌", + DELIM + r"bison" + DELIM: "🦬", + DELIM + r"cow" + DELIM: "ðŸ®", + DELIM + r"ox" + DELIM: "ðŸ‚", + DELIM + r"water_buffalo" + DELIM: "ðŸƒ", + DELIM + r"cow2" + DELIM: "ðŸ„", + DELIM + r"pig" + DELIM: "ðŸ·", + DELIM + r"pig2" + DELIM: "ðŸ–", + DELIM + r"boar" + DELIM: "ðŸ—", + DELIM + r"pig_nose" + DELIM: "ðŸ½", + DELIM + r"ram" + DELIM: "ðŸ", + DELIM + r"sheep" + DELIM: "ðŸ‘", + DELIM + r"goat" + DELIM: "ðŸ", + DELIM + r"dromedary_camel" + DELIM: "ðŸª", + DELIM + r"camel" + DELIM: "ðŸ«", + DELIM + r"llama" + DELIM: "🦙", + DELIM + r"giraffe" + DELIM: "🦒", + DELIM + r"elephant" + DELIM: "ðŸ˜", + DELIM + r"mammoth" + DELIM: "🦣", + DELIM + r"rhinoceros" + DELIM: "ðŸ¦", + DELIM + r"hippopotamus" + DELIM: "🦛", + DELIM + r"mouse" + DELIM: "ðŸ­", + DELIM + r"mouse2" + DELIM: "ðŸ", + DELIM + r"rat" + DELIM: "ðŸ€", + DELIM + r"hamster" + DELIM: "ðŸ¹", + DELIM + r"rabbit" + DELIM: "ðŸ°", + DELIM + r"rabbit2" + DELIM: "ðŸ‡", + DELIM + r"chipmunk" + DELIM: "ðŸ¿ï¸", + DELIM + r"beaver" + DELIM: "🦫", + DELIM + r"hedgehog" + DELIM: "🦔", + DELIM + r"bat" + DELIM: "🦇", + DELIM + r"bear" + DELIM: "ðŸ»", + DELIM + r"polar_bear" + DELIM: "ðŸ»â€â„ï¸", + DELIM + r"koala" + DELIM: "ðŸ¨", + DELIM + r"panda_face" + DELIM: "ðŸ¼", + DELIM + r"sloth" + DELIM: "🦥", + DELIM + r"otter" + DELIM: "🦦", + DELIM + r"skunk" + DELIM: "🦨", + DELIM + r"kangaroo" + DELIM: "🦘", + DELIM + 
r"badger" + DELIM: "🦡", + DELIM + r"(feet|paw_prints)" + DELIM: "ðŸ¾", # # Animal Bird # - DELIM + r'turkey' + DELIM: '🦃', - DELIM + r'chicken' + DELIM: 'ðŸ”', - DELIM + r'rooster' + DELIM: 'ðŸ“', - DELIM + r'hatching_chick' + DELIM: 'ðŸ£', - DELIM + r'baby_chick' + DELIM: 'ðŸ¤', - DELIM + r'hatched_chick' + DELIM: 'ðŸ¥', - DELIM + r'bird' + DELIM: 'ðŸ¦', - DELIM + r'penguin' + DELIM: 'ðŸ§', - DELIM + r'dove' + DELIM: '🕊ï¸', - DELIM + r'eagle' + DELIM: '🦅', - DELIM + r'duck' + DELIM: '🦆', - DELIM + r'swan' + DELIM: '🦢', - DELIM + r'owl' + DELIM: '🦉', - DELIM + r'dodo' + DELIM: '🦤', - DELIM + r'feather' + DELIM: '🪶', - DELIM + r'flamingo' + DELIM: '🦩', - DELIM + r'peacock' + DELIM: '🦚', - DELIM + r'parrot' + DELIM: '🦜', - + DELIM + r"turkey" + DELIM: "🦃", + DELIM + r"chicken" + DELIM: "ðŸ”", + DELIM + r"rooster" + DELIM: "ðŸ“", + DELIM + r"hatching_chick" + DELIM: "ðŸ£", + DELIM + r"baby_chick" + DELIM: "ðŸ¤", + DELIM + r"hatched_chick" + DELIM: "ðŸ¥", + DELIM + r"bird" + DELIM: "ðŸ¦", + DELIM + r"penguin" + DELIM: "ðŸ§", + DELIM + r"dove" + DELIM: "🕊ï¸", + DELIM + r"eagle" + DELIM: "🦅", + DELIM + r"duck" + DELIM: "🦆", + DELIM + r"swan" + DELIM: "🦢", + DELIM + r"owl" + DELIM: "🦉", + DELIM + r"dodo" + DELIM: "🦤", + DELIM + r"feather" + DELIM: "🪶", + DELIM + r"flamingo" + DELIM: "🦩", + DELIM + r"peacock" + DELIM: "🦚", + DELIM + r"parrot" + DELIM: "🦜", # # Animal Amphibian # - DELIM + r'frog' + DELIM: 'ðŸ¸', - + DELIM + r"frog" + DELIM: "ðŸ¸", # # Animal Reptile # - DELIM + r'crocodile' + DELIM: 'ðŸŠ', - DELIM + r'turtle' + DELIM: 'ðŸ¢', - DELIM + r'lizard' + DELIM: '🦎', - DELIM + r'snake' + DELIM: 'ðŸ', - DELIM + r'dragon_face' + DELIM: 'ðŸ²', - DELIM + r'dragon' + DELIM: 'ðŸ‰', - DELIM + r'sauropod' + DELIM: '🦕', - DELIM + r't-rex' + DELIM: '🦖', - + DELIM + r"crocodile" + DELIM: "ðŸŠ", + DELIM + r"turtle" + DELIM: "ðŸ¢", + DELIM + r"lizard" + DELIM: "🦎", + DELIM + r"snake" + DELIM: "ðŸ", + DELIM + r"dragon_face" + DELIM: "ðŸ²", + DELIM + r"dragon" + DELIM: "ðŸ‰", + 
DELIM + r"sauropod" + DELIM: "🦕", + DELIM + r"t-rex" + DELIM: "🦖", # # Animal Marine # - DELIM + r'whale' + DELIM: 'ðŸ³', - DELIM + r'whale2' + DELIM: 'ðŸ‹', - DELIM + r'dolphin' + DELIM: 'ðŸ¬', - DELIM + r'(seal|flipper)' + DELIM: '🦭', - DELIM + r'fish' + DELIM: 'ðŸŸ', - DELIM + r'tropical_fish' + DELIM: 'ðŸ ', - DELIM + r'blowfish' + DELIM: 'ðŸ¡', - DELIM + r'shark' + DELIM: '🦈', - DELIM + r'octopus' + DELIM: 'ðŸ™', - DELIM + r'shell' + DELIM: 'ðŸš', - + DELIM + r"whale" + DELIM: "ðŸ³", + DELIM + r"whale2" + DELIM: "ðŸ‹", + DELIM + r"dolphin" + DELIM: "ðŸ¬", + DELIM + r"(seal|flipper)" + DELIM: "🦭", + DELIM + r"fish" + DELIM: "ðŸŸ", + DELIM + r"tropical_fish" + DELIM: "ðŸ ", + DELIM + r"blowfish" + DELIM: "ðŸ¡", + DELIM + r"shark" + DELIM: "🦈", + DELIM + r"octopus" + DELIM: "ðŸ™", + DELIM + r"shell" + DELIM: "ðŸš", # # Animal Bug # - DELIM + r'snail' + DELIM: 'ðŸŒ', - DELIM + r'butterfly' + DELIM: '🦋', - DELIM + r'bug' + DELIM: 'ðŸ›', - DELIM + r'ant' + DELIM: 'ðŸœ', - DELIM + r'bee' + DELIM: 'ðŸ', - DELIM + r'honeybee' + DELIM: '🪲', - DELIM + r'(lady_)?beetle' + DELIM: 'ðŸž', - DELIM + r'cricket' + DELIM: '🦗', - DELIM + r'cockroach' + DELIM: '🪳', - DELIM + r'spider' + DELIM: '🕷ï¸', - DELIM + r'spider_web' + DELIM: '🕸ï¸', - DELIM + r'scorpion' + DELIM: '🦂', - DELIM + r'mosquito' + DELIM: '🦟', - DELIM + r'fly' + DELIM: '🪰', - DELIM + r'worm' + DELIM: '🪱', - DELIM + r'microbe' + DELIM: '🦠', - + DELIM + r"snail" + DELIM: "ðŸŒ", + DELIM + r"butterfly" + DELIM: "🦋", + DELIM + r"bug" + DELIM: "ðŸ›", + DELIM + r"ant" + DELIM: "ðŸœ", + DELIM + r"bee" + DELIM: "ðŸ", + DELIM + r"honeybee" + DELIM: "🪲", + DELIM + r"(lady_)?beetle" + DELIM: "ðŸž", + DELIM + r"cricket" + DELIM: "🦗", + DELIM + r"cockroach" + DELIM: "🪳", + DELIM + r"spider" + DELIM: "🕷ï¸", + DELIM + r"spider_web" + DELIM: "🕸ï¸", + DELIM + r"scorpion" + DELIM: "🦂", + DELIM + r"mosquito" + DELIM: "🦟", + DELIM + r"fly" + DELIM: "🪰", + DELIM + r"worm" + DELIM: "🪱", + DELIM + r"microbe" + DELIM: "🦠", # # Plant 
Flower # - DELIM + r'bouquet' + DELIM: 'ðŸ’', - DELIM + r'cherry_blossom' + DELIM: '🌸', - DELIM + r'white_flower' + DELIM: '💮', - DELIM + r'rosette' + DELIM: 'ðŸµï¸', - DELIM + r'rose' + DELIM: '🌹', - DELIM + r'wilted_flower' + DELIM: '🥀', - DELIM + r'hibiscus' + DELIM: '🌺', - DELIM + r'sunflower' + DELIM: '🌻', - DELIM + r'blossom' + DELIM: '🌼', - DELIM + r'tulip' + DELIM: '🌷', - + DELIM + r"bouquet" + DELIM: "ðŸ’", + DELIM + r"cherry_blossom" + DELIM: "🌸", + DELIM + r"white_flower" + DELIM: "💮", + DELIM + r"rosette" + DELIM: "ðŸµï¸", + DELIM + r"rose" + DELIM: "🌹", + DELIM + r"wilted_flower" + DELIM: "🥀", + DELIM + r"hibiscus" + DELIM: "🌺", + DELIM + r"sunflower" + DELIM: "🌻", + DELIM + r"blossom" + DELIM: "🌼", + DELIM + r"tulip" + DELIM: "🌷", # # Plant Other # - DELIM + r'seedling' + DELIM: '🌱', - DELIM + r'potted_plant' + DELIM: '🪴', - DELIM + r'evergreen_tree' + DELIM: '🌲', - DELIM + r'deciduous_tree' + DELIM: '🌳', - DELIM + r'palm_tree' + DELIM: '🌴', - DELIM + r'cactus' + DELIM: '🌵', - DELIM + r'ear_of_rice' + DELIM: '🌾', - DELIM + r'herb' + DELIM: '🌿', - DELIM + r'shamrock' + DELIM: '☘ï¸', - DELIM + r'four_leaf_clover' + DELIM: 'ðŸ€', - DELIM + r'maple_leaf' + DELIM: 'ðŸ', - DELIM + r'fallen_leaf' + DELIM: 'ðŸ‚', - DELIM + r'leaves' + DELIM: 'ðŸƒ', - DELIM + r'mushroom' + DELIM: 'ðŸ„', - + DELIM + r"seedling" + DELIM: "🌱", + DELIM + r"potted_plant" + DELIM: "🪴", + DELIM + r"evergreen_tree" + DELIM: "🌲", + DELIM + r"deciduous_tree" + DELIM: "🌳", + DELIM + r"palm_tree" + DELIM: "🌴", + DELIM + r"cactus" + DELIM: "🌵", + DELIM + r"ear_of_rice" + DELIM: "🌾", + DELIM + r"herb" + DELIM: "🌿", + DELIM + r"shamrock" + DELIM: "☘ï¸", + DELIM + r"four_leaf_clover" + DELIM: "ðŸ€", + DELIM + r"maple_leaf" + DELIM: "ðŸ", + DELIM + r"fallen_leaf" + DELIM: "ðŸ‚", + DELIM + r"leaves" + DELIM: "ðŸƒ", + DELIM + r"mushroom" + DELIM: "ðŸ„", # # Food Fruit # - DELIM + r'grapes' + DELIM: 'ðŸ‡', - DELIM + r'melon' + DELIM: 'ðŸˆ', - DELIM + r'watermelon' + DELIM: 'ðŸ‰', - DELIM + 
r'(orange|mandarin|tangerine)' + DELIM: 'ðŸŠ', - DELIM + r'lemon' + DELIM: 'ðŸ‹', - DELIM + r'banana' + DELIM: 'ðŸŒ', - DELIM + r'pineapple' + DELIM: 'ðŸ', - DELIM + r'mango' + DELIM: '🥭', - DELIM + r'apple' + DELIM: 'ðŸŽ', - DELIM + r'green_apple' + DELIM: 'ðŸ', - DELIM + r'pear' + DELIM: 'ðŸ', - DELIM + r'peach' + DELIM: 'ðŸ‘', - DELIM + r'cherries' + DELIM: 'ðŸ’', - DELIM + r'strawberry' + DELIM: 'ðŸ“', - DELIM + r'blueberries' + DELIM: 'ðŸ«', - DELIM + r'kiwi_fruit' + DELIM: 'ðŸ¥', - DELIM + r'tomato' + DELIM: 'ðŸ…', - DELIM + r'olive' + DELIM: '🫒', - DELIM + r'coconut' + DELIM: '🥥', - + DELIM + r"grapes" + DELIM: "ðŸ‡", + DELIM + r"melon" + DELIM: "ðŸˆ", + DELIM + r"watermelon" + DELIM: "ðŸ‰", + DELIM + r"(orange|mandarin|tangerine)" + DELIM: "ðŸŠ", + DELIM + r"lemon" + DELIM: "ðŸ‹", + DELIM + r"banana" + DELIM: "ðŸŒ", + DELIM + r"pineapple" + DELIM: "ðŸ", + DELIM + r"mango" + DELIM: "🥭", + DELIM + r"apple" + DELIM: "ðŸŽ", + DELIM + r"green_apple" + DELIM: "ðŸ", + DELIM + r"pear" + DELIM: "ðŸ", + DELIM + r"peach" + DELIM: "ðŸ‘", + DELIM + r"cherries" + DELIM: "ðŸ’", + DELIM + r"strawberry" + DELIM: "ðŸ“", + DELIM + r"blueberries" + DELIM: "ðŸ«", + DELIM + r"kiwi_fruit" + DELIM: "ðŸ¥", + DELIM + r"tomato" + DELIM: "ðŸ…", + DELIM + r"olive" + DELIM: "🫒", + DELIM + r"coconut" + DELIM: "🥥", # # Food Vegetable # - DELIM + r'avocado' + DELIM: '🥑', - DELIM + r'eggplant' + DELIM: 'ðŸ†', - DELIM + r'potato' + DELIM: '🥔', - DELIM + r'carrot' + DELIM: '🥕', - DELIM + r'corn' + DELIM: '🌽', - DELIM + r'hot_pepper' + DELIM: '🌶ï¸', - DELIM + r'bell_pepper' + DELIM: '🫑', - DELIM + r'cucumber' + DELIM: '🥒', - DELIM + r'leafy_green' + DELIM: '🥬', - DELIM + r'broccoli' + DELIM: '🥦', - DELIM + r'garlic' + DELIM: '🧄', - DELIM + r'onion' + DELIM: '🧅', - DELIM + r'peanuts' + DELIM: '🥜', - DELIM + r'chestnut' + DELIM: '🌰', - + DELIM + r"avocado" + DELIM: "🥑", + DELIM + r"eggplant" + DELIM: "ðŸ†", + DELIM + r"potato" + DELIM: "🥔", + DELIM + r"carrot" + DELIM: "🥕", + DELIM + r"corn" + 
DELIM: "🌽", + DELIM + r"hot_pepper" + DELIM: "🌶ï¸", + DELIM + r"bell_pepper" + DELIM: "🫑", + DELIM + r"cucumber" + DELIM: "🥒", + DELIM + r"leafy_green" + DELIM: "🥬", + DELIM + r"broccoli" + DELIM: "🥦", + DELIM + r"garlic" + DELIM: "🧄", + DELIM + r"onion" + DELIM: "🧅", + DELIM + r"peanuts" + DELIM: "🥜", + DELIM + r"chestnut" + DELIM: "🌰", # # Food Prepared # - DELIM + r'bread' + DELIM: 'ðŸž', - DELIM + r'croissant' + DELIM: 'ðŸ¥', - DELIM + r'baguette_bread' + DELIM: '🥖', - DELIM + r'flatbread' + DELIM: '🫓', - DELIM + r'pretzel' + DELIM: '🥨', - DELIM + r'bagel' + DELIM: '🥯', - DELIM + r'pancakes' + DELIM: '🥞', - DELIM + r'waffle' + DELIM: '🧇', - DELIM + r'cheese' + DELIM: '🧀', - DELIM + r'meat_on_bone' + DELIM: 'ðŸ–', - DELIM + r'poultry_leg' + DELIM: 'ðŸ—', - DELIM + r'cut_of_meat' + DELIM: '🥩', - DELIM + r'bacon' + DELIM: '🥓', - DELIM + r'hamburger' + DELIM: 'ðŸ”', - DELIM + r'fries' + DELIM: 'ðŸŸ', - DELIM + r'pizza' + DELIM: 'ðŸ•', - DELIM + r'hotdog' + DELIM: '🌭', - DELIM + r'sandwich' + DELIM: '🥪', - DELIM + r'taco' + DELIM: '🌮', - DELIM + r'burrito' + DELIM: '🌯', - DELIM + r'tamale' + DELIM: '🫔', - DELIM + r'stuffed_flatbread' + DELIM: '🥙', - DELIM + r'falafel' + DELIM: '🧆', - DELIM + r'egg' + DELIM: '🥚', - DELIM + r'fried_egg' + DELIM: 'ðŸ³', - DELIM + r'shallow_pan_of_food' + DELIM: '🥘', - DELIM + r'stew' + DELIM: 'ðŸ²', - DELIM + r'fondue' + DELIM: '🫕', - DELIM + r'bowl_with_spoon' + DELIM: '🥣', - DELIM + r'green_salad' + DELIM: '🥗', - DELIM + r'popcorn' + DELIM: 'ðŸ¿', - DELIM + r'butter' + DELIM: '🧈', - DELIM + r'salt' + DELIM: '🧂', - DELIM + r'canned_food' + DELIM: '🥫', - + DELIM + r"bread" + DELIM: "ðŸž", + DELIM + r"croissant" + DELIM: "ðŸ¥", + DELIM + r"baguette_bread" + DELIM: "🥖", + DELIM + r"flatbread" + DELIM: "🫓", + DELIM + r"pretzel" + DELIM: "🥨", + DELIM + r"bagel" + DELIM: "🥯", + DELIM + r"pancakes" + DELIM: "🥞", + DELIM + r"waffle" + DELIM: "🧇", + DELIM + r"cheese" + DELIM: "🧀", + DELIM + r"meat_on_bone" + DELIM: "ðŸ–", + DELIM + 
r"poultry_leg" + DELIM: "ðŸ—", + DELIM + r"cut_of_meat" + DELIM: "🥩", + DELIM + r"bacon" + DELIM: "🥓", + DELIM + r"hamburger" + DELIM: "ðŸ”", + DELIM + r"fries" + DELIM: "ðŸŸ", + DELIM + r"pizza" + DELIM: "ðŸ•", + DELIM + r"hotdog" + DELIM: "🌭", + DELIM + r"sandwich" + DELIM: "🥪", + DELIM + r"taco" + DELIM: "🌮", + DELIM + r"burrito" + DELIM: "🌯", + DELIM + r"tamale" + DELIM: "🫔", + DELIM + r"stuffed_flatbread" + DELIM: "🥙", + DELIM + r"falafel" + DELIM: "🧆", + DELIM + r"egg" + DELIM: "🥚", + DELIM + r"fried_egg" + DELIM: "ðŸ³", + DELIM + r"shallow_pan_of_food" + DELIM: "🥘", + DELIM + r"stew" + DELIM: "ðŸ²", + DELIM + r"fondue" + DELIM: "🫕", + DELIM + r"bowl_with_spoon" + DELIM: "🥣", + DELIM + r"green_salad" + DELIM: "🥗", + DELIM + r"popcorn" + DELIM: "ðŸ¿", + DELIM + r"butter" + DELIM: "🧈", + DELIM + r"salt" + DELIM: "🧂", + DELIM + r"canned_food" + DELIM: "🥫", # # Food Asian # - DELIM + r'bento' + DELIM: 'ðŸ±', - DELIM + r'rice_cracker' + DELIM: 'ðŸ˜', - DELIM + r'rice_ball' + DELIM: 'ðŸ™', - DELIM + r'rice' + DELIM: 'ðŸš', - DELIM + r'curry' + DELIM: 'ðŸ›', - DELIM + r'ramen' + DELIM: 'ðŸœ', - DELIM + r'spaghetti' + DELIM: 'ðŸ', - DELIM + r'sweet_potato' + DELIM: 'ðŸ ', - DELIM + r'oden' + DELIM: 'ðŸ¢', - DELIM + r'sushi' + DELIM: 'ðŸ£', - DELIM + r'fried_shrimp' + DELIM: 'ðŸ¤', - DELIM + r'fish_cake' + DELIM: 'ðŸ¥', - DELIM + r'moon_cake' + DELIM: '🥮', - DELIM + r'dango' + DELIM: 'ðŸ¡', - DELIM + r'dumpling' + DELIM: '🥟', - DELIM + r'fortune_cookie' + DELIM: '🥠', - DELIM + r'takeout_box' + DELIM: '🥡', - + DELIM + r"bento" + DELIM: "ðŸ±", + DELIM + r"rice_cracker" + DELIM: "ðŸ˜", + DELIM + r"rice_ball" + DELIM: "ðŸ™", + DELIM + r"rice" + DELIM: "ðŸš", + DELIM + r"curry" + DELIM: "ðŸ›", + DELIM + r"ramen" + DELIM: "ðŸœ", + DELIM + r"spaghetti" + DELIM: "ðŸ", + DELIM + r"sweet_potato" + DELIM: "ðŸ ", + DELIM + r"oden" + DELIM: "ðŸ¢", + DELIM + r"sushi" + DELIM: "ðŸ£", + DELIM + r"fried_shrimp" + DELIM: "ðŸ¤", + DELIM + r"fish_cake" + DELIM: "ðŸ¥", + DELIM + 
r"moon_cake" + DELIM: "🥮", + DELIM + r"dango" + DELIM: "ðŸ¡", + DELIM + r"dumpling" + DELIM: "🥟", + DELIM + r"fortune_cookie" + DELIM: "🥠", + DELIM + r"takeout_box" + DELIM: "🥡", # # Food Marine # - DELIM + r'crab' + DELIM: '🦀', - DELIM + r'lobster' + DELIM: '🦞', - DELIM + r'shrimp' + DELIM: 'ðŸ¦', - DELIM + r'squid' + DELIM: '🦑', - DELIM + r'oyster' + DELIM: '🦪', - + DELIM + r"crab" + DELIM: "🦀", + DELIM + r"lobster" + DELIM: "🦞", + DELIM + r"shrimp" + DELIM: "ðŸ¦", + DELIM + r"squid" + DELIM: "🦑", + DELIM + r"oyster" + DELIM: "🦪", # # Food Sweet # - DELIM + r'icecream' + DELIM: 'ðŸ¦', - DELIM + r'shaved_ice' + DELIM: 'ðŸ§', - DELIM + r'ice_cream' + DELIM: 'ðŸ¨', - DELIM + r'doughnut' + DELIM: 'ðŸ©', - DELIM + r'cookie' + DELIM: 'ðŸª', - DELIM + r'birthday' + DELIM: '🎂', - DELIM + r'cake' + DELIM: 'ðŸ°', - DELIM + r'cupcake' + DELIM: 'ðŸ§', - DELIM + r'pie' + DELIM: '🥧', - DELIM + r'chocolate_bar' + DELIM: 'ðŸ«', - DELIM + r'candy' + DELIM: 'ðŸ¬', - DELIM + r'lollipop' + DELIM: 'ðŸ­', - DELIM + r'custard' + DELIM: 'ðŸ®', - DELIM + r'honey_pot' + DELIM: 'ðŸ¯', - + DELIM + r"icecream" + DELIM: "ðŸ¦", + DELIM + r"shaved_ice" + DELIM: "ðŸ§", + DELIM + r"ice_cream" + DELIM: "ðŸ¨", + DELIM + r"doughnut" + DELIM: "ðŸ©", + DELIM + r"cookie" + DELIM: "ðŸª", + DELIM + r"birthday" + DELIM: "🎂", + DELIM + r"cake" + DELIM: "ðŸ°", + DELIM + r"cupcake" + DELIM: "ðŸ§", + DELIM + r"pie" + DELIM: "🥧", + DELIM + r"chocolate_bar" + DELIM: "ðŸ«", + DELIM + r"candy" + DELIM: "ðŸ¬", + DELIM + r"lollipop" + DELIM: "ðŸ­", + DELIM + r"custard" + DELIM: "ðŸ®", + DELIM + r"honey_pot" + DELIM: "ðŸ¯", # # Drink # - DELIM + r'baby_bottle' + DELIM: 'ðŸ¼', - DELIM + r'milk_glass' + DELIM: '🥛', - DELIM + r'coffee' + DELIM: '☕', - DELIM + r'teapot' + DELIM: '🫖', - DELIM + r'tea' + DELIM: 'ðŸµ', - DELIM + r'sake' + DELIM: 'ðŸ¶', - DELIM + r'champagne' + DELIM: 'ðŸ¾', - DELIM + r'wine_glass' + DELIM: 'ðŸ·', - DELIM + r'cocktail' + DELIM: 'ðŸ¸', - DELIM + r'tropical_drink' + DELIM: 'ðŸ¹', - DELIM + 
r'beer' + DELIM: 'ðŸº', - DELIM + r'beers' + DELIM: 'ðŸ»', - DELIM + r'clinking_glasses' + DELIM: '🥂', - DELIM + r'tumbler_glass' + DELIM: '🥃', - DELIM + r'cup_with_straw' + DELIM: '🥤', - DELIM + r'bubble_tea' + DELIM: '🧋', - DELIM + r'beverage_box' + DELIM: '🧃', - DELIM + r'mate' + DELIM: '🧉', - DELIM + r'ice_cube' + DELIM: '🧊', - + DELIM + r"baby_bottle" + DELIM: "ðŸ¼", + DELIM + r"milk_glass" + DELIM: "🥛", + DELIM + r"coffee" + DELIM: "☕", + DELIM + r"teapot" + DELIM: "🫖", + DELIM + r"tea" + DELIM: "ðŸµ", + DELIM + r"sake" + DELIM: "ðŸ¶", + DELIM + r"champagne" + DELIM: "ðŸ¾", + DELIM + r"wine_glass" + DELIM: "ðŸ·", + DELIM + r"cocktail" + DELIM: "ðŸ¸", + DELIM + r"tropical_drink" + DELIM: "ðŸ¹", + DELIM + r"beer" + DELIM: "ðŸº", + DELIM + r"beers" + DELIM: "ðŸ»", + DELIM + r"clinking_glasses" + DELIM: "🥂", + DELIM + r"tumbler_glass" + DELIM: "🥃", + DELIM + r"cup_with_straw" + DELIM: "🥤", + DELIM + r"bubble_tea" + DELIM: "🧋", + DELIM + r"beverage_box" + DELIM: "🧃", + DELIM + r"mate" + DELIM: "🧉", + DELIM + r"ice_cube" + DELIM: "🧊", # # Dishware # - DELIM + r'chopsticks' + DELIM: '🥢', - DELIM + r'plate_with_cutlery' + DELIM: 'ðŸ½ï¸', - DELIM + r'fork_and_knife' + DELIM: 'ðŸ´', - DELIM + r'spoon' + DELIM: '🥄', - DELIM + r'(hocho|knife)' + DELIM: '🔪', - DELIM + r'amphora' + DELIM: 'ðŸº', - + DELIM + r"chopsticks" + DELIM: "🥢", + DELIM + r"plate_with_cutlery" + DELIM: "ðŸ½ï¸", + DELIM + r"fork_and_knife" + DELIM: "ðŸ´", + DELIM + r"spoon" + DELIM: "🥄", + DELIM + r"(hocho|knife)" + DELIM: "🔪", + DELIM + r"amphora" + DELIM: "ðŸº", # # Place Map # - DELIM + r'earth_africa' + DELIM: 'ðŸŒ', - DELIM + r'earth_americas' + DELIM: '🌎', - DELIM + r'earth_asia' + DELIM: 'ðŸŒ', - DELIM + r'globe_with_meridians' + DELIM: 'ðŸŒ', - DELIM + r'world_map' + DELIM: '🗺ï¸', - DELIM + r'japan' + DELIM: '🗾', - DELIM + r'compass' + DELIM: '🧭', - + DELIM + r"earth_africa" + DELIM: "ðŸŒ", + DELIM + r"earth_americas" + DELIM: "🌎", + DELIM + r"earth_asia" + DELIM: "ðŸŒ", + DELIM + 
r"globe_with_meridians" + DELIM: "ðŸŒ", + DELIM + r"world_map" + DELIM: "🗺ï¸", + DELIM + r"japan" + DELIM: "🗾", + DELIM + r"compass" + DELIM: "🧭", # # Place Geographic # - DELIM + r'mountain_snow' + DELIM: 'ðŸ”ï¸', - DELIM + r'mountain' + DELIM: 'â›°ï¸', - DELIM + r'volcano' + DELIM: '🌋', - DELIM + r'mount_fuji' + DELIM: '🗻', - DELIM + r'camping' + DELIM: 'ðŸ•ï¸', - DELIM + r'beach_umbrella' + DELIM: 'ðŸ–ï¸', - DELIM + r'desert' + DELIM: 'ðŸœï¸', - DELIM + r'desert_island' + DELIM: 'ðŸï¸', - DELIM + r'national_park' + DELIM: 'ðŸžï¸', - + DELIM + r"mountain_snow" + DELIM: "ðŸ”ï¸", + DELIM + r"mountain" + DELIM: "â›°ï¸", + DELIM + r"volcano" + DELIM: "🌋", + DELIM + r"mount_fuji" + DELIM: "🗻", + DELIM + r"camping" + DELIM: "ðŸ•ï¸", + DELIM + r"beach_umbrella" + DELIM: "ðŸ–ï¸", + DELIM + r"desert" + DELIM: "ðŸœï¸", + DELIM + r"desert_island" + DELIM: "ðŸï¸", + DELIM + r"national_park" + DELIM: "ðŸžï¸", # # Place Building # - DELIM + r'stadium' + DELIM: 'ðŸŸï¸', - DELIM + r'classical_building' + DELIM: 'ðŸ›ï¸', - DELIM + r'building_construction' + DELIM: 'ðŸ—ï¸', - DELIM + r'bricks' + DELIM: '🧱', - DELIM + r'rock' + DELIM: '🪨', - DELIM + r'wood' + DELIM: '🪵', - DELIM + r'hut' + DELIM: '🛖', - DELIM + r'houses' + DELIM: 'ðŸ˜ï¸', - DELIM + r'derelict_house' + DELIM: 'ðŸšï¸', - DELIM + r'house' + DELIM: 'ðŸ ', - DELIM + r'house_with_garden' + DELIM: 'ðŸ¡', - DELIM + r'office' + DELIM: 'ðŸ¢', - DELIM + r'post_office' + DELIM: 'ðŸ£', - DELIM + r'european_post_office' + DELIM: 'ðŸ¤', - DELIM + r'hospital' + DELIM: 'ðŸ¥', - DELIM + r'bank' + DELIM: 'ðŸ¦', - DELIM + r'hotel' + DELIM: 'ðŸ¨', - DELIM + r'love_hotel' + DELIM: 'ðŸ©', - DELIM + r'convenience_store' + DELIM: 'ðŸª', - DELIM + r'school' + DELIM: 'ðŸ«', - DELIM + r'department_store' + DELIM: 'ðŸ¬', - DELIM + r'factory' + DELIM: 'ðŸ­', - DELIM + r'japanese_castle' + DELIM: 'ðŸ¯', - DELIM + r'european_castle' + DELIM: 'ðŸ°', - DELIM + r'wedding' + DELIM: '💒', - DELIM + r'tokyo_tower' + DELIM: '🗼', - DELIM + 
r'statue_of_liberty' + DELIM: '🗽', - + DELIM + r"stadium" + DELIM: "ðŸŸï¸", + DELIM + r"classical_building" + DELIM: "ðŸ›ï¸", + DELIM + r"building_construction" + DELIM: "ðŸ—ï¸", + DELIM + r"bricks" + DELIM: "🧱", + DELIM + r"rock" + DELIM: "🪨", + DELIM + r"wood" + DELIM: "🪵", + DELIM + r"hut" + DELIM: "🛖", + DELIM + r"houses" + DELIM: "ðŸ˜ï¸", + DELIM + r"derelict_house" + DELIM: "ðŸšï¸", + DELIM + r"house" + DELIM: "ðŸ ", + DELIM + r"house_with_garden" + DELIM: "ðŸ¡", + DELIM + r"office" + DELIM: "ðŸ¢", + DELIM + r"post_office" + DELIM: "ðŸ£", + DELIM + r"european_post_office" + DELIM: "ðŸ¤", + DELIM + r"hospital" + DELIM: "ðŸ¥", + DELIM + r"bank" + DELIM: "ðŸ¦", + DELIM + r"hotel" + DELIM: "ðŸ¨", + DELIM + r"love_hotel" + DELIM: "ðŸ©", + DELIM + r"convenience_store" + DELIM: "ðŸª", + DELIM + r"school" + DELIM: "ðŸ«", + DELIM + r"department_store" + DELIM: "ðŸ¬", + DELIM + r"factory" + DELIM: "ðŸ­", + DELIM + r"japanese_castle" + DELIM: "ðŸ¯", + DELIM + r"european_castle" + DELIM: "ðŸ°", + DELIM + r"wedding" + DELIM: "💒", + DELIM + r"tokyo_tower" + DELIM: "🗼", + DELIM + r"statue_of_liberty" + DELIM: "🗽", # # Place Religious # - DELIM + r'church' + DELIM: '⛪', - DELIM + r'mosque' + DELIM: '🕌', - DELIM + r'hindu_temple' + DELIM: '🛕', - DELIM + r'synagogue' + DELIM: 'ðŸ•', - DELIM + r'shinto_shrine' + DELIM: '⛩ï¸', - DELIM + r'kaaba' + DELIM: '🕋', - + DELIM + r"church" + DELIM: "⛪", + DELIM + r"mosque" + DELIM: "🕌", + DELIM + r"hindu_temple" + DELIM: "🛕", + DELIM + r"synagogue" + DELIM: "ðŸ•", + DELIM + r"shinto_shrine" + DELIM: "⛩ï¸", + DELIM + r"kaaba" + DELIM: "🕋", # # Place Other # - DELIM + r'fountain' + DELIM: '⛲', - DELIM + r'tent' + DELIM: '⛺', - DELIM + r'foggy' + DELIM: 'ðŸŒ', - DELIM + r'night_with_stars' + DELIM: '🌃', - DELIM + r'cityscape' + DELIM: 'ðŸ™ï¸', - DELIM + r'sunrise_over_mountains' + DELIM: '🌄', - DELIM + r'sunrise' + DELIM: '🌅', - DELIM + r'city_sunset' + DELIM: '🌆', - DELIM + r'city_sunrise' + DELIM: '🌇', - DELIM + r'bridge_at_night' + 
DELIM: '🌉', - DELIM + r'hotsprings' + DELIM: '♨ï¸', - DELIM + r'carousel_horse' + DELIM: '🎠', - DELIM + r'ferris_wheel' + DELIM: '🎡', - DELIM + r'roller_coaster' + DELIM: '🎢', - DELIM + r'barber' + DELIM: '💈', - DELIM + r'circus_tent' + DELIM: '🎪', - + DELIM + r"fountain" + DELIM: "⛲", + DELIM + r"tent" + DELIM: "⛺", + DELIM + r"foggy" + DELIM: "ðŸŒ", + DELIM + r"night_with_stars" + DELIM: "🌃", + DELIM + r"cityscape" + DELIM: "ðŸ™ï¸", + DELIM + r"sunrise_over_mountains" + DELIM: "🌄", + DELIM + r"sunrise" + DELIM: "🌅", + DELIM + r"city_sunset" + DELIM: "🌆", + DELIM + r"city_sunrise" + DELIM: "🌇", + DELIM + r"bridge_at_night" + DELIM: "🌉", + DELIM + r"hotsprings" + DELIM: "♨ï¸", + DELIM + r"carousel_horse" + DELIM: "🎠", + DELIM + r"ferris_wheel" + DELIM: "🎡", + DELIM + r"roller_coaster" + DELIM: "🎢", + DELIM + r"barber" + DELIM: "💈", + DELIM + r"circus_tent" + DELIM: "🎪", # # Transport Ground # - DELIM + r'steam_locomotive' + DELIM: '🚂', - DELIM + r'railway_car' + DELIM: '🚃', - DELIM + r'bullettrain_side' + DELIM: '🚄', - DELIM + r'bullettrain_front' + DELIM: '🚅', - DELIM + r'train2' + DELIM: '🚆', - DELIM + r'metro' + DELIM: '🚇', - DELIM + r'light_rail' + DELIM: '🚈', - DELIM + r'station' + DELIM: '🚉', - DELIM + r'tram' + DELIM: '🚊', - DELIM + r'monorail' + DELIM: 'ðŸš', - DELIM + r'mountain_railway' + DELIM: '🚞', - DELIM + r'train' + DELIM: '🚋', - DELIM + r'bus' + DELIM: '🚌', - DELIM + r'oncoming_bus' + DELIM: 'ðŸš', - DELIM + r'trolleybus' + DELIM: '🚎', - DELIM + r'minibus' + DELIM: 'ðŸš', - DELIM + r'ambulance' + DELIM: '🚑', - DELIM + r'fire_engine' + DELIM: '🚒', - DELIM + r'police_car' + DELIM: '🚓', - DELIM + r'oncoming_police_car' + DELIM: '🚔', - DELIM + r'taxi' + DELIM: '🚕', - DELIM + r'oncoming_taxi' + DELIM: '🚖', - DELIM + r'car' + DELIM: '🚗', - DELIM + r'(red_car|oncoming_automobile)' + DELIM: '🚘', - DELIM + r'blue_car' + DELIM: '🚙', - DELIM + r'pickup_truck' + DELIM: '🛻', - DELIM + r'truck' + DELIM: '🚚', - DELIM + r'articulated_lorry' + DELIM: '🚛', - DELIM + 
r'tractor' + DELIM: '🚜', - DELIM + r'racing_car' + DELIM: 'ðŸŽï¸', - DELIM + r'motorcycle' + DELIM: 'ðŸï¸', - DELIM + r'motor_scooter' + DELIM: '🛵', - DELIM + r'manual_wheelchair' + DELIM: '🦽', - DELIM + r'motorized_wheelchair' + DELIM: '🦼', - DELIM + r'auto_rickshaw' + DELIM: '🛺', - DELIM + r'bike' + DELIM: '🚲', - DELIM + r'kick_scooter' + DELIM: '🛴', - DELIM + r'skateboard' + DELIM: '🛹', - DELIM + r'roller_skate' + DELIM: '🛼', - DELIM + r'busstop' + DELIM: 'ðŸš', - DELIM + r'motorway' + DELIM: '🛣ï¸', - DELIM + r'railway_track' + DELIM: '🛤ï¸', - DELIM + r'oil_drum' + DELIM: '🛢ï¸', - DELIM + r'fuelpump' + DELIM: '⛽', - DELIM + r'rotating_light' + DELIM: '🚨', - DELIM + r'traffic_light' + DELIM: '🚥', - DELIM + r'vertical_traffic_light' + DELIM: '🚦', - DELIM + r'stop_sign' + DELIM: '🛑', - DELIM + r'construction' + DELIM: '🚧', - + DELIM + r"steam_locomotive" + DELIM: "🚂", + DELIM + r"railway_car" + DELIM: "🚃", + DELIM + r"bullettrain_side" + DELIM: "🚄", + DELIM + r"bullettrain_front" + DELIM: "🚅", + DELIM + r"train2" + DELIM: "🚆", + DELIM + r"metro" + DELIM: "🚇", + DELIM + r"light_rail" + DELIM: "🚈", + DELIM + r"station" + DELIM: "🚉", + DELIM + r"tram" + DELIM: "🚊", + DELIM + r"monorail" + DELIM: "ðŸš", + DELIM + r"mountain_railway" + DELIM: "🚞", + DELIM + r"train" + DELIM: "🚋", + DELIM + r"bus" + DELIM: "🚌", + DELIM + r"oncoming_bus" + DELIM: "ðŸš", + DELIM + r"trolleybus" + DELIM: "🚎", + DELIM + r"minibus" + DELIM: "ðŸš", + DELIM + r"ambulance" + DELIM: "🚑", + DELIM + r"fire_engine" + DELIM: "🚒", + DELIM + r"police_car" + DELIM: "🚓", + DELIM + r"oncoming_police_car" + DELIM: "🚔", + DELIM + r"taxi" + DELIM: "🚕", + DELIM + r"oncoming_taxi" + DELIM: "🚖", + DELIM + r"car" + DELIM: "🚗", + DELIM + r"(red_car|oncoming_automobile)" + DELIM: "🚘", + DELIM + r"blue_car" + DELIM: "🚙", + DELIM + r"pickup_truck" + DELIM: "🛻", + DELIM + r"truck" + DELIM: "🚚", + DELIM + r"articulated_lorry" + DELIM: "🚛", + DELIM + r"tractor" + DELIM: "🚜", + DELIM + r"racing_car" + DELIM: "ðŸŽï¸", + 
DELIM + r"motorcycle" + DELIM: "ðŸï¸", + DELIM + r"motor_scooter" + DELIM: "🛵", + DELIM + r"manual_wheelchair" + DELIM: "🦽", + DELIM + r"motorized_wheelchair" + DELIM: "🦼", + DELIM + r"auto_rickshaw" + DELIM: "🛺", + DELIM + r"bike" + DELIM: "🚲", + DELIM + r"kick_scooter" + DELIM: "🛴", + DELIM + r"skateboard" + DELIM: "🛹", + DELIM + r"roller_skate" + DELIM: "🛼", + DELIM + r"busstop" + DELIM: "ðŸš", + DELIM + r"motorway" + DELIM: "🛣ï¸", + DELIM + r"railway_track" + DELIM: "🛤ï¸", + DELIM + r"oil_drum" + DELIM: "🛢ï¸", + DELIM + r"fuelpump" + DELIM: "⛽", + DELIM + r"rotating_light" + DELIM: "🚨", + DELIM + r"traffic_light" + DELIM: "🚥", + DELIM + r"vertical_traffic_light" + DELIM: "🚦", + DELIM + r"stop_sign" + DELIM: "🛑", + DELIM + r"construction" + DELIM: "🚧", # # Transport Water # - DELIM + r'anchor' + DELIM: 'âš“', - DELIM + r'(sailboat|boat)' + DELIM: '⛵', - DELIM + r'canoe' + DELIM: '🛶', - DELIM + r'speedboat' + DELIM: '🚤', - DELIM + r'passenger_ship' + DELIM: '🛳ï¸', - DELIM + r'ferry' + DELIM: 'â›´ï¸', - DELIM + r'motor_boat' + DELIM: '🛥ï¸', - DELIM + r'ship' + DELIM: '🚢', - + DELIM + r"anchor" + DELIM: "âš“", + DELIM + r"(sailboat|boat)" + DELIM: "⛵", + DELIM + r"canoe" + DELIM: "🛶", + DELIM + r"speedboat" + DELIM: "🚤", + DELIM + r"passenger_ship" + DELIM: "🛳ï¸", + DELIM + r"ferry" + DELIM: "â›´ï¸", + DELIM + r"motor_boat" + DELIM: "🛥ï¸", + DELIM + r"ship" + DELIM: "🚢", # # Transport Air # - DELIM + r'airplane' + DELIM: '✈ï¸', - DELIM + r'small_airplane' + DELIM: '🛩ï¸', - DELIM + r'flight_departure' + DELIM: '🛫', - DELIM + r'flight_arrival' + DELIM: '🛬', - DELIM + r'parachute' + DELIM: '🪂', - DELIM + r'seat' + DELIM: '💺', - DELIM + r'helicopter' + DELIM: 'ðŸš', - DELIM + r'suspension_railway' + DELIM: '🚟', - DELIM + r'mountain_cableway' + DELIM: '🚠', - DELIM + r'aerial_tramway' + DELIM: '🚡', - DELIM + r'artificial_satellite' + DELIM: '🛰ï¸', - DELIM + r'rocket' + DELIM: '🚀', - DELIM + r'flying_saucer' + DELIM: '🛸', - + DELIM + r"airplane" + DELIM: "✈ï¸", + DELIM + 
r"small_airplane" + DELIM: "🛩ï¸", + DELIM + r"flight_departure" + DELIM: "🛫", + DELIM + r"flight_arrival" + DELIM: "🛬", + DELIM + r"parachute" + DELIM: "🪂", + DELIM + r"seat" + DELIM: "💺", + DELIM + r"helicopter" + DELIM: "ðŸš", + DELIM + r"suspension_railway" + DELIM: "🚟", + DELIM + r"mountain_cableway" + DELIM: "🚠", + DELIM + r"aerial_tramway" + DELIM: "🚡", + DELIM + r"artificial_satellite" + DELIM: "🛰ï¸", + DELIM + r"rocket" + DELIM: "🚀", + DELIM + r"flying_saucer" + DELIM: "🛸", # # Hotel # - DELIM + r'bellhop_bell' + DELIM: '🛎ï¸', - DELIM + r'luggage' + DELIM: '🧳', - + DELIM + r"bellhop_bell" + DELIM: "🛎ï¸", + DELIM + r"luggage" + DELIM: "🧳", # # Time # - DELIM + r'hourglass' + DELIM: '⌛', - DELIM + r'hourglass_flowing_sand' + DELIM: 'â³', - DELIM + r'watch' + DELIM: '⌚', - DELIM + r'alarm_clock' + DELIM: 'â°', - DELIM + r'stopwatch' + DELIM: 'â±ï¸', - DELIM + r'timer_clock' + DELIM: 'â²ï¸', - DELIM + r'mantelpiece_clock' + DELIM: '🕰ï¸', - DELIM + r'clock12' + DELIM: '🕛', - DELIM + r'clock1230' + DELIM: '🕧', - DELIM + r'clock1' + DELIM: 'ðŸ•', - DELIM + r'clock130' + DELIM: '🕜', - DELIM + r'clock2' + DELIM: '🕑', - DELIM + r'clock230' + DELIM: 'ðŸ•', - DELIM + r'clock3' + DELIM: '🕒', - DELIM + r'clock330' + DELIM: '🕞', - DELIM + r'clock4' + DELIM: '🕓', - DELIM + r'clock430' + DELIM: '🕟', - DELIM + r'clock5' + DELIM: '🕔', - DELIM + r'clock530' + DELIM: '🕠', - DELIM + r'clock6' + DELIM: '🕕', - DELIM + r'clock630' + DELIM: '🕡', - DELIM + r'clock7' + DELIM: '🕖', - DELIM + r'clock730' + DELIM: '🕢', - DELIM + r'clock8' + DELIM: '🕗', - DELIM + r'clock830' + DELIM: '🕣', - DELIM + r'clock9' + DELIM: '🕘', - DELIM + r'clock930' + DELIM: '🕤', - DELIM + r'clock10' + DELIM: '🕙', - DELIM + r'clock1030' + DELIM: '🕥', - DELIM + r'clock11' + DELIM: '🕚', - DELIM + r'clock1130' + DELIM: '🕦', - + DELIM + r"hourglass" + DELIM: "⌛", + DELIM + r"hourglass_flowing_sand" + DELIM: "â³", + DELIM + r"watch" + DELIM: "⌚", + DELIM + r"alarm_clock" + DELIM: "â°", + DELIM + r"stopwatch" + 
DELIM: "â±ï¸", + DELIM + r"timer_clock" + DELIM: "â²ï¸", + DELIM + r"mantelpiece_clock" + DELIM: "🕰ï¸", + DELIM + r"clock12" + DELIM: "🕛", + DELIM + r"clock1230" + DELIM: "🕧", + DELIM + r"clock1" + DELIM: "ðŸ•", + DELIM + r"clock130" + DELIM: "🕜", + DELIM + r"clock2" + DELIM: "🕑", + DELIM + r"clock230" + DELIM: "ðŸ•", + DELIM + r"clock3" + DELIM: "🕒", + DELIM + r"clock330" + DELIM: "🕞", + DELIM + r"clock4" + DELIM: "🕓", + DELIM + r"clock430" + DELIM: "🕟", + DELIM + r"clock5" + DELIM: "🕔", + DELIM + r"clock530" + DELIM: "🕠", + DELIM + r"clock6" + DELIM: "🕕", + DELIM + r"clock630" + DELIM: "🕡", + DELIM + r"clock7" + DELIM: "🕖", + DELIM + r"clock730" + DELIM: "🕢", + DELIM + r"clock8" + DELIM: "🕗", + DELIM + r"clock830" + DELIM: "🕣", + DELIM + r"clock9" + DELIM: "🕘", + DELIM + r"clock930" + DELIM: "🕤", + DELIM + r"clock10" + DELIM: "🕙", + DELIM + r"clock1030" + DELIM: "🕥", + DELIM + r"clock11" + DELIM: "🕚", + DELIM + r"clock1130" + DELIM: "🕦", # Sky & Weather - DELIM + r'new_moon' + DELIM: '🌑', - DELIM + r'waxing_crescent_moon' + DELIM: '🌒', - DELIM + r'first_quarter_moon' + DELIM: '🌓', - DELIM + r'moon' + DELIM: '🌔', - DELIM + r'(waxing_gibbous_moon|full_moon)' + DELIM: '🌕', - DELIM + r'waning_gibbous_moon' + DELIM: '🌖', - DELIM + r'last_quarter_moon' + DELIM: '🌗', - DELIM + r'waning_crescent_moon' + DELIM: '🌘', - DELIM + r'crescent_moon' + DELIM: '🌙', - DELIM + r'new_moon_with_face' + DELIM: '🌚', - DELIM + r'first_quarter_moon_with_face' + DELIM: '🌛', - DELIM + r'last_quarter_moon_with_face' + DELIM: '🌜', - DELIM + r'thermometer' + DELIM: '🌡ï¸', - DELIM + r'sunny' + DELIM: '☀ï¸', - DELIM + r'full_moon_with_face' + DELIM: 'ðŸŒ', - DELIM + r'sun_with_face' + DELIM: '🌞', - DELIM + r'ringed_planet' + DELIM: 'ðŸª', - DELIM + r'star' + DELIM: 'â­', - DELIM + r'star2' + DELIM: '🌟', - DELIM + r'stars' + DELIM: '🌠', - DELIM + r'milky_way' + DELIM: '🌌', - DELIM + r'cloud' + DELIM: 'â˜ï¸', - DELIM + r'partly_sunny' + DELIM: 'â›…', - DELIM + r'cloud_with_lightning_and_rain' + 
DELIM: '⛈ï¸', - DELIM + r'sun_behind_small_cloud' + DELIM: '🌤ï¸', - DELIM + r'sun_behind_large_cloud' + DELIM: '🌥ï¸', - DELIM + r'sun_behind_rain_cloud' + DELIM: '🌦ï¸', - DELIM + r'cloud_with_rain' + DELIM: '🌧ï¸', - DELIM + r'cloud_with_snow' + DELIM: '🌨ï¸', - DELIM + r'cloud_with_lightning' + DELIM: '🌩ï¸', - DELIM + r'tornado' + DELIM: '🌪ï¸', - DELIM + r'fog' + DELIM: '🌫ï¸', - DELIM + r'wind_face' + DELIM: '🌬ï¸', - DELIM + r'cyclone' + DELIM: '🌀', - DELIM + r'rainbow' + DELIM: '🌈', - DELIM + r'closed_umbrella' + DELIM: '🌂', - DELIM + r'open_umbrella' + DELIM: '☂ï¸', - DELIM + r'umbrella' + DELIM: '☔', - DELIM + r'parasol_on_ground' + DELIM: 'â›±ï¸', - DELIM + r'zap' + DELIM: 'âš¡', - DELIM + r'snowflake' + DELIM: 'â„ï¸', - DELIM + r'snowman_with_snow' + DELIM: '☃ï¸', - DELIM + r'snowman' + DELIM: '⛄', - DELIM + r'comet' + DELIM: '☄ï¸', - DELIM + r'fire' + DELIM: '🔥', - DELIM + r'droplet' + DELIM: '💧', - DELIM + r'ocean' + DELIM: '🌊', - + DELIM + r"new_moon" + DELIM: "🌑", + DELIM + r"waxing_crescent_moon" + DELIM: "🌒", + DELIM + r"first_quarter_moon" + DELIM: "🌓", + DELIM + r"moon" + DELIM: "🌔", + DELIM + r"(waxing_gibbous_moon|full_moon)" + DELIM: "🌕", + DELIM + r"waning_gibbous_moon" + DELIM: "🌖", + DELIM + r"last_quarter_moon" + DELIM: "🌗", + DELIM + r"waning_crescent_moon" + DELIM: "🌘", + DELIM + r"crescent_moon" + DELIM: "🌙", + DELIM + r"new_moon_with_face" + DELIM: "🌚", + DELIM + r"first_quarter_moon_with_face" + DELIM: "🌛", + DELIM + r"last_quarter_moon_with_face" + DELIM: "🌜", + DELIM + r"thermometer" + DELIM: "🌡ï¸", + DELIM + r"sunny" + DELIM: "☀ï¸", + DELIM + r"full_moon_with_face" + DELIM: "ðŸŒ", + DELIM + r"sun_with_face" + DELIM: "🌞", + DELIM + r"ringed_planet" + DELIM: "ðŸª", + DELIM + r"star" + DELIM: "â­", + DELIM + r"star2" + DELIM: "🌟", + DELIM + r"stars" + DELIM: "🌠", + DELIM + r"milky_way" + DELIM: "🌌", + DELIM + r"cloud" + DELIM: "â˜ï¸", + DELIM + r"partly_sunny" + DELIM: "â›…", + DELIM + r"cloud_with_lightning_and_rain" + DELIM: "⛈ï¸", + DELIM 
+ r"sun_behind_small_cloud" + DELIM: "🌤ï¸", + DELIM + r"sun_behind_large_cloud" + DELIM: "🌥ï¸", + DELIM + r"sun_behind_rain_cloud" + DELIM: "🌦ï¸", + DELIM + r"cloud_with_rain" + DELIM: "🌧ï¸", + DELIM + r"cloud_with_snow" + DELIM: "🌨ï¸", + DELIM + r"cloud_with_lightning" + DELIM: "🌩ï¸", + DELIM + r"tornado" + DELIM: "🌪ï¸", + DELIM + r"fog" + DELIM: "🌫ï¸", + DELIM + r"wind_face" + DELIM: "🌬ï¸", + DELIM + r"cyclone" + DELIM: "🌀", + DELIM + r"rainbow" + DELIM: "🌈", + DELIM + r"closed_umbrella" + DELIM: "🌂", + DELIM + r"open_umbrella" + DELIM: "☂ï¸", + DELIM + r"umbrella" + DELIM: "☔", + DELIM + r"parasol_on_ground" + DELIM: "â›±ï¸", + DELIM + r"zap" + DELIM: "âš¡", + DELIM + r"snowflake" + DELIM: "â„ï¸", + DELIM + r"snowman_with_snow" + DELIM: "☃ï¸", + DELIM + r"snowman" + DELIM: "⛄", + DELIM + r"comet" + DELIM: "☄ï¸", + DELIM + r"fire" + DELIM: "🔥", + DELIM + r"droplet" + DELIM: "💧", + DELIM + r"ocean" + DELIM: "🌊", # # Event # - DELIM + r'jack_o_lantern' + DELIM: '🎃', - DELIM + r'christmas_tree' + DELIM: '🎄', - DELIM + r'fireworks' + DELIM: '🎆', - DELIM + r'sparkler' + DELIM: '🎇', - DELIM + r'firecracker' + DELIM: '🧨', - DELIM + r'sparkles' + DELIM: '✨', - DELIM + r'balloon' + DELIM: '🎈', - DELIM + r'tada' + DELIM: '🎉', - DELIM + r'confetti_ball' + DELIM: '🎊', - DELIM + r'tanabata_tree' + DELIM: '🎋', - DELIM + r'bamboo' + DELIM: 'ðŸŽ', - DELIM + r'dolls' + DELIM: '🎎', - DELIM + r'flags' + DELIM: 'ðŸŽ', - DELIM + r'wind_chime' + DELIM: 'ðŸŽ', - DELIM + r'rice_scene' + DELIM: '🎑', - DELIM + r'red_envelope' + DELIM: '🧧', - DELIM + r'ribbon' + DELIM: '🎀', - DELIM + r'gift' + DELIM: 'ðŸŽ', - DELIM + r'reminder_ribbon' + DELIM: '🎗ï¸', - DELIM + r'tickets' + DELIM: '🎟ï¸', - DELIM + r'ticket' + DELIM: '🎫', - + DELIM + r"jack_o_lantern" + DELIM: "🎃", + DELIM + r"christmas_tree" + DELIM: "🎄", + DELIM + r"fireworks" + DELIM: "🎆", + DELIM + r"sparkler" + DELIM: "🎇", + DELIM + r"firecracker" + DELIM: "🧨", + DELIM + r"sparkles" + DELIM: "✨", + DELIM + r"balloon" + DELIM: "🎈", + 
DELIM + r"tada" + DELIM: "🎉", + DELIM + r"confetti_ball" + DELIM: "🎊", + DELIM + r"tanabata_tree" + DELIM: "🎋", + DELIM + r"bamboo" + DELIM: "ðŸŽ", + DELIM + r"dolls" + DELIM: "🎎", + DELIM + r"flags" + DELIM: "ðŸŽ", + DELIM + r"wind_chime" + DELIM: "ðŸŽ", + DELIM + r"rice_scene" + DELIM: "🎑", + DELIM + r"red_envelope" + DELIM: "🧧", + DELIM + r"ribbon" + DELIM: "🎀", + DELIM + r"gift" + DELIM: "ðŸŽ", + DELIM + r"reminder_ribbon" + DELIM: "🎗ï¸", + DELIM + r"tickets" + DELIM: "🎟ï¸", + DELIM + r"ticket" + DELIM: "🎫", # # Award Medal # - DELIM + r'medal_military' + DELIM: '🎖ï¸', - DELIM + r'trophy' + DELIM: 'ðŸ†', - DELIM + r'medal_sports' + DELIM: 'ðŸ…', - DELIM + r'1st_place_medal' + DELIM: '🥇', - DELIM + r'2nd_place_medal' + DELIM: '🥈', - DELIM + r'3rd_place_medal' + DELIM: '🥉', - + DELIM + r"medal_military" + DELIM: "🎖ï¸", + DELIM + r"trophy" + DELIM: "ðŸ†", + DELIM + r"medal_sports" + DELIM: "ðŸ…", + DELIM + r"1st_place_medal" + DELIM: "🥇", + DELIM + r"2nd_place_medal" + DELIM: "🥈", + DELIM + r"3rd_place_medal" + DELIM: "🥉", # # Sport # - DELIM + r'soccer' + DELIM: 'âš½', - DELIM + r'baseball' + DELIM: 'âš¾', - DELIM + r'softball' + DELIM: '🥎', - DELIM + r'basketball' + DELIM: 'ðŸ€', - DELIM + r'volleyball' + DELIM: 'ðŸ', - DELIM + r'football' + DELIM: 'ðŸˆ', - DELIM + r'rugby_football' + DELIM: 'ðŸ‰', - DELIM + r'tennis' + DELIM: '🎾', - DELIM + r'flying_disc' + DELIM: 'ðŸ¥', - DELIM + r'bowling' + DELIM: '🎳', - DELIM + r'cricket_game' + DELIM: 'ðŸ', - DELIM + r'field_hockey' + DELIM: 'ðŸ‘', - DELIM + r'ice_hockey' + DELIM: 'ðŸ’', - DELIM + r'lacrosse' + DELIM: 'ðŸ¥', - DELIM + r'ping_pong' + DELIM: 'ðŸ“', - DELIM + r'badminton' + DELIM: 'ðŸ¸', - DELIM + r'boxing_glove' + DELIM: '🥊', - DELIM + r'martial_arts_uniform' + DELIM: '🥋', - DELIM + r'goal_net' + DELIM: '🥅', - DELIM + r'golf' + DELIM: '⛳', - DELIM + r'ice_skate' + DELIM: '⛸ï¸', - DELIM + r'fishing_pole_and_fish' + DELIM: '🎣', - DELIM + r'diving_mask' + DELIM: '🤿', - DELIM + r'running_shirt_with_sash' + 
DELIM: '🎽', - DELIM + r'ski' + DELIM: '🎿', - DELIM + r'sled' + DELIM: '🛷', - DELIM + r'curling_stone' + DELIM: '🥌', - + DELIM + r"soccer" + DELIM: "âš½", + DELIM + r"baseball" + DELIM: "âš¾", + DELIM + r"softball" + DELIM: "🥎", + DELIM + r"basketball" + DELIM: "ðŸ€", + DELIM + r"volleyball" + DELIM: "ðŸ", + DELIM + r"football" + DELIM: "ðŸˆ", + DELIM + r"rugby_football" + DELIM: "ðŸ‰", + DELIM + r"tennis" + DELIM: "🎾", + DELIM + r"flying_disc" + DELIM: "ðŸ¥", + DELIM + r"bowling" + DELIM: "🎳", + DELIM + r"cricket_game" + DELIM: "ðŸ", + DELIM + r"field_hockey" + DELIM: "ðŸ‘", + DELIM + r"ice_hockey" + DELIM: "ðŸ’", + DELIM + r"lacrosse" + DELIM: "ðŸ¥", + DELIM + r"ping_pong" + DELIM: "ðŸ“", + DELIM + r"badminton" + DELIM: "ðŸ¸", + DELIM + r"boxing_glove" + DELIM: "🥊", + DELIM + r"martial_arts_uniform" + DELIM: "🥋", + DELIM + r"goal_net" + DELIM: "🥅", + DELIM + r"golf" + DELIM: "⛳", + DELIM + r"ice_skate" + DELIM: "⛸ï¸", + DELIM + r"fishing_pole_and_fish" + DELIM: "🎣", + DELIM + r"diving_mask" + DELIM: "🤿", + DELIM + r"running_shirt_with_sash" + DELIM: "🎽", + DELIM + r"ski" + DELIM: "🎿", + DELIM + r"sled" + DELIM: "🛷", + DELIM + r"curling_stone" + DELIM: "🥌", # # Game # - DELIM + r'dart' + DELIM: '🎯', - DELIM + r'yo_yo' + DELIM: '🪀', - DELIM + r'kite' + DELIM: 'ðŸª', - DELIM + r'gun' + DELIM: '🔫', - DELIM + r'8ball' + DELIM: '🎱', - DELIM + r'crystal_ball' + DELIM: '🔮', - DELIM + r'magic_wand' + DELIM: '🪄', - DELIM + r'video_game' + DELIM: '🎮', - DELIM + r'joystick' + DELIM: '🕹ï¸', - DELIM + r'slot_machine' + DELIM: '🎰', - DELIM + r'game_die' + DELIM: '🎲', - DELIM + r'jigsaw' + DELIM: '🧩', - DELIM + r'teddy_bear' + DELIM: '🧸', - DELIM + r'pinata' + DELIM: '🪅', - DELIM + r'nesting_dolls' + DELIM: '🪆', - DELIM + r'spades' + DELIM: 'â™ ï¸', - DELIM + r'hearts' + DELIM: '♥ï¸', - DELIM + r'diamonds' + DELIM: '♦ï¸', - DELIM + r'clubs' + DELIM: '♣ï¸', - DELIM + r'chess_pawn' + DELIM: '♟ï¸', - DELIM + r'black_joker' + DELIM: 'ðŸƒ', - DELIM + r'mahjong' + DELIM: '🀄', - DELIM + 
r'flower_playing_cards' + DELIM: '🎴', - + DELIM + r"dart" + DELIM: "🎯", + DELIM + r"yo_yo" + DELIM: "🪀", + DELIM + r"kite" + DELIM: "ðŸª", + DELIM + r"gun" + DELIM: "🔫", + DELIM + r"8ball" + DELIM: "🎱", + DELIM + r"crystal_ball" + DELIM: "🔮", + DELIM + r"magic_wand" + DELIM: "🪄", + DELIM + r"video_game" + DELIM: "🎮", + DELIM + r"joystick" + DELIM: "🕹ï¸", + DELIM + r"slot_machine" + DELIM: "🎰", + DELIM + r"game_die" + DELIM: "🎲", + DELIM + r"jigsaw" + DELIM: "🧩", + DELIM + r"teddy_bear" + DELIM: "🧸", + DELIM + r"pinata" + DELIM: "🪅", + DELIM + r"nesting_dolls" + DELIM: "🪆", + DELIM + r"spades" + DELIM: "â™ ï¸", + DELIM + r"hearts" + DELIM: "♥ï¸", + DELIM + r"diamonds" + DELIM: "♦ï¸", + DELIM + r"clubs" + DELIM: "♣ï¸", + DELIM + r"chess_pawn" + DELIM: "♟ï¸", + DELIM + r"black_joker" + DELIM: "ðŸƒ", + DELIM + r"mahjong" + DELIM: "🀄", + DELIM + r"flower_playing_cards" + DELIM: "🎴", # # Arts & Crafts # - DELIM + r'performing_arts' + DELIM: '🎭', - DELIM + r'framed_picture' + DELIM: '🖼ï¸', - DELIM + r'art' + DELIM: '🎨', - DELIM + r'thread' + DELIM: '🧵', - DELIM + r'sewing_needle' + DELIM: '🪡', - DELIM + r'yarn' + DELIM: '🧶', - DELIM + r'knot' + DELIM: '🪢', - + DELIM + r"performing_arts" + DELIM: "🎭", + DELIM + r"framed_picture" + DELIM: "🖼ï¸", + DELIM + r"art" + DELIM: "🎨", + DELIM + r"thread" + DELIM: "🧵", + DELIM + r"sewing_needle" + DELIM: "🪡", + DELIM + r"yarn" + DELIM: "🧶", + DELIM + r"knot" + DELIM: "🪢", # # Clothing # - DELIM + r'eyeglasses' + DELIM: '👓', - DELIM + r'dark_sunglasses' + DELIM: '🕶ï¸', - DELIM + r'goggles' + DELIM: '🥽', - DELIM + r'lab_coat' + DELIM: '🥼', - DELIM + r'safety_vest' + DELIM: '🦺', - DELIM + r'necktie' + DELIM: '👔', - DELIM + r't?shirt' + DELIM: '👕', - DELIM + r'jeans' + DELIM: '👖', - DELIM + r'scarf' + DELIM: '🧣', - DELIM + r'gloves' + DELIM: '🧤', - DELIM + r'coat' + DELIM: '🧥', - DELIM + r'socks' + DELIM: '🧦', - DELIM + r'dress' + DELIM: '👗', - DELIM + r'kimono' + DELIM: '👘', - DELIM + r'sari' + DELIM: '🥻', - DELIM + 
r'one_piece_swimsuit' + DELIM: '🩱', - DELIM + r'swim_brief' + DELIM: '🩲', - DELIM + r'shorts' + DELIM: '🩳', - DELIM + r'bikini' + DELIM: '👙', - DELIM + r'womans_clothes' + DELIM: '👚', - DELIM + r'purse' + DELIM: '👛', - DELIM + r'handbag' + DELIM: '👜', - DELIM + r'pouch' + DELIM: 'ðŸ‘', - DELIM + r'shopping' + DELIM: 'ðŸ›ï¸', - DELIM + r'school_satchel' + DELIM: '🎒', - DELIM + r'thong_sandal' + DELIM: '🩴', - DELIM + r'(mans_)?shoe' + DELIM: '👞', - DELIM + r'athletic_shoe' + DELIM: '👟', - DELIM + r'hiking_boot' + DELIM: '🥾', - DELIM + r'flat_shoe' + DELIM: '🥿', - DELIM + r'high_heel' + DELIM: '👠', - DELIM + r'sandal' + DELIM: '👡', - DELIM + r'ballet_shoes' + DELIM: '🩰', - DELIM + r'boot' + DELIM: '👢', - DELIM + r'crown' + DELIM: '👑', - DELIM + r'womans_hat' + DELIM: '👒', - DELIM + r'tophat' + DELIM: '🎩', - DELIM + r'mortar_board' + DELIM: '🎓', - DELIM + r'billed_cap' + DELIM: '🧢', - DELIM + r'military_helmet' + DELIM: '🪖', - DELIM + r'rescue_worker_helmet' + DELIM: '⛑ï¸', - DELIM + r'prayer_beads' + DELIM: '📿', - DELIM + r'lipstick' + DELIM: '💄', - DELIM + r'ring' + DELIM: 'ðŸ’', - DELIM + r'gem' + DELIM: '💎', - + DELIM + r"eyeglasses" + DELIM: "👓", + DELIM + r"dark_sunglasses" + DELIM: "🕶ï¸", + DELIM + r"goggles" + DELIM: "🥽", + DELIM + r"lab_coat" + DELIM: "🥼", + DELIM + r"safety_vest" + DELIM: "🦺", + DELIM + r"necktie" + DELIM: "👔", + DELIM + r"t?shirt" + DELIM: "👕", + DELIM + r"jeans" + DELIM: "👖", + DELIM + r"scarf" + DELIM: "🧣", + DELIM + r"gloves" + DELIM: "🧤", + DELIM + r"coat" + DELIM: "🧥", + DELIM + r"socks" + DELIM: "🧦", + DELIM + r"dress" + DELIM: "👗", + DELIM + r"kimono" + DELIM: "👘", + DELIM + r"sari" + DELIM: "🥻", + DELIM + r"one_piece_swimsuit" + DELIM: "🩱", + DELIM + r"swim_brief" + DELIM: "🩲", + DELIM + r"shorts" + DELIM: "🩳", + DELIM + r"bikini" + DELIM: "👙", + DELIM + r"womans_clothes" + DELIM: "👚", + DELIM + r"purse" + DELIM: "👛", + DELIM + r"handbag" + DELIM: "👜", + DELIM + r"pouch" + DELIM: "ðŸ‘", + DELIM + r"shopping" + DELIM: "ðŸ›ï¸", + DELIM 
+ r"school_satchel" + DELIM: "🎒", + DELIM + r"thong_sandal" + DELIM: "🩴", + DELIM + r"(mans_)?shoe" + DELIM: "👞", + DELIM + r"athletic_shoe" + DELIM: "👟", + DELIM + r"hiking_boot" + DELIM: "🥾", + DELIM + r"flat_shoe" + DELIM: "🥿", + DELIM + r"high_heel" + DELIM: "👠", + DELIM + r"sandal" + DELIM: "👡", + DELIM + r"ballet_shoes" + DELIM: "🩰", + DELIM + r"boot" + DELIM: "👢", + DELIM + r"crown" + DELIM: "👑", + DELIM + r"womans_hat" + DELIM: "👒", + DELIM + r"tophat" + DELIM: "🎩", + DELIM + r"mortar_board" + DELIM: "🎓", + DELIM + r"billed_cap" + DELIM: "🧢", + DELIM + r"military_helmet" + DELIM: "🪖", + DELIM + r"rescue_worker_helmet" + DELIM: "⛑ï¸", + DELIM + r"prayer_beads" + DELIM: "📿", + DELIM + r"lipstick" + DELIM: "💄", + DELIM + r"ring" + DELIM: "ðŸ’", + DELIM + r"gem" + DELIM: "💎", # # Sound # - DELIM + r'mute' + DELIM: '🔇', - DELIM + r'speaker' + DELIM: '🔈', - DELIM + r'sound' + DELIM: '🔉', - DELIM + r'loud_sound' + DELIM: '🔊', - DELIM + r'loudspeaker' + DELIM: '📢', - DELIM + r'mega' + DELIM: '📣', - DELIM + r'postal_horn' + DELIM: '📯', - DELIM + r'bell' + DELIM: '🔔', - DELIM + r'no_bell' + DELIM: '🔕', - + DELIM + r"mute" + DELIM: "🔇", + DELIM + r"speaker" + DELIM: "🔈", + DELIM + r"sound" + DELIM: "🔉", + DELIM + r"loud_sound" + DELIM: "🔊", + DELIM + r"loudspeaker" + DELIM: "📢", + DELIM + r"mega" + DELIM: "📣", + DELIM + r"postal_horn" + DELIM: "📯", + DELIM + r"bell" + DELIM: "🔔", + DELIM + r"no_bell" + DELIM: "🔕", # # Music # - DELIM + r'musical_score' + DELIM: '🎼', - DELIM + r'musical_note' + DELIM: '🎵', - DELIM + r'notes' + DELIM: '🎶', - DELIM + r'studio_microphone' + DELIM: '🎙ï¸', - DELIM + r'level_slider' + DELIM: '🎚ï¸', - DELIM + r'control_knobs' + DELIM: '🎛ï¸', - DELIM + r'microphone' + DELIM: '🎤', - DELIM + r'headphones' + DELIM: '🎧', - DELIM + r'radio' + DELIM: '📻', - + DELIM + r"musical_score" + DELIM: "🎼", + DELIM + r"musical_note" + DELIM: "🎵", + DELIM + r"notes" + DELIM: "🎶", + DELIM + r"studio_microphone" + DELIM: "🎙ï¸", + DELIM + r"level_slider" + DELIM: 
"🎚ï¸", + DELIM + r"control_knobs" + DELIM: "🎛ï¸", + DELIM + r"microphone" + DELIM: "🎤", + DELIM + r"headphones" + DELIM: "🎧", + DELIM + r"radio" + DELIM: "📻", # # Musical Instrument # - DELIM + r'saxophone' + DELIM: '🎷', - DELIM + r'accordion' + DELIM: '🪗', - DELIM + r'guitar' + DELIM: '🎸', - DELIM + r'musical_keyboard' + DELIM: '🎹', - DELIM + r'trumpet' + DELIM: '🎺', - DELIM + r'violin' + DELIM: '🎻', - DELIM + r'banjo' + DELIM: '🪕', - DELIM + r'drum' + DELIM: 'ðŸ¥', - DELIM + r'long_drum' + DELIM: '🪘', - + DELIM + r"saxophone" + DELIM: "🎷", + DELIM + r"accordion" + DELIM: "🪗", + DELIM + r"guitar" + DELIM: "🎸", + DELIM + r"musical_keyboard" + DELIM: "🎹", + DELIM + r"trumpet" + DELIM: "🎺", + DELIM + r"violin" + DELIM: "🎻", + DELIM + r"banjo" + DELIM: "🪕", + DELIM + r"drum" + DELIM: "ðŸ¥", + DELIM + r"long_drum" + DELIM: "🪘", # # Phone # - DELIM + r'iphone' + DELIM: '📱', - DELIM + r'calling' + DELIM: '📲', - DELIM + r'phone' + DELIM: '☎ï¸', - DELIM + r'telephone(_receiver)?' + DELIM: '📞', - DELIM + r'pager' + DELIM: '📟', - DELIM + r'fax' + DELIM: '📠', - + DELIM + r"iphone" + DELIM: "📱", + DELIM + r"calling" + DELIM: "📲", + DELIM + r"phone" + DELIM: "☎ï¸", + DELIM + r"telephone(_receiver)?" 
+ DELIM: "📞", + DELIM + r"pager" + DELIM: "📟", + DELIM + r"fax" + DELIM: "📠", # # Computer # - DELIM + r'battery' + DELIM: '🔋', - DELIM + r'electric_plug' + DELIM: '🔌', - DELIM + r'computer' + DELIM: '💻', - DELIM + r'desktop_computer' + DELIM: '🖥ï¸', - DELIM + r'printer' + DELIM: '🖨ï¸', - DELIM + r'keyboard' + DELIM: '⌨ï¸', - DELIM + r'computer_mouse' + DELIM: '🖱ï¸', - DELIM + r'trackball' + DELIM: '🖲ï¸', - DELIM + r'minidisc' + DELIM: '💽', - DELIM + r'floppy_disk' + DELIM: '💾', - DELIM + r'cd' + DELIM: '💿', - DELIM + r'dvd' + DELIM: '📀', - DELIM + r'abacus' + DELIM: '🧮', - + DELIM + r"battery" + DELIM: "🔋", + DELIM + r"electric_plug" + DELIM: "🔌", + DELIM + r"computer" + DELIM: "💻", + DELIM + r"desktop_computer" + DELIM: "🖥ï¸", + DELIM + r"printer" + DELIM: "🖨ï¸", + DELIM + r"keyboard" + DELIM: "⌨ï¸", + DELIM + r"computer_mouse" + DELIM: "🖱ï¸", + DELIM + r"trackball" + DELIM: "🖲ï¸", + DELIM + r"minidisc" + DELIM: "💽", + DELIM + r"floppy_disk" + DELIM: "💾", + DELIM + r"cd" + DELIM: "💿", + DELIM + r"dvd" + DELIM: "📀", + DELIM + r"abacus" + DELIM: "🧮", # # Light & Video # - DELIM + r'movie_camera' + DELIM: '🎥', - DELIM + r'film_strip' + DELIM: '🎞ï¸', - DELIM + r'film_projector' + DELIM: '📽ï¸', - DELIM + r'clapper' + DELIM: '🎬', - DELIM + r'tv' + DELIM: '📺', - DELIM + r'camera' + DELIM: '📷', - DELIM + r'camera_flash' + DELIM: '📸', - DELIM + r'video_camera' + DELIM: '📹', - DELIM + r'vhs' + DELIM: '📼', - DELIM + r'mag' + DELIM: 'ðŸ”', - DELIM + r'mag_right' + DELIM: '🔎', - DELIM + r'candle' + DELIM: '🕯ï¸', - DELIM + r'bulb' + DELIM: '💡', - DELIM + r'flashlight' + DELIM: '🔦', - DELIM + r'(izakaya_)?lantern' + DELIM: 'ðŸ®', - DELIM + r'diya_lamp' + DELIM: '🪔', - + DELIM + r"movie_camera" + DELIM: "🎥", + DELIM + r"film_strip" + DELIM: "🎞ï¸", + DELIM + r"film_projector" + DELIM: "📽ï¸", + DELIM + r"clapper" + DELIM: "🎬", + DELIM + r"tv" + DELIM: "📺", + DELIM + r"camera" + DELIM: "📷", + DELIM + r"camera_flash" + DELIM: "📸", + DELIM + r"video_camera" + DELIM: "📹", + DELIM + 
r"vhs" + DELIM: "📼", + DELIM + r"mag" + DELIM: "ðŸ”", + DELIM + r"mag_right" + DELIM: "🔎", + DELIM + r"candle" + DELIM: "🕯ï¸", + DELIM + r"bulb" + DELIM: "💡", + DELIM + r"flashlight" + DELIM: "🔦", + DELIM + r"(izakaya_)?lantern" + DELIM: "ðŸ®", + DELIM + r"diya_lamp" + DELIM: "🪔", # # Book Paper # - DELIM + r'notebook_with_decorative_cover' + DELIM: '📔', - DELIM + r'closed_book' + DELIM: '📕', - DELIM + r'(open_)?book' + DELIM: '📖', - DELIM + r'green_book' + DELIM: '📗', - DELIM + r'blue_book' + DELIM: '📘', - DELIM + r'orange_book' + DELIM: '📙', - DELIM + r'books' + DELIM: '📚', - DELIM + r'notebook' + DELIM: '📓', - DELIM + r'ledger' + DELIM: '📒', - DELIM + r'page_with_curl' + DELIM: '📃', - DELIM + r'scroll' + DELIM: '📜', - DELIM + r'page_facing_up' + DELIM: '📄', - DELIM + r'newspaper' + DELIM: '📰', - DELIM + r'newspaper_roll' + DELIM: '🗞ï¸', - DELIM + r'bookmark_tabs' + DELIM: '📑', - DELIM + r'bookmark' + DELIM: '🔖', - DELIM + r'label' + DELIM: 'ðŸ·ï¸', - + DELIM + r"notebook_with_decorative_cover" + DELIM: "📔", + DELIM + r"closed_book" + DELIM: "📕", + DELIM + r"(open_)?book" + DELIM: "📖", + DELIM + r"green_book" + DELIM: "📗", + DELIM + r"blue_book" + DELIM: "📘", + DELIM + r"orange_book" + DELIM: "📙", + DELIM + r"books" + DELIM: "📚", + DELIM + r"notebook" + DELIM: "📓", + DELIM + r"ledger" + DELIM: "📒", + DELIM + r"page_with_curl" + DELIM: "📃", + DELIM + r"scroll" + DELIM: "📜", + DELIM + r"page_facing_up" + DELIM: "📄", + DELIM + r"newspaper" + DELIM: "📰", + DELIM + r"newspaper_roll" + DELIM: "🗞ï¸", + DELIM + r"bookmark_tabs" + DELIM: "📑", + DELIM + r"bookmark" + DELIM: "🔖", + DELIM + r"label" + DELIM: "ðŸ·ï¸", # # Money # - DELIM + r'moneybag' + DELIM: '💰', - DELIM + r'coin' + DELIM: '🪙', - DELIM + r'yen' + DELIM: '💴', - DELIM + r'dollar' + DELIM: '💵', - DELIM + r'euro' + DELIM: '💶', - DELIM + r'pound' + DELIM: '💷', - DELIM + r'money_with_wings' + DELIM: '💸', - DELIM + r'credit_card' + DELIM: '💳', - DELIM + r'receipt' + DELIM: '🧾', - DELIM + r'chart' + DELIM: '💹', - + 
DELIM + r"moneybag" + DELIM: "💰", + DELIM + r"coin" + DELIM: "🪙", + DELIM + r"yen" + DELIM: "💴", + DELIM + r"dollar" + DELIM: "💵", + DELIM + r"euro" + DELIM: "💶", + DELIM + r"pound" + DELIM: "💷", + DELIM + r"money_with_wings" + DELIM: "💸", + DELIM + r"credit_card" + DELIM: "💳", + DELIM + r"receipt" + DELIM: "🧾", + DELIM + r"chart" + DELIM: "💹", # # Mail # - DELIM + r'envelope' + DELIM: '✉ï¸', - DELIM + r'e-?mail' + DELIM: '📧', - DELIM + r'incoming_envelope' + DELIM: '📨', - DELIM + r'envelope_with_arrow' + DELIM: '📩', - DELIM + r'outbox_tray' + DELIM: '📤', - DELIM + r'inbox_tray' + DELIM: '📥', - DELIM + r'package' + DELIM: '📦', - DELIM + r'mailbox' + DELIM: '📫', - DELIM + r'mailbox_closed' + DELIM: '📪', - DELIM + r'mailbox_with_mail' + DELIM: '📬', - DELIM + r'mailbox_with_no_mail' + DELIM: '📭', - DELIM + r'postbox' + DELIM: '📮', - DELIM + r'ballot_box' + DELIM: '🗳ï¸', - + DELIM + r"envelope" + DELIM: "✉ï¸", + DELIM + r"e-?mail" + DELIM: "📧", + DELIM + r"incoming_envelope" + DELIM: "📨", + DELIM + r"envelope_with_arrow" + DELIM: "📩", + DELIM + r"outbox_tray" + DELIM: "📤", + DELIM + r"inbox_tray" + DELIM: "📥", + DELIM + r"package" + DELIM: "📦", + DELIM + r"mailbox" + DELIM: "📫", + DELIM + r"mailbox_closed" + DELIM: "📪", + DELIM + r"mailbox_with_mail" + DELIM: "📬", + DELIM + r"mailbox_with_no_mail" + DELIM: "📭", + DELIM + r"postbox" + DELIM: "📮", + DELIM + r"ballot_box" + DELIM: "🗳ï¸", # # Writing # - DELIM + r'pencil2' + DELIM: 'âœï¸', - DELIM + r'black_nib' + DELIM: '✒ï¸', - DELIM + r'fountain_pen' + DELIM: '🖋ï¸', - DELIM + r'pen' + DELIM: '🖊ï¸', - DELIM + r'paintbrush' + DELIM: '🖌ï¸', - DELIM + r'crayon' + DELIM: 'ðŸ–ï¸', - DELIM + r'(memo|pencil)' + DELIM: 'ðŸ“', - + DELIM + r"pencil2" + DELIM: "âœï¸", + DELIM + r"black_nib" + DELIM: "✒ï¸", + DELIM + r"fountain_pen" + DELIM: "🖋ï¸", + DELIM + r"pen" + DELIM: "🖊ï¸", + DELIM + r"paintbrush" + DELIM: "🖌ï¸", + DELIM + r"crayon" + DELIM: "ðŸ–ï¸", + DELIM + r"(memo|pencil)" + DELIM: "ðŸ“", # # Office # - DELIM + 
r'briefcase' + DELIM: '💼', - DELIM + r'file_folder' + DELIM: 'ðŸ“', - DELIM + r'open_file_folder' + DELIM: '📂', - DELIM + r'card_index_dividers' + DELIM: '🗂ï¸', - DELIM + r'date' + DELIM: '📅', - DELIM + r'calendar' + DELIM: '📆', - DELIM + r'spiral_notepad' + DELIM: '🗒ï¸', - DELIM + r'spiral_calendar' + DELIM: '🗓ï¸', - DELIM + r'card_index' + DELIM: '📇', - DELIM + r'chart_with_upwards_trend' + DELIM: '📈', - DELIM + r'chart_with_downwards_trend' + DELIM: '📉', - DELIM + r'bar_chart' + DELIM: '📊', - DELIM + r'clipboard' + DELIM: '📋', - DELIM + r'pushpin' + DELIM: '📌', - DELIM + r'round_pushpin' + DELIM: 'ðŸ“', - DELIM + r'paperclip' + DELIM: '📎', - DELIM + r'paperclips' + DELIM: '🖇ï¸', - DELIM + r'straight_ruler' + DELIM: 'ðŸ“', - DELIM + r'triangular_ruler' + DELIM: 'ðŸ“', - DELIM + r'scissors' + DELIM: '✂ï¸', - DELIM + r'card_file_box' + DELIM: '🗃ï¸', - DELIM + r'file_cabinet' + DELIM: '🗄ï¸', - DELIM + r'wastebasket' + DELIM: '🗑ï¸', - + DELIM + r"briefcase" + DELIM: "💼", + DELIM + r"file_folder" + DELIM: "ðŸ“", + DELIM + r"open_file_folder" + DELIM: "📂", + DELIM + r"card_index_dividers" + DELIM: "🗂ï¸", + DELIM + r"date" + DELIM: "📅", + DELIM + r"calendar" + DELIM: "📆", + DELIM + r"spiral_notepad" + DELIM: "🗒ï¸", + DELIM + r"spiral_calendar" + DELIM: "🗓ï¸", + DELIM + r"card_index" + DELIM: "📇", + DELIM + r"chart_with_upwards_trend" + DELIM: "📈", + DELIM + r"chart_with_downwards_trend" + DELIM: "📉", + DELIM + r"bar_chart" + DELIM: "📊", + DELIM + r"clipboard" + DELIM: "📋", + DELIM + r"pushpin" + DELIM: "📌", + DELIM + r"round_pushpin" + DELIM: "ðŸ“", + DELIM + r"paperclip" + DELIM: "📎", + DELIM + r"paperclips" + DELIM: "🖇ï¸", + DELIM + r"straight_ruler" + DELIM: "ðŸ“", + DELIM + r"triangular_ruler" + DELIM: "ðŸ“", + DELIM + r"scissors" + DELIM: "✂ï¸", + DELIM + r"card_file_box" + DELIM: "🗃ï¸", + DELIM + r"file_cabinet" + DELIM: "🗄ï¸", + DELIM + r"wastebasket" + DELIM: "🗑ï¸", # # Lock # - DELIM + r'lock' + DELIM: '🔒', - DELIM + r'unlock' + DELIM: '🔓', - DELIM + 
r'lock_with_ink_pen' + DELIM: 'ðŸ”', - DELIM + r'closed_lock_with_key' + DELIM: 'ðŸ”', - DELIM + r'key' + DELIM: '🔑', - DELIM + r'old_key' + DELIM: 'ðŸ—ï¸', - + DELIM + r"lock" + DELIM: "🔒", + DELIM + r"unlock" + DELIM: "🔓", + DELIM + r"lock_with_ink_pen" + DELIM: "ðŸ”", + DELIM + r"closed_lock_with_key" + DELIM: "ðŸ”", + DELIM + r"key" + DELIM: "🔑", + DELIM + r"old_key" + DELIM: "ðŸ—ï¸", # # Tool # - DELIM + r'hammer' + DELIM: '🔨', - DELIM + r'axe' + DELIM: '🪓', - DELIM + r'pick' + DELIM: 'â›ï¸', - DELIM + r'hammer_and_pick' + DELIM: 'âš’ï¸', - DELIM + r'hammer_and_wrench' + DELIM: '🛠ï¸', - DELIM + r'dagger' + DELIM: '🗡ï¸', - DELIM + r'crossed_swords' + DELIM: 'âš”ï¸', - DELIM + r'bomb' + DELIM: '💣', - DELIM + r'boomerang' + DELIM: '🪃', - DELIM + r'bow_and_arrow' + DELIM: 'ðŸ¹', - DELIM + r'shield' + DELIM: '🛡ï¸', - DELIM + r'carpentry_saw' + DELIM: '🪚', - DELIM + r'wrench' + DELIM: '🔧', - DELIM + r'screwdriver' + DELIM: '🪛', - DELIM + r'nut_and_bolt' + DELIM: '🔩', - DELIM + r'gear' + DELIM: 'âš™ï¸', - DELIM + r'clamp' + DELIM: '🗜ï¸', - DELIM + r'balance_scale' + DELIM: 'âš–ï¸', - DELIM + r'probing_cane' + DELIM: '🦯', - DELIM + r'link' + DELIM: '🔗', - DELIM + r'chains' + DELIM: '⛓ï¸', - DELIM + r'hook' + DELIM: 'ðŸª', - DELIM + r'toolbox' + DELIM: '🧰', - DELIM + r'magnet' + DELIM: '🧲', - DELIM + r'ladder' + DELIM: '🪜', - + DELIM + r"hammer" + DELIM: "🔨", + DELIM + r"axe" + DELIM: "🪓", + DELIM + r"pick" + DELIM: "â›ï¸", + DELIM + r"hammer_and_pick" + DELIM: "âš’ï¸", + DELIM + r"hammer_and_wrench" + DELIM: "🛠ï¸", + DELIM + r"dagger" + DELIM: "🗡ï¸", + DELIM + r"crossed_swords" + DELIM: "âš”ï¸", + DELIM + r"bomb" + DELIM: "💣", + DELIM + r"boomerang" + DELIM: "🪃", + DELIM + r"bow_and_arrow" + DELIM: "ðŸ¹", + DELIM + r"shield" + DELIM: "🛡ï¸", + DELIM + r"carpentry_saw" + DELIM: "🪚", + DELIM + r"wrench" + DELIM: "🔧", + DELIM + r"screwdriver" + DELIM: "🪛", + DELIM + r"nut_and_bolt" + DELIM: "🔩", + DELIM + r"gear" + DELIM: "âš™ï¸", + DELIM + r"clamp" + DELIM: "🗜ï¸", + 
DELIM + r"balance_scale" + DELIM: "âš–ï¸", + DELIM + r"probing_cane" + DELIM: "🦯", + DELIM + r"link" + DELIM: "🔗", + DELIM + r"chains" + DELIM: "⛓ï¸", + DELIM + r"hook" + DELIM: "ðŸª", + DELIM + r"toolbox" + DELIM: "🧰", + DELIM + r"magnet" + DELIM: "🧲", + DELIM + r"ladder" + DELIM: "🪜", # # Science # - DELIM + r'alembic' + DELIM: 'âš—ï¸', - DELIM + r'test_tube' + DELIM: '🧪', - DELIM + r'petri_dish' + DELIM: '🧫', - DELIM + r'dna' + DELIM: '🧬', - DELIM + r'microscope' + DELIM: '🔬', - DELIM + r'telescope' + DELIM: '🔭', - DELIM + r'satellite' + DELIM: '📡', - + DELIM + r"alembic" + DELIM: "âš—ï¸", + DELIM + r"test_tube" + DELIM: "🧪", + DELIM + r"petri_dish" + DELIM: "🧫", + DELIM + r"dna" + DELIM: "🧬", + DELIM + r"microscope" + DELIM: "🔬", + DELIM + r"telescope" + DELIM: "🔭", + DELIM + r"satellite" + DELIM: "📡", # # Medical # - DELIM + r'syringe' + DELIM: '💉', - DELIM + r'drop_of_blood' + DELIM: '🩸', - DELIM + r'pill' + DELIM: '💊', - DELIM + r'adhesive_bandage' + DELIM: '🩹', - DELIM + r'stethoscope' + DELIM: '🩺', - + DELIM + r"syringe" + DELIM: "💉", + DELIM + r"drop_of_blood" + DELIM: "🩸", + DELIM + r"pill" + DELIM: "💊", + DELIM + r"adhesive_bandage" + DELIM: "🩹", + DELIM + r"stethoscope" + DELIM: "🩺", # # Household # - DELIM + r'door' + DELIM: '🚪', - DELIM + r'elevator' + DELIM: '🛗', - DELIM + r'mirror' + DELIM: '🪞', - DELIM + r'window' + DELIM: '🪟', - DELIM + r'bed' + DELIM: 'ðŸ›ï¸', - DELIM + r'couch_and_lamp' + DELIM: '🛋ï¸', - DELIM + r'chair' + DELIM: '🪑', - DELIM + r'toilet' + DELIM: '🚽', - DELIM + r'plunger' + DELIM: '🪠', - DELIM + r'shower' + DELIM: '🚿', - DELIM + r'bathtub' + DELIM: 'ðŸ›', - DELIM + r'mouse_trap' + DELIM: '🪤', - DELIM + r'razor' + DELIM: '🪒', - DELIM + r'lotion_bottle' + DELIM: '🧴', - DELIM + r'safety_pin' + DELIM: '🧷', - DELIM + r'broom' + DELIM: '🧹', - DELIM + r'basket' + DELIM: '🧺', - DELIM + r'roll_of_paper' + DELIM: '🧻', - DELIM + r'bucket' + DELIM: '🪣', - DELIM + r'soap' + DELIM: '🧼', - DELIM + r'toothbrush' + DELIM: '🪥', - DELIM + 
r'sponge' + DELIM: '🧽', - DELIM + r'fire_extinguisher' + DELIM: '🧯', - DELIM + r'shopping_cart' + DELIM: '🛒', - + DELIM + r"door" + DELIM: "🚪", + DELIM + r"elevator" + DELIM: "🛗", + DELIM + r"mirror" + DELIM: "🪞", + DELIM + r"window" + DELIM: "🪟", + DELIM + r"bed" + DELIM: "ðŸ›ï¸", + DELIM + r"couch_and_lamp" + DELIM: "🛋ï¸", + DELIM + r"chair" + DELIM: "🪑", + DELIM + r"toilet" + DELIM: "🚽", + DELIM + r"plunger" + DELIM: "🪠", + DELIM + r"shower" + DELIM: "🚿", + DELIM + r"bathtub" + DELIM: "ðŸ›", + DELIM + r"mouse_trap" + DELIM: "🪤", + DELIM + r"razor" + DELIM: "🪒", + DELIM + r"lotion_bottle" + DELIM: "🧴", + DELIM + r"safety_pin" + DELIM: "🧷", + DELIM + r"broom" + DELIM: "🧹", + DELIM + r"basket" + DELIM: "🧺", + DELIM + r"roll_of_paper" + DELIM: "🧻", + DELIM + r"bucket" + DELIM: "🪣", + DELIM + r"soap" + DELIM: "🧼", + DELIM + r"toothbrush" + DELIM: "🪥", + DELIM + r"sponge" + DELIM: "🧽", + DELIM + r"fire_extinguisher" + DELIM: "🧯", + DELIM + r"shopping_cart" + DELIM: "🛒", # # Other Object # - DELIM + r'smoking' + DELIM: '🚬', - DELIM + r'coffin' + DELIM: 'âš°ï¸', - DELIM + r'headstone' + DELIM: '🪦', - DELIM + r'funeral_urn' + DELIM: 'âš±ï¸', - DELIM + r'nazar_amulet' + DELIM: '🧿', - DELIM + r'moyai' + DELIM: '🗿', - DELIM + r'placard' + DELIM: '🪧', - + DELIM + r"smoking" + DELIM: "🚬", + DELIM + r"coffin" + DELIM: "âš°ï¸", + DELIM + r"headstone" + DELIM: "🪦", + DELIM + r"funeral_urn" + DELIM: "âš±ï¸", + DELIM + r"nazar_amulet" + DELIM: "🧿", + DELIM + r"moyai" + DELIM: "🗿", + DELIM + r"placard" + DELIM: "🪧", # # Transport Sign # - DELIM + r'atm' + DELIM: 'ðŸ§', - DELIM + r'put_litter_in_its_place' + DELIM: '🚮', - DELIM + r'potable_water' + DELIM: '🚰', - DELIM + r'wheelchair' + DELIM: '♿', - DELIM + r'mens' + DELIM: '🚹', - DELIM + r'womens' + DELIM: '🚺', - DELIM + r'restroom' + DELIM: '🚻', - DELIM + r'baby_symbol' + DELIM: '🚼', - DELIM + r'wc' + DELIM: '🚾', - DELIM + r'passport_control' + DELIM: '🛂', - DELIM + r'customs' + DELIM: '🛃', - DELIM + r'baggage_claim' + DELIM: '🛄', 
- DELIM + r'left_luggage' + DELIM: '🛅', - + DELIM + r"atm" + DELIM: "ðŸ§", + DELIM + r"put_litter_in_its_place" + DELIM: "🚮", + DELIM + r"potable_water" + DELIM: "🚰", + DELIM + r"wheelchair" + DELIM: "♿", + DELIM + r"mens" + DELIM: "🚹", + DELIM + r"womens" + DELIM: "🚺", + DELIM + r"restroom" + DELIM: "🚻", + DELIM + r"baby_symbol" + DELIM: "🚼", + DELIM + r"wc" + DELIM: "🚾", + DELIM + r"passport_control" + DELIM: "🛂", + DELIM + r"customs" + DELIM: "🛃", + DELIM + r"baggage_claim" + DELIM: "🛄", + DELIM + r"left_luggage" + DELIM: "🛅", # # Warning # - DELIM + r'warning' + DELIM: 'âš ï¸', - DELIM + r'children_crossing' + DELIM: '🚸', - DELIM + r'no_entry' + DELIM: 'â›”', - DELIM + r'no_entry_sign' + DELIM: '🚫', - DELIM + r'no_bicycles' + DELIM: '🚳', - DELIM + r'no_smoking' + DELIM: '🚭', - DELIM + r'do_not_litter' + DELIM: '🚯', - DELIM + r'non-potable_water' + DELIM: '🚱', - DELIM + r'no_pedestrians' + DELIM: '🚷', - DELIM + r'no_mobile_phones' + DELIM: '📵', - DELIM + r'underage' + DELIM: '🔞', - DELIM + r'radioactive' + DELIM: '☢ï¸', - DELIM + r'biohazard' + DELIM: '☣ï¸', - + DELIM + r"warning" + DELIM: "âš ï¸", + DELIM + r"children_crossing" + DELIM: "🚸", + DELIM + r"no_entry" + DELIM: "â›”", + DELIM + r"no_entry_sign" + DELIM: "🚫", + DELIM + r"no_bicycles" + DELIM: "🚳", + DELIM + r"no_smoking" + DELIM: "🚭", + DELIM + r"do_not_litter" + DELIM: "🚯", + DELIM + r"non-potable_water" + DELIM: "🚱", + DELIM + r"no_pedestrians" + DELIM: "🚷", + DELIM + r"no_mobile_phones" + DELIM: "📵", + DELIM + r"underage" + DELIM: "🔞", + DELIM + r"radioactive" + DELIM: "☢ï¸", + DELIM + r"biohazard" + DELIM: "☣ï¸", # # Arrow # - DELIM + r'arrow_up' + DELIM: '⬆ï¸', - DELIM + r'arrow_upper_right' + DELIM: '↗ï¸', - DELIM + r'arrow_right' + DELIM: 'âž¡ï¸', - DELIM + r'arrow_lower_right' + DELIM: '↘ï¸', - DELIM + r'arrow_down' + DELIM: '⬇ï¸', - DELIM + r'arrow_lower_left' + DELIM: '↙ï¸', - DELIM + r'arrow_left' + DELIM: '⬅ï¸', - DELIM + r'arrow_upper_left' + DELIM: '↖ï¸', - DELIM + r'arrow_up_down' + 
DELIM: '↕ï¸', - DELIM + r'left_right_arrow' + DELIM: '↔ï¸', - DELIM + r'leftwards_arrow_with_hook' + DELIM: '↩ï¸', - DELIM + r'arrow_right_hook' + DELIM: '↪ï¸', - DELIM + r'arrow_heading_up' + DELIM: '⤴ï¸', - DELIM + r'arrow_heading_down' + DELIM: '⤵ï¸', - DELIM + r'arrows_clockwise' + DELIM: '🔃', - DELIM + r'arrows_counterclockwise' + DELIM: '🔄', - DELIM + r'back' + DELIM: '🔙', - DELIM + r'end' + DELIM: '🔚', - DELIM + r'on' + DELIM: '🔛', - DELIM + r'soon' + DELIM: '🔜', - DELIM + r'top' + DELIM: 'ðŸ”', - + DELIM + r"arrow_up" + DELIM: "⬆ï¸", + DELIM + r"arrow_upper_right" + DELIM: "↗ï¸", + DELIM + r"arrow_right" + DELIM: "âž¡ï¸", + DELIM + r"arrow_lower_right" + DELIM: "↘ï¸", + DELIM + r"arrow_down" + DELIM: "⬇ï¸", + DELIM + r"arrow_lower_left" + DELIM: "↙ï¸", + DELIM + r"arrow_left" + DELIM: "⬅ï¸", + DELIM + r"arrow_upper_left" + DELIM: "↖ï¸", + DELIM + r"arrow_up_down" + DELIM: "↕ï¸", + DELIM + r"left_right_arrow" + DELIM: "↔ï¸", + DELIM + r"leftwards_arrow_with_hook" + DELIM: "↩ï¸", + DELIM + r"arrow_right_hook" + DELIM: "↪ï¸", + DELIM + r"arrow_heading_up" + DELIM: "⤴ï¸", + DELIM + r"arrow_heading_down" + DELIM: "⤵ï¸", + DELIM + r"arrows_clockwise" + DELIM: "🔃", + DELIM + r"arrows_counterclockwise" + DELIM: "🔄", + DELIM + r"back" + DELIM: "🔙", + DELIM + r"end" + DELIM: "🔚", + DELIM + r"on" + DELIM: "🔛", + DELIM + r"soon" + DELIM: "🔜", + DELIM + r"top" + DELIM: "ðŸ”", # # Religion # - DELIM + r'place_of_worship' + DELIM: 'ðŸ›', - DELIM + r'atom_symbol' + DELIM: 'âš›ï¸', - DELIM + r'om' + DELIM: '🕉ï¸', - DELIM + r'star_of_david' + DELIM: '✡ï¸', - DELIM + r'wheel_of_dharma' + DELIM: '☸ï¸', - DELIM + r'yin_yang' + DELIM: '☯ï¸', - DELIM + r'latin_cross' + DELIM: 'âœï¸', - DELIM + r'orthodox_cross' + DELIM: '☦ï¸', - DELIM + r'star_and_crescent' + DELIM: '☪ï¸', - DELIM + r'peace_symbol' + DELIM: '☮ï¸', - DELIM + r'menorah' + DELIM: '🕎', - DELIM + r'six_pointed_star' + DELIM: '🔯', - + DELIM + r"place_of_worship" + DELIM: "ðŸ›", + DELIM + r"atom_symbol" + DELIM: 
"âš›ï¸", + DELIM + r"om" + DELIM: "🕉ï¸", + DELIM + r"star_of_david" + DELIM: "✡ï¸", + DELIM + r"wheel_of_dharma" + DELIM: "☸ï¸", + DELIM + r"yin_yang" + DELIM: "☯ï¸", + DELIM + r"latin_cross" + DELIM: "âœï¸", + DELIM + r"orthodox_cross" + DELIM: "☦ï¸", + DELIM + r"star_and_crescent" + DELIM: "☪ï¸", + DELIM + r"peace_symbol" + DELIM: "☮ï¸", + DELIM + r"menorah" + DELIM: "🕎", + DELIM + r"six_pointed_star" + DELIM: "🔯", # # Zodiac # - DELIM + r'aries' + DELIM: '♈', - DELIM + r'taurus' + DELIM: '♉', - DELIM + r'gemini' + DELIM: '♊', - DELIM + r'cancer' + DELIM: '♋', - DELIM + r'leo' + DELIM: '♌', - DELIM + r'virgo' + DELIM: 'â™', - DELIM + r'libra' + DELIM: '♎', - DELIM + r'scorpius' + DELIM: 'â™', - DELIM + r'sagittarius' + DELIM: 'â™', - DELIM + r'capricorn' + DELIM: '♑', - DELIM + r'aquarius' + DELIM: 'â™’', - DELIM + r'pisces' + DELIM: '♓', - DELIM + r'ophiuchus' + DELIM: '⛎', - + DELIM + r"aries" + DELIM: "♈", + DELIM + r"taurus" + DELIM: "♉", + DELIM + r"gemini" + DELIM: "♊", + DELIM + r"cancer" + DELIM: "♋", + DELIM + r"leo" + DELIM: "♌", + DELIM + r"virgo" + DELIM: "â™", + DELIM + r"libra" + DELIM: "♎", + DELIM + r"scorpius" + DELIM: "â™", + DELIM + r"sagittarius" + DELIM: "â™", + DELIM + r"capricorn" + DELIM: "♑", + DELIM + r"aquarius" + DELIM: "â™’", + DELIM + r"pisces" + DELIM: "♓", + DELIM + r"ophiuchus" + DELIM: "⛎", # # Av Symbol # - DELIM + r'twisted_rightwards_arrows' + DELIM: '🔀', - DELIM + r'repeat' + DELIM: 'ðŸ”', - DELIM + r'repeat_one' + DELIM: '🔂', - DELIM + r'arrow_forward' + DELIM: 'â–¶ï¸', - DELIM + r'fast_forward' + DELIM: 'â©', - DELIM + r'next_track_button' + DELIM: 'â­ï¸', - DELIM + r'play_or_pause_button' + DELIM: 'â¯ï¸', - DELIM + r'arrow_backward' + DELIM: 'â—€ï¸', - DELIM + r'rewind' + DELIM: 'âª', - DELIM + r'previous_track_button' + DELIM: 'â®ï¸', - DELIM + r'arrow_up_small' + DELIM: '🔼', - DELIM + r'arrow_double_up' + DELIM: 'â«', - DELIM + r'arrow_down_small' + DELIM: '🔽', - DELIM + r'arrow_double_down' + DELIM: 'â¬', - DELIM + 
r'pause_button' + DELIM: 'â¸ï¸', - DELIM + r'stop_button' + DELIM: 'â¹ï¸', - DELIM + r'record_button' + DELIM: 'âºï¸', - DELIM + r'eject_button' + DELIM: 'âï¸', - DELIM + r'cinema' + DELIM: '🎦', - DELIM + r'low_brightness' + DELIM: '🔅', - DELIM + r'high_brightness' + DELIM: '🔆', - DELIM + r'signal_strength' + DELIM: '📶', - DELIM + r'vibration_mode' + DELIM: '📳', - DELIM + r'mobile_phone_off' + DELIM: '📴', - + DELIM + r"twisted_rightwards_arrows" + DELIM: "🔀", + DELIM + r"repeat" + DELIM: "ðŸ”", + DELIM + r"repeat_one" + DELIM: "🔂", + DELIM + r"arrow_forward" + DELIM: "â–¶ï¸", + DELIM + r"fast_forward" + DELIM: "â©", + DELIM + r"next_track_button" + DELIM: "â­ï¸", + DELIM + r"play_or_pause_button" + DELIM: "â¯ï¸", + DELIM + r"arrow_backward" + DELIM: "â—€ï¸", + DELIM + r"rewind" + DELIM: "âª", + DELIM + r"previous_track_button" + DELIM: "â®ï¸", + DELIM + r"arrow_up_small" + DELIM: "🔼", + DELIM + r"arrow_double_up" + DELIM: "â«", + DELIM + r"arrow_down_small" + DELIM: "🔽", + DELIM + r"arrow_double_down" + DELIM: "â¬", + DELIM + r"pause_button" + DELIM: "â¸ï¸", + DELIM + r"stop_button" + DELIM: "â¹ï¸", + DELIM + r"record_button" + DELIM: "âºï¸", + DELIM + r"eject_button" + DELIM: "âï¸", + DELIM + r"cinema" + DELIM: "🎦", + DELIM + r"low_brightness" + DELIM: "🔅", + DELIM + r"high_brightness" + DELIM: "🔆", + DELIM + r"signal_strength" + DELIM: "📶", + DELIM + r"vibration_mode" + DELIM: "📳", + DELIM + r"mobile_phone_off" + DELIM: "📴", # # Gender # - DELIM + r'female_sign' + DELIM: '♀ï¸', - DELIM + r'male_sign' + DELIM: '♂ï¸', - DELIM + r'transgender_symbol' + DELIM: 'âš§ï¸', - + DELIM + r"female_sign" + DELIM: "♀ï¸", + DELIM + r"male_sign" + DELIM: "♂ï¸", + DELIM + r"transgender_symbol" + DELIM: "âš§ï¸", # # Math # - DELIM + r'heavy_multiplication_x' + DELIM: '✖ï¸', - DELIM + r'heavy_plus_sign' + DELIM: 'âž•', - DELIM + r'heavy_minus_sign' + DELIM: 'âž–', - DELIM + r'heavy_division_sign' + DELIM: 'âž—', - DELIM + r'infinity' + DELIM: '♾ï¸', - + DELIM + 
r"heavy_multiplication_x" + DELIM: "✖ï¸", + DELIM + r"heavy_plus_sign" + DELIM: "âž•", # noqa: RUF001 + DELIM + r"heavy_minus_sign" + DELIM: "âž–", # noqa: RUF001 + DELIM + r"heavy_division_sign" + DELIM: "âž—", + DELIM + r"infinity" + DELIM: "♾ï¸", # # Punctuation # - DELIM + r'bangbang' + DELIM: '‼ï¸', - DELIM + r'interrobang' + DELIM: 'â‰ï¸', - DELIM + r'question' + DELIM: 'â“', - DELIM + r'grey_question' + DELIM: 'â”', - DELIM + r'grey_exclamation' + DELIM: 'â•', - DELIM + r'(heavy_exclamation_mark|exclamation)' + DELIM: 'â—', - DELIM + r'wavy_dash' + DELIM: '〰ï¸', - + DELIM + r"bangbang" + DELIM: "‼ï¸", + DELIM + r"interrobang" + DELIM: "â‰ï¸", + DELIM + r"question" + DELIM: "â“", + DELIM + r"grey_question" + DELIM: "â”", + DELIM + r"grey_exclamation" + DELIM: "â•", + DELIM + r"(heavy_exclamation_mark|exclamation)" + DELIM: "â—", + DELIM + r"wavy_dash" + DELIM: "〰ï¸", # # Currency # - DELIM + r'currency_exchange' + DELIM: '💱', - DELIM + r'heavy_dollar_sign' + DELIM: '💲', - + DELIM + r"currency_exchange" + DELIM: "💱", + DELIM + r"heavy_dollar_sign" + DELIM: "💲", # # Other Symbol # - DELIM + r'medical_symbol' + DELIM: 'âš•ï¸', - DELIM + r'recycle' + DELIM: 'â™»ï¸', - DELIM + r'fleur_de_lis' + DELIM: 'âšœï¸', - DELIM + r'trident' + DELIM: '🔱', - DELIM + r'name_badge' + DELIM: '📛', - DELIM + r'beginner' + DELIM: '🔰', - DELIM + r'o' + DELIM: 'â­•', - DELIM + r'white_check_mark' + DELIM: '✅', - DELIM + r'ballot_box_with_check' + DELIM: '☑ï¸', - DELIM + r'heavy_check_mark' + DELIM: '✔ï¸', - DELIM + r'x' + DELIM: 'âŒ', - DELIM + r'negative_squared_cross_mark' + DELIM: 'âŽ', - DELIM + r'curly_loop' + DELIM: 'âž°', - DELIM + r'loop' + DELIM: 'âž¿', - DELIM + r'part_alternation_mark' + DELIM: '〽ï¸', - DELIM + r'eight_spoked_asterisk' + DELIM: '✳ï¸', - DELIM + r'eight_pointed_black_star' + DELIM: '✴ï¸', - DELIM + r'sparkle' + DELIM: 'â‡ï¸', - DELIM + r'copyright' + DELIM: '©ï¸', - DELIM + r'registered' + DELIM: '®ï¸', - DELIM + r'tm' + DELIM: 'â„¢ï¸', - + DELIM + 
r"medical_symbol" + DELIM: "âš•ï¸", + DELIM + r"recycle" + DELIM: "â™»ï¸", + DELIM + r"fleur_de_lis" + DELIM: "âšœï¸", + DELIM + r"trident" + DELIM: "🔱", + DELIM + r"name_badge" + DELIM: "📛", + DELIM + r"beginner" + DELIM: "🔰", + DELIM + r"o" + DELIM: "â­•", + DELIM + r"white_check_mark" + DELIM: "✅", + DELIM + r"ballot_box_with_check" + DELIM: "☑ï¸", + DELIM + r"heavy_check_mark" + DELIM: "✔ï¸", + DELIM + r"x" + DELIM: "âŒ", + DELIM + r"negative_squared_cross_mark" + DELIM: "âŽ", + DELIM + r"curly_loop" + DELIM: "âž°", + DELIM + r"loop" + DELIM: "âž¿", + DELIM + r"part_alternation_mark" + DELIM: "〽ï¸", + DELIM + r"eight_spoked_asterisk" + DELIM: "✳ï¸", + DELIM + r"eight_pointed_black_star" + DELIM: "✴ï¸", + DELIM + r"sparkle" + DELIM: "â‡ï¸", + DELIM + r"copyright" + DELIM: "©ï¸", + DELIM + r"registered" + DELIM: "®ï¸", + DELIM + r"tm" + DELIM: "â„¢ï¸", # # Keycap # - DELIM + r'hash' + DELIM: '#ï¸âƒ£', - DELIM + r'asterisk' + DELIM: '*ï¸âƒ£', - DELIM + r'zero' + DELIM: '0ï¸âƒ£', - DELIM + r'one' + DELIM: '1ï¸âƒ£', - DELIM + r'two' + DELIM: '2ï¸âƒ£', - DELIM + r'three' + DELIM: '3ï¸âƒ£', - DELIM + r'four' + DELIM: '4ï¸âƒ£', - DELIM + r'five' + DELIM: '5ï¸âƒ£', - DELIM + r'six' + DELIM: '6ï¸âƒ£', - DELIM + r'seven' + DELIM: '7ï¸âƒ£', - DELIM + r'eight' + DELIM: '8ï¸âƒ£', - DELIM + r'nine' + DELIM: '9ï¸âƒ£', - DELIM + r'keycap_ten' + DELIM: '🔟', - + DELIM + r"hash" + DELIM: "#ï¸âƒ£", + DELIM + r"asterisk" + DELIM: "*ï¸âƒ£", + DELIM + r"zero" + DELIM: "0ï¸âƒ£", + DELIM + r"one" + DELIM: "1ï¸âƒ£", + DELIM + r"two" + DELIM: "2ï¸âƒ£", + DELIM + r"three" + DELIM: "3ï¸âƒ£", + DELIM + r"four" + DELIM: "4ï¸âƒ£", + DELIM + r"five" + DELIM: "5ï¸âƒ£", + DELIM + r"six" + DELIM: "6ï¸âƒ£", + DELIM + r"seven" + DELIM: "7ï¸âƒ£", + DELIM + r"eight" + DELIM: "8ï¸âƒ£", + DELIM + r"nine" + DELIM: "9ï¸âƒ£", + DELIM + r"keycap_ten" + DELIM: "🔟", # # Alphanum # - DELIM + r'capital_abcd' + DELIM: '🔠', - DELIM + r'abcd' + DELIM: '🔡', - DELIM + r'1234' + DELIM: '🔢', - DELIM + r'symbols' + 
DELIM: '🔣', - DELIM + r'abc' + DELIM: '🔤', - DELIM + r'a' + DELIM: '🅰ï¸', - DELIM + r'ab' + DELIM: '🆎', - DELIM + r'b' + DELIM: '🅱ï¸', - DELIM + r'cl' + DELIM: '🆑', - DELIM + r'cool' + DELIM: '🆒', - DELIM + r'free' + DELIM: '🆓', - DELIM + r'information_source' + DELIM: 'ℹï¸', - DELIM + r'id' + DELIM: '🆔', - DELIM + r'm' + DELIM: 'â“‚ï¸', - DELIM + r'new' + DELIM: '🆕', - DELIM + r'ng' + DELIM: '🆖', - DELIM + r'o2' + DELIM: '🅾ï¸', - DELIM + r'ok' + DELIM: '🆗', - DELIM + r'parking' + DELIM: '🅿ï¸', - DELIM + r'sos' + DELIM: '🆘', - DELIM + r'up' + DELIM: '🆙', - DELIM + r'vs' + DELIM: '🆚', - DELIM + r'koko' + DELIM: 'ðŸˆ', - DELIM + r'sa' + DELIM: '🈂ï¸', - DELIM + r'u6708' + DELIM: '🈷ï¸', - DELIM + r'u6709' + DELIM: '🈶', - DELIM + r'u6307' + DELIM: '🈯', - DELIM + r'ideograph_advantage' + DELIM: 'ðŸ‰', - DELIM + r'u5272' + DELIM: '🈹', - DELIM + r'u7121' + DELIM: '🈚', - DELIM + r'u7981' + DELIM: '🈲', - DELIM + r'accept' + DELIM: '🉑', - DELIM + r'u7533' + DELIM: '🈸', - DELIM + r'u5408' + DELIM: '🈴', - DELIM + r'u7a7a' + DELIM: '🈳', - DELIM + r'congratulations' + DELIM: '㊗ï¸', - DELIM + r'secret' + DELIM: '㊙ï¸', - DELIM + r'u55b6' + DELIM: '🈺', - DELIM + r'u6e80' + DELIM: '🈵', - + DELIM + r"capital_abcd" + DELIM: "🔠", + DELIM + r"abcd" + DELIM: "🔡", + DELIM + r"1234" + DELIM: "🔢", + DELIM + r"symbols" + DELIM: "🔣", + DELIM + r"abc" + DELIM: "🔤", + DELIM + r"a" + DELIM: "🅰ï¸", + DELIM + r"ab" + DELIM: "🆎", + DELIM + r"b" + DELIM: "🅱ï¸", + DELIM + r"cl" + DELIM: "🆑", + DELIM + r"cool" + DELIM: "🆒", + DELIM + r"free" + DELIM: "🆓", + DELIM + r"information_source" + DELIM: "ℹï¸", # noqa: RUF001 + DELIM + r"id" + DELIM: "🆔", + DELIM + r"m" + DELIM: "â“‚ï¸", + DELIM + r"new" + DELIM: "🆕", + DELIM + r"ng" + DELIM: "🆖", + DELIM + r"o2" + DELIM: "🅾ï¸", + DELIM + r"ok" + DELIM: "🆗", + DELIM + r"parking" + DELIM: "🅿ï¸", + DELIM + r"sos" + DELIM: "🆘", + DELIM + r"up" + DELIM: "🆙", + DELIM + r"vs" + DELIM: "🆚", + DELIM + r"koko" + DELIM: "ðŸˆ", + DELIM + r"sa" + DELIM: "🈂ï¸", + DELIM + 
r"u6708" + DELIM: "🈷ï¸", + DELIM + r"u6709" + DELIM: "🈶", + DELIM + r"u6307" + DELIM: "🈯", + DELIM + r"ideograph_advantage" + DELIM: "ðŸ‰", + DELIM + r"u5272" + DELIM: "🈹", + DELIM + r"u7121" + DELIM: "🈚", + DELIM + r"u7981" + DELIM: "🈲", + DELIM + r"accept" + DELIM: "🉑", + DELIM + r"u7533" + DELIM: "🈸", + DELIM + r"u5408" + DELIM: "🈴", + DELIM + r"u7a7a" + DELIM: "🈳", + DELIM + r"congratulations" + DELIM: "㊗ï¸", + DELIM + r"secret" + DELIM: "㊙ï¸", + DELIM + r"u55b6" + DELIM: "🈺", + DELIM + r"u6e80" + DELIM: "🈵", # # Geometric # - DELIM + r'red_circle' + DELIM: '🔴', - DELIM + r'orange_circle' + DELIM: '🟠', - DELIM + r'yellow_circle' + DELIM: '🟡', - DELIM + r'green_circle' + DELIM: '🟢', - DELIM + r'large_blue_circle' + DELIM: '🔵', - DELIM + r'purple_circle' + DELIM: '🟣', - DELIM + r'brown_circle' + DELIM: '🟤', - DELIM + r'black_circle' + DELIM: 'âš«', - DELIM + r'white_circle' + DELIM: '⚪', - DELIM + r'red_square' + DELIM: '🟥', - DELIM + r'orange_square' + DELIM: '🟧', - DELIM + r'yellow_square' + DELIM: '🟨', - DELIM + r'green_square' + DELIM: '🟩', - DELIM + r'blue_square' + DELIM: '🟦', - DELIM + r'purple_square' + DELIM: '🟪', - DELIM + r'brown_square' + DELIM: '🟫', - DELIM + r'black_large_square' + DELIM: '⬛', - DELIM + r'white_large_square' + DELIM: '⬜', - DELIM + r'black_medium_square' + DELIM: 'â—¼ï¸', - DELIM + r'white_medium_square' + DELIM: 'â—»ï¸', - DELIM + r'black_medium_small_square' + DELIM: 'â—¾', - DELIM + r'white_medium_small_square' + DELIM: 'â—½', - DELIM + r'black_small_square' + DELIM: 'â–ªï¸', - DELIM + r'white_small_square' + DELIM: 'â–«ï¸', - DELIM + r'large_orange_diamond' + DELIM: '🔶', - DELIM + r'large_blue_diamond' + DELIM: '🔷', - DELIM + r'small_orange_diamond' + DELIM: '🔸', - DELIM + r'small_blue_diamond' + DELIM: '🔹', - DELIM + r'small_red_triangle' + DELIM: '🔺', - DELIM + r'small_red_triangle_down' + DELIM: '🔻', - DELIM + r'diamond_shape_with_a_dot_inside' + DELIM: '💠', - DELIM + r'radio_button' + DELIM: '🔘', - DELIM + 
r'white_square_button' + DELIM: '🔳', - DELIM + r'black_square_button' + DELIM: '🔲', - + DELIM + r"red_circle" + DELIM: "🔴", + DELIM + r"orange_circle" + DELIM: "🟠", + DELIM + r"yellow_circle" + DELIM: "🟡", + DELIM + r"green_circle" + DELIM: "🟢", + DELIM + r"large_blue_circle" + DELIM: "🔵", + DELIM + r"purple_circle" + DELIM: "🟣", + DELIM + r"brown_circle" + DELIM: "🟤", + DELIM + r"black_circle" + DELIM: "âš«", + DELIM + r"white_circle" + DELIM: "⚪", + DELIM + r"red_square" + DELIM: "🟥", + DELIM + r"orange_square" + DELIM: "🟧", + DELIM + r"yellow_square" + DELIM: "🟨", + DELIM + r"green_square" + DELIM: "🟩", + DELIM + r"blue_square" + DELIM: "🟦", + DELIM + r"purple_square" + DELIM: "🟪", + DELIM + r"brown_square" + DELIM: "🟫", + DELIM + r"black_large_square" + DELIM: "⬛", + DELIM + r"white_large_square" + DELIM: "⬜", + DELIM + r"black_medium_square" + DELIM: "â—¼ï¸", + DELIM + r"white_medium_square" + DELIM: "â—»ï¸", + DELIM + r"black_medium_small_square" + DELIM: "â—¾", + DELIM + r"white_medium_small_square" + DELIM: "â—½", + DELIM + r"black_small_square" + DELIM: "â–ªï¸", + DELIM + r"white_small_square" + DELIM: "â–«ï¸", + DELIM + r"large_orange_diamond" + DELIM: "🔶", + DELIM + r"large_blue_diamond" + DELIM: "🔷", + DELIM + r"small_orange_diamond" + DELIM: "🔸", + DELIM + r"small_blue_diamond" + DELIM: "🔹", + DELIM + r"small_red_triangle" + DELIM: "🔺", + DELIM + r"small_red_triangle_down" + DELIM: "🔻", + DELIM + r"diamond_shape_with_a_dot_inside" + DELIM: "💠", + DELIM + r"radio_button" + DELIM: "🔘", + DELIM + r"white_square_button" + DELIM: "🔳", + DELIM + r"black_square_button" + DELIM: "🔲", # # Flag # - DELIM + r'checkered_flag' + DELIM: 'ðŸ', - DELIM + r'triangular_flag_on_post' + DELIM: '🚩', - DELIM + r'crossed_flags' + DELIM: '🎌', - DELIM + r'black_flag' + DELIM: 'ðŸ´', - DELIM + r'white_flag' + DELIM: 'ðŸ³ï¸', - DELIM + r'rainbow_flag' + DELIM: 'ðŸ³ï¸â€ðŸŒˆ', - DELIM + r'transgender_flag' + DELIM: 'ðŸ³ï¸â€âš§ï¸', - DELIM + r'pirate_flag' + DELIM: 'ðŸ´â€â˜ ï¸', - 
+ DELIM + r"checkered_flag" + DELIM: "ðŸ", + DELIM + r"triangular_flag_on_post" + DELIM: "🚩", + DELIM + r"crossed_flags" + DELIM: "🎌", + DELIM + r"black_flag" + DELIM: "ðŸ´", + DELIM + r"white_flag" + DELIM: "ðŸ³ï¸", + DELIM + r"rainbow_flag" + DELIM: "ðŸ³ï¸â€ðŸŒˆ", + DELIM + r"transgender_flag" + DELIM: "ðŸ³ï¸â€âš§ï¸", + DELIM + r"pirate_flag" + DELIM: "ðŸ´â€â˜ ï¸", # # Country Flag # - DELIM + r'ascension_island' + DELIM: '🇦🇨', - DELIM + r'andorra' + DELIM: '🇦🇩', - DELIM + r'united_arab_emirates' + DELIM: '🇦🇪', - DELIM + r'afghanistan' + DELIM: '🇦🇫', - DELIM + r'antigua_barbuda' + DELIM: '🇦🇬', - DELIM + r'anguilla' + DELIM: '🇦🇮', - DELIM + r'albania' + DELIM: '🇦🇱', - DELIM + r'armenia' + DELIM: '🇦🇲', - DELIM + r'angola' + DELIM: '🇦🇴', - DELIM + r'antarctica' + DELIM: '🇦🇶', - DELIM + r'argentina' + DELIM: '🇦🇷', - DELIM + r'american_samoa' + DELIM: '🇦🇸', - DELIM + r'austria' + DELIM: '🇦🇹', - DELIM + r'australia' + DELIM: '🇦🇺', - DELIM + r'aruba' + DELIM: '🇦🇼', - DELIM + r'aland_islands' + DELIM: '🇦🇽', - DELIM + r'azerbaijan' + DELIM: '🇦🇿', - DELIM + r'bosnia_herzegovina' + DELIM: '🇧🇦', - DELIM + r'barbados' + DELIM: '🇧🇧', - DELIM + r'bangladesh' + DELIM: '🇧🇩', - DELIM + r'belgium' + DELIM: '🇧🇪', - DELIM + r'burkina_faso' + DELIM: '🇧🇫', - DELIM + r'bulgaria' + DELIM: '🇧🇬', - DELIM + r'bahrain' + DELIM: '🇧🇭', - DELIM + r'burundi' + DELIM: '🇧🇮', - DELIM + r'benin' + DELIM: '🇧🇯', - DELIM + r'st_barthelemy' + DELIM: '🇧🇱', - DELIM + r'bermuda' + DELIM: '🇧🇲', - DELIM + r'brunei' + DELIM: '🇧🇳', - DELIM + r'bolivia' + DELIM: '🇧🇴', - DELIM + r'caribbean_netherlands' + DELIM: '🇧🇶', - DELIM + r'brazil' + DELIM: '🇧🇷', - DELIM + r'bahamas' + DELIM: '🇧🇸', - DELIM + r'bhutan' + DELIM: '🇧🇹', - DELIM + r'bouvet_island' + DELIM: '🇧🇻', - DELIM + r'botswana' + DELIM: '🇧🇼', - DELIM + r'belarus' + DELIM: '🇧🇾', - DELIM + r'belize' + DELIM: '🇧🇿', - DELIM + r'canada' + DELIM: '🇨🇦', - DELIM + r'cocos_islands' + DELIM: '🇨🇨', - DELIM + r'congo_kinshasa' + DELIM: '🇨🇩', - DELIM + 
r'central_african_republic' + DELIM: '🇨🇫', - DELIM + r'congo_brazzaville' + DELIM: '🇨🇬', - DELIM + r'switzerland' + DELIM: '🇨🇭', - DELIM + r'cote_divoire' + DELIM: '🇨🇮', - DELIM + r'cook_islands' + DELIM: '🇨🇰', - DELIM + r'chile' + DELIM: '🇨🇱', - DELIM + r'cameroon' + DELIM: '🇨🇲', - DELIM + r'cn' + DELIM: '🇨🇳', - DELIM + r'colombia' + DELIM: '🇨🇴', - DELIM + r'clipperton_island' + DELIM: '🇨🇵', - DELIM + r'costa_rica' + DELIM: '🇨🇷', - DELIM + r'cuba' + DELIM: '🇨🇺', - DELIM + r'cape_verde' + DELIM: '🇨🇻', - DELIM + r'curacao' + DELIM: '🇨🇼', - DELIM + r'christmas_island' + DELIM: '🇨🇽', - DELIM + r'cyprus' + DELIM: '🇨🇾', - DELIM + r'czech_republic' + DELIM: '🇨🇿', - DELIM + r'de' + DELIM: '🇩🇪', - DELIM + r'diego_garcia' + DELIM: '🇩🇬', - DELIM + r'djibouti' + DELIM: '🇩🇯', - DELIM + r'denmark' + DELIM: '🇩🇰', - DELIM + r'dominica' + DELIM: '🇩🇲', - DELIM + r'dominican_republic' + DELIM: '🇩🇴', - DELIM + r'algeria' + DELIM: '🇩🇿', - DELIM + r'ceuta_melilla' + DELIM: '🇪🇦', - DELIM + r'ecuador' + DELIM: '🇪🇨', - DELIM + r'estonia' + DELIM: '🇪🇪', - DELIM + r'egypt' + DELIM: '🇪🇬', - DELIM + r'western_sahara' + DELIM: '🇪🇭', - DELIM + r'eritrea' + DELIM: '🇪🇷', - DELIM + r'es' + DELIM: '🇪🇸', - DELIM + r'ethiopia' + DELIM: '🇪🇹', - DELIM + r'(eu|european_union)' + DELIM: '🇪🇺', - DELIM + r'finland' + DELIM: '🇫🇮', - DELIM + r'fiji' + DELIM: '🇫🇯', - DELIM + r'falkland_islands' + DELIM: '🇫🇰', - DELIM + r'micronesia' + DELIM: '🇫🇲', - DELIM + r'faroe_islands' + DELIM: '🇫🇴', - DELIM + r'fr' + DELIM: '🇫🇷', - DELIM + r'gabon' + DELIM: '🇬🇦', - DELIM + r'(uk|gb)' + DELIM: '🇬🇧', - DELIM + r'grenada' + DELIM: '🇬🇩', - DELIM + r'georgia' + DELIM: '🇬🇪', - DELIM + r'french_guiana' + DELIM: '🇬🇫', - DELIM + r'guernsey' + DELIM: '🇬🇬', - DELIM + r'ghana' + DELIM: '🇬🇭', - DELIM + r'gibraltar' + DELIM: '🇬🇮', - DELIM + r'greenland' + DELIM: '🇬🇱', - DELIM + r'gambia' + DELIM: '🇬🇲', - DELIM + r'guinea' + DELIM: '🇬🇳', - DELIM + r'guadeloupe' + DELIM: '🇬🇵', - DELIM + r'equatorial_guinea' + DELIM: '🇬🇶', - DELIM + 
r'greece' + DELIM: '🇬🇷', - DELIM + r'south_georgia_south_sandwich_islands' + DELIM: '🇬🇸', - DELIM + r'guatemala' + DELIM: '🇬🇹', - DELIM + r'guam' + DELIM: '🇬🇺', - DELIM + r'guinea_bissau' + DELIM: '🇬🇼', - DELIM + r'guyana' + DELIM: '🇬🇾', - DELIM + r'hong_kong' + DELIM: '🇭🇰', - DELIM + r'heard_mcdonald_islands' + DELIM: '🇭🇲', - DELIM + r'honduras' + DELIM: '🇭🇳', - DELIM + r'croatia' + DELIM: '🇭🇷', - DELIM + r'haiti' + DELIM: '🇭🇹', - DELIM + r'hungary' + DELIM: '🇭🇺', - DELIM + r'canary_islands' + DELIM: '🇮🇨', - DELIM + r'indonesia' + DELIM: '🇮🇩', - DELIM + r'ireland' + DELIM: '🇮🇪', - DELIM + r'israel' + DELIM: '🇮🇱', - DELIM + r'isle_of_man' + DELIM: '🇮🇲', - DELIM + r'india' + DELIM: '🇮🇳', - DELIM + r'british_indian_ocean_territory' + DELIM: '🇮🇴', - DELIM + r'iraq' + DELIM: '🇮🇶', - DELIM + r'iran' + DELIM: '🇮🇷', - DELIM + r'iceland' + DELIM: '🇮🇸', - DELIM + r'it' + DELIM: '🇮🇹', - DELIM + r'jersey' + DELIM: '🇯🇪', - DELIM + r'jamaica' + DELIM: '🇯🇲', - DELIM + r'jordan' + DELIM: '🇯🇴', - DELIM + r'jp' + DELIM: '🇯🇵', - DELIM + r'kenya' + DELIM: '🇰🇪', - DELIM + r'kyrgyzstan' + DELIM: '🇰🇬', - DELIM + r'cambodia' + DELIM: '🇰🇭', - DELIM + r'kiribati' + DELIM: '🇰🇮', - DELIM + r'comoros' + DELIM: '🇰🇲', - DELIM + r'st_kitts_nevis' + DELIM: '🇰🇳', - DELIM + r'north_korea' + DELIM: '🇰🇵', - DELIM + r'kr' + DELIM: '🇰🇷', - DELIM + r'kuwait' + DELIM: '🇰🇼', - DELIM + r'cayman_islands' + DELIM: '🇰🇾', - DELIM + r'kazakhstan' + DELIM: '🇰🇿', - DELIM + r'laos' + DELIM: '🇱🇦', - DELIM + r'lebanon' + DELIM: '🇱🇧', - DELIM + r'st_lucia' + DELIM: '🇱🇨', - DELIM + r'liechtenstein' + DELIM: '🇱🇮', - DELIM + r'sri_lanka' + DELIM: '🇱🇰', - DELIM + r'liberia' + DELIM: '🇱🇷', - DELIM + r'lesotho' + DELIM: '🇱🇸', - DELIM + r'lithuania' + DELIM: '🇱🇹', - DELIM + r'luxembourg' + DELIM: '🇱🇺', - DELIM + r'latvia' + DELIM: '🇱🇻', - DELIM + r'libya' + DELIM: '🇱🇾', - DELIM + r'morocco' + DELIM: '🇲🇦', - DELIM + r'monaco' + DELIM: '🇲🇨', - DELIM + r'moldova' + DELIM: '🇲🇩', - DELIM + r'montenegro' + DELIM: '🇲🇪', - DELIM + 
r'st_martin' + DELIM: '🇲🇫', - DELIM + r'madagascar' + DELIM: '🇲🇬', - DELIM + r'marshall_islands' + DELIM: '🇲🇭', - DELIM + r'macedonia' + DELIM: '🇲🇰', - DELIM + r'mali' + DELIM: '🇲🇱', - DELIM + r'myanmar' + DELIM: '🇲🇲', - DELIM + r'mongolia' + DELIM: '🇲🇳', - DELIM + r'macau' + DELIM: '🇲🇴', - DELIM + r'northern_mariana_islands' + DELIM: '🇲🇵', - DELIM + r'martinique' + DELIM: '🇲🇶', - DELIM + r'mauritania' + DELIM: '🇲🇷', - DELIM + r'montserrat' + DELIM: '🇲🇸', - DELIM + r'malta' + DELIM: '🇲🇹', - DELIM + r'mauritius' + DELIM: '🇲🇺', - DELIM + r'maldives' + DELIM: '🇲🇻', - DELIM + r'malawi' + DELIM: '🇲🇼', - DELIM + r'mexico' + DELIM: '🇲🇽', - DELIM + r'malaysia' + DELIM: '🇲🇾', - DELIM + r'mozambique' + DELIM: '🇲🇿', - DELIM + r'namibia' + DELIM: '🇳🇦', - DELIM + r'new_caledonia' + DELIM: '🇳🇨', - DELIM + r'niger' + DELIM: '🇳🇪', - DELIM + r'norfolk_island' + DELIM: '🇳🇫', - DELIM + r'nigeria' + DELIM: '🇳🇬', - DELIM + r'nicaragua' + DELIM: '🇳🇮', - DELIM + r'netherlands' + DELIM: '🇳🇱', - DELIM + r'norway' + DELIM: '🇳🇴', - DELIM + r'nepal' + DELIM: '🇳🇵', - DELIM + r'nauru' + DELIM: '🇳🇷', - DELIM + r'niue' + DELIM: '🇳🇺', - DELIM + r'new_zealand' + DELIM: '🇳🇿', - DELIM + r'oman' + DELIM: '🇴🇲', - DELIM + r'panama' + DELIM: '🇵🇦', - DELIM + r'peru' + DELIM: '🇵🇪', - DELIM + r'french_polynesia' + DELIM: '🇵🇫', - DELIM + r'papua_new_guinea' + DELIM: '🇵🇬', - DELIM + r'philippines' + DELIM: '🇵🇭', - DELIM + r'pakistan' + DELIM: '🇵🇰', - DELIM + r'poland' + DELIM: '🇵🇱', - DELIM + r'st_pierre_miquelon' + DELIM: '🇵🇲', - DELIM + r'pitcairn_islands' + DELIM: '🇵🇳', - DELIM + r'puerto_rico' + DELIM: '🇵🇷', - DELIM + r'palestinian_territories' + DELIM: '🇵🇸', - DELIM + r'portugal' + DELIM: '🇵🇹', - DELIM + r'palau' + DELIM: '🇵🇼', - DELIM + r'paraguay' + DELIM: '🇵🇾', - DELIM + r'qatar' + DELIM: '🇶🇦', - DELIM + r'reunion' + DELIM: '🇷🇪', - DELIM + r'romania' + DELIM: '🇷🇴', - DELIM + r'serbia' + DELIM: '🇷🇸', - DELIM + r'ru' + DELIM: '🇷🇺', - DELIM + r'rwanda' + DELIM: '🇷🇼', - DELIM + r'saudi_arabia' + DELIM: 
'🇸🇦', - DELIM + r'solomon_islands' + DELIM: '🇸🇧', - DELIM + r'seychelles' + DELIM: '🇸🇨', - DELIM + r'sudan' + DELIM: '🇸🇩', - DELIM + r'sweden' + DELIM: '🇸🇪', - DELIM + r'singapore' + DELIM: '🇸🇬', - DELIM + r'st_helena' + DELIM: '🇸🇭', - DELIM + r'slovenia' + DELIM: '🇸🇮', - DELIM + r'svalbard_jan_mayen' + DELIM: '🇸🇯', - DELIM + r'slovakia' + DELIM: '🇸🇰', - DELIM + r'sierra_leone' + DELIM: '🇸🇱', - DELIM + r'san_marino' + DELIM: '🇸🇲', - DELIM + r'senegal' + DELIM: '🇸🇳', - DELIM + r'somalia' + DELIM: '🇸🇴', - DELIM + r'suriname' + DELIM: '🇸🇷', - DELIM + r'south_sudan' + DELIM: '🇸🇸', - DELIM + r'sao_tome_principe' + DELIM: '🇸🇹', - DELIM + r'el_salvador' + DELIM: '🇸🇻', - DELIM + r'sint_maarten' + DELIM: '🇸🇽', - DELIM + r'syria' + DELIM: '🇸🇾', - DELIM + r'swaziland' + DELIM: '🇸🇿', - DELIM + r'tristan_da_cunha' + DELIM: '🇹🇦', - DELIM + r'turks_caicos_islands' + DELIM: '🇹🇨', - DELIM + r'chad' + DELIM: '🇹🇩', - DELIM + r'french_southern_territories' + DELIM: '🇹🇫', - DELIM + r'togo' + DELIM: '🇹🇬', - DELIM + r'thailand' + DELIM: '🇹🇭', - DELIM + r'tajikistan' + DELIM: '🇹🇯', - DELIM + r'tokelau' + DELIM: '🇹🇰', - DELIM + r'timor_leste' + DELIM: '🇹🇱', - DELIM + r'turkmenistan' + DELIM: '🇹🇲', - DELIM + r'tunisia' + DELIM: '🇹🇳', - DELIM + r'tonga' + DELIM: '🇹🇴', - DELIM + r'tr' + DELIM: '🇹🇷', - DELIM + r'trinidad_tobago' + DELIM: '🇹🇹', - DELIM + r'tuvalu' + DELIM: '🇹🇻', - DELIM + r'taiwan' + DELIM: '🇹🇼', - DELIM + r'tanzania' + DELIM: '🇹🇿', - DELIM + r'ukraine' + DELIM: '🇺🇦', - DELIM + r'uganda' + DELIM: '🇺🇬', - DELIM + r'us_outlying_islands' + DELIM: '🇺🇲', - DELIM + r'united_nations' + DELIM: '🇺🇳', - DELIM + r'us' + DELIM: '🇺🇸', - DELIM + r'uruguay' + DELIM: '🇺🇾', - DELIM + r'uzbekistan' + DELIM: '🇺🇿', - DELIM + r'vatican_city' + DELIM: '🇻🇦', - DELIM + r'st_vincent_grenadines' + DELIM: '🇻🇨', - DELIM + r'venezuela' + DELIM: '🇻🇪', - DELIM + r'british_virgin_islands' + DELIM: '🇻🇬', - DELIM + r'us_virgin_islands' + DELIM: '🇻🇮', - DELIM + r'vietnam' + DELIM: '🇻🇳', - DELIM + r'vanuatu' + 
DELIM: '🇻🇺', - DELIM + r'wallis_futuna' + DELIM: '🇼🇫', - DELIM + r'samoa' + DELIM: '🇼🇸', - DELIM + r'kosovo' + DELIM: '🇽🇰', - DELIM + r'yemen' + DELIM: '🇾🇪', - DELIM + r'mayotte' + DELIM: '🇾🇹', - DELIM + r'south_africa' + DELIM: '🇿🇦', - DELIM + r'zambia' + DELIM: '🇿🇲', - DELIM + r'zimbabwe' + DELIM: '🇿🇼', - + DELIM + r"ascension_island" + DELIM: "🇦🇨", + DELIM + r"andorra" + DELIM: "🇦🇩", + DELIM + r"united_arab_emirates" + DELIM: "🇦🇪", + DELIM + r"afghanistan" + DELIM: "🇦🇫", + DELIM + r"antigua_barbuda" + DELIM: "🇦🇬", + DELIM + r"anguilla" + DELIM: "🇦🇮", + DELIM + r"albania" + DELIM: "🇦🇱", + DELIM + r"armenia" + DELIM: "🇦🇲", + DELIM + r"angola" + DELIM: "🇦🇴", + DELIM + r"antarctica" + DELIM: "🇦🇶", + DELIM + r"argentina" + DELIM: "🇦🇷", + DELIM + r"american_samoa" + DELIM: "🇦🇸", + DELIM + r"austria" + DELIM: "🇦🇹", + DELIM + r"australia" + DELIM: "🇦🇺", + DELIM + r"aruba" + DELIM: "🇦🇼", + DELIM + r"aland_islands" + DELIM: "🇦🇽", + DELIM + r"azerbaijan" + DELIM: "🇦🇿", + DELIM + r"bosnia_herzegovina" + DELIM: "🇧🇦", + DELIM + r"barbados" + DELIM: "🇧🇧", + DELIM + r"bangladesh" + DELIM: "🇧🇩", + DELIM + r"belgium" + DELIM: "🇧🇪", + DELIM + r"burkina_faso" + DELIM: "🇧🇫", + DELIM + r"bulgaria" + DELIM: "🇧🇬", + DELIM + r"bahrain" + DELIM: "🇧🇭", + DELIM + r"burundi" + DELIM: "🇧🇮", + DELIM + r"benin" + DELIM: "🇧🇯", + DELIM + r"st_barthelemy" + DELIM: "🇧🇱", + DELIM + r"bermuda" + DELIM: "🇧🇲", + DELIM + r"brunei" + DELIM: "🇧🇳", + DELIM + r"bolivia" + DELIM: "🇧🇴", + DELIM + r"caribbean_netherlands" + DELIM: "🇧🇶", + DELIM + r"brazil" + DELIM: "🇧🇷", + DELIM + r"bahamas" + DELIM: "🇧🇸", + DELIM + r"bhutan" + DELIM: "🇧🇹", + DELIM + r"bouvet_island" + DELIM: "🇧🇻", + DELIM + r"botswana" + DELIM: "🇧🇼", + DELIM + r"belarus" + DELIM: "🇧🇾", + DELIM + r"belize" + DELIM: "🇧🇿", + DELIM + r"canada" + DELIM: "🇨🇦", + DELIM + r"cocos_islands" + DELIM: "🇨🇨", + DELIM + r"congo_kinshasa" + DELIM: "🇨🇩", + DELIM + r"central_african_republic" + DELIM: "🇨🇫", + DELIM + r"congo_brazzaville" + DELIM: "🇨🇬", + 
DELIM + r"switzerland" + DELIM: "🇨🇭", + DELIM + r"cote_divoire" + DELIM: "🇨🇮", + DELIM + r"cook_islands" + DELIM: "🇨🇰", + DELIM + r"chile" + DELIM: "🇨🇱", + DELIM + r"cameroon" + DELIM: "🇨🇲", + DELIM + r"cn" + DELIM: "🇨🇳", + DELIM + r"colombia" + DELIM: "🇨🇴", + DELIM + r"clipperton_island" + DELIM: "🇨🇵", + DELIM + r"costa_rica" + DELIM: "🇨🇷", + DELIM + r"cuba" + DELIM: "🇨🇺", + DELIM + r"cape_verde" + DELIM: "🇨🇻", + DELIM + r"curacao" + DELIM: "🇨🇼", + DELIM + r"christmas_island" + DELIM: "🇨🇽", + DELIM + r"cyprus" + DELIM: "🇨🇾", + DELIM + r"czech_republic" + DELIM: "🇨🇿", + DELIM + r"de" + DELIM: "🇩🇪", + DELIM + r"diego_garcia" + DELIM: "🇩🇬", + DELIM + r"djibouti" + DELIM: "🇩🇯", + DELIM + r"denmark" + DELIM: "🇩🇰", + DELIM + r"dominica" + DELIM: "🇩🇲", + DELIM + r"dominican_republic" + DELIM: "🇩🇴", + DELIM + r"algeria" + DELIM: "🇩🇿", + DELIM + r"ceuta_melilla" + DELIM: "🇪🇦", + DELIM + r"ecuador" + DELIM: "🇪🇨", + DELIM + r"estonia" + DELIM: "🇪🇪", + DELIM + r"egypt" + DELIM: "🇪🇬", + DELIM + r"western_sahara" + DELIM: "🇪🇭", + DELIM + r"eritrea" + DELIM: "🇪🇷", + DELIM + r"es" + DELIM: "🇪🇸", + DELIM + r"ethiopia" + DELIM: "🇪🇹", + DELIM + r"(eu|european_union)" + DELIM: "🇪🇺", + DELIM + r"finland" + DELIM: "🇫🇮", + DELIM + r"fiji" + DELIM: "🇫🇯", + DELIM + r"falkland_islands" + DELIM: "🇫🇰", + DELIM + r"micronesia" + DELIM: "🇫🇲", + DELIM + r"faroe_islands" + DELIM: "🇫🇴", + DELIM + r"fr" + DELIM: "🇫🇷", + DELIM + r"gabon" + DELIM: "🇬🇦", + DELIM + r"(uk|gb)" + DELIM: "🇬🇧", + DELIM + r"grenada" + DELIM: "🇬🇩", + DELIM + r"georgia" + DELIM: "🇬🇪", + DELIM + r"french_guiana" + DELIM: "🇬🇫", + DELIM + r"guernsey" + DELIM: "🇬🇬", + DELIM + r"ghana" + DELIM: "🇬🇭", + DELIM + r"gibraltar" + DELIM: "🇬🇮", + DELIM + r"greenland" + DELIM: "🇬🇱", + DELIM + r"gambia" + DELIM: "🇬🇲", + DELIM + r"guinea" + DELIM: "🇬🇳", + DELIM + r"guadeloupe" + DELIM: "🇬🇵", + DELIM + r"equatorial_guinea" + DELIM: "🇬🇶", + DELIM + r"greece" + DELIM: "🇬🇷", + DELIM + r"south_georgia_south_sandwich_islands" + DELIM: "🇬🇸", + 
DELIM + r"guatemala" + DELIM: "🇬🇹", + DELIM + r"guam" + DELIM: "🇬🇺", + DELIM + r"guinea_bissau" + DELIM: "🇬🇼", + DELIM + r"guyana" + DELIM: "🇬🇾", + DELIM + r"hong_kong" + DELIM: "🇭🇰", + DELIM + r"heard_mcdonald_islands" + DELIM: "🇭🇲", + DELIM + r"honduras" + DELIM: "🇭🇳", + DELIM + r"croatia" + DELIM: "🇭🇷", + DELIM + r"haiti" + DELIM: "🇭🇹", + DELIM + r"hungary" + DELIM: "🇭🇺", + DELIM + r"canary_islands" + DELIM: "🇮🇨", + DELIM + r"indonesia" + DELIM: "🇮🇩", + DELIM + r"ireland" + DELIM: "🇮🇪", + DELIM + r"israel" + DELIM: "🇮🇱", + DELIM + r"isle_of_man" + DELIM: "🇮🇲", + DELIM + r"india" + DELIM: "🇮🇳", + DELIM + r"british_indian_ocean_territory" + DELIM: "🇮🇴", + DELIM + r"iraq" + DELIM: "🇮🇶", + DELIM + r"iran" + DELIM: "🇮🇷", + DELIM + r"iceland" + DELIM: "🇮🇸", + DELIM + r"it" + DELIM: "🇮🇹", + DELIM + r"jersey" + DELIM: "🇯🇪", + DELIM + r"jamaica" + DELIM: "🇯🇲", + DELIM + r"jordan" + DELIM: "🇯🇴", + DELIM + r"jp" + DELIM: "🇯🇵", + DELIM + r"kenya" + DELIM: "🇰🇪", + DELIM + r"kyrgyzstan" + DELIM: "🇰🇬", + DELIM + r"cambodia" + DELIM: "🇰🇭", + DELIM + r"kiribati" + DELIM: "🇰🇮", + DELIM + r"comoros" + DELIM: "🇰🇲", + DELIM + r"st_kitts_nevis" + DELIM: "🇰🇳", + DELIM + r"north_korea" + DELIM: "🇰🇵", + DELIM + r"kr" + DELIM: "🇰🇷", + DELIM + r"kuwait" + DELIM: "🇰🇼", + DELIM + r"cayman_islands" + DELIM: "🇰🇾", + DELIM + r"kazakhstan" + DELIM: "🇰🇿", + DELIM + r"laos" + DELIM: "🇱🇦", + DELIM + r"lebanon" + DELIM: "🇱🇧", + DELIM + r"st_lucia" + DELIM: "🇱🇨", + DELIM + r"liechtenstein" + DELIM: "🇱🇮", + DELIM + r"sri_lanka" + DELIM: "🇱🇰", + DELIM + r"liberia" + DELIM: "🇱🇷", + DELIM + r"lesotho" + DELIM: "🇱🇸", + DELIM + r"lithuania" + DELIM: "🇱🇹", + DELIM + r"luxembourg" + DELIM: "🇱🇺", + DELIM + r"latvia" + DELIM: "🇱🇻", + DELIM + r"libya" + DELIM: "🇱🇾", + DELIM + r"morocco" + DELIM: "🇲🇦", + DELIM + r"monaco" + DELIM: "🇲🇨", + DELIM + r"moldova" + DELIM: "🇲🇩", + DELIM + r"montenegro" + DELIM: "🇲🇪", + DELIM + r"st_martin" + DELIM: "🇲🇫", + DELIM + r"madagascar" + DELIM: "🇲🇬", + DELIM + 
r"marshall_islands" + DELIM: "🇲🇭", + DELIM + r"macedonia" + DELIM: "🇲🇰", + DELIM + r"mali" + DELIM: "🇲🇱", + DELIM + r"myanmar" + DELIM: "🇲🇲", + DELIM + r"mongolia" + DELIM: "🇲🇳", + DELIM + r"macau" + DELIM: "🇲🇴", + DELIM + r"northern_mariana_islands" + DELIM: "🇲🇵", + DELIM + r"martinique" + DELIM: "🇲🇶", + DELIM + r"mauritania" + DELIM: "🇲🇷", + DELIM + r"montserrat" + DELIM: "🇲🇸", + DELIM + r"malta" + DELIM: "🇲🇹", + DELIM + r"mauritius" + DELIM: "🇲🇺", + DELIM + r"maldives" + DELIM: "🇲🇻", + DELIM + r"malawi" + DELIM: "🇲🇼", + DELIM + r"mexico" + DELIM: "🇲🇽", + DELIM + r"malaysia" + DELIM: "🇲🇾", + DELIM + r"mozambique" + DELIM: "🇲🇿", + DELIM + r"namibia" + DELIM: "🇳🇦", + DELIM + r"new_caledonia" + DELIM: "🇳🇨", + DELIM + r"niger" + DELIM: "🇳🇪", + DELIM + r"norfolk_island" + DELIM: "🇳🇫", + DELIM + r"nigeria" + DELIM: "🇳🇬", + DELIM + r"nicaragua" + DELIM: "🇳🇮", + DELIM + r"netherlands" + DELIM: "🇳🇱", + DELIM + r"norway" + DELIM: "🇳🇴", + DELIM + r"nepal" + DELIM: "🇳🇵", + DELIM + r"nauru" + DELIM: "🇳🇷", + DELIM + r"niue" + DELIM: "🇳🇺", + DELIM + r"new_zealand" + DELIM: "🇳🇿", + DELIM + r"oman" + DELIM: "🇴🇲", + DELIM + r"panama" + DELIM: "🇵🇦", + DELIM + r"peru" + DELIM: "🇵🇪", + DELIM + r"french_polynesia" + DELIM: "🇵🇫", + DELIM + r"papua_new_guinea" + DELIM: "🇵🇬", + DELIM + r"philippines" + DELIM: "🇵🇭", + DELIM + r"pakistan" + DELIM: "🇵🇰", + DELIM + r"poland" + DELIM: "🇵🇱", + DELIM + r"st_pierre_miquelon" + DELIM: "🇵🇲", + DELIM + r"pitcairn_islands" + DELIM: "🇵🇳", + DELIM + r"puerto_rico" + DELIM: "🇵🇷", + DELIM + r"palestinian_territories" + DELIM: "🇵🇸", + DELIM + r"portugal" + DELIM: "🇵🇹", + DELIM + r"palau" + DELIM: "🇵🇼", + DELIM + r"paraguay" + DELIM: "🇵🇾", + DELIM + r"qatar" + DELIM: "🇶🇦", + DELIM + r"reunion" + DELIM: "🇷🇪", + DELIM + r"romania" + DELIM: "🇷🇴", + DELIM + r"serbia" + DELIM: "🇷🇸", + DELIM + r"ru" + DELIM: "🇷🇺", + DELIM + r"rwanda" + DELIM: "🇷🇼", + DELIM + r"saudi_arabia" + DELIM: "🇸🇦", + DELIM + r"solomon_islands" + DELIM: "🇸🇧", + DELIM + r"seychelles" + 
DELIM: "🇸🇨", + DELIM + r"sudan" + DELIM: "🇸🇩", + DELIM + r"sweden" + DELIM: "🇸🇪", + DELIM + r"singapore" + DELIM: "🇸🇬", + DELIM + r"st_helena" + DELIM: "🇸🇭", + DELIM + r"slovenia" + DELIM: "🇸🇮", + DELIM + r"svalbard_jan_mayen" + DELIM: "🇸🇯", + DELIM + r"slovakia" + DELIM: "🇸🇰", + DELIM + r"sierra_leone" + DELIM: "🇸🇱", + DELIM + r"san_marino" + DELIM: "🇸🇲", + DELIM + r"senegal" + DELIM: "🇸🇳", + DELIM + r"somalia" + DELIM: "🇸🇴", + DELIM + r"suriname" + DELIM: "🇸🇷", + DELIM + r"south_sudan" + DELIM: "🇸🇸", + DELIM + r"sao_tome_principe" + DELIM: "🇸🇹", + DELIM + r"el_salvador" + DELIM: "🇸🇻", + DELIM + r"sint_maarten" + DELIM: "🇸🇽", + DELIM + r"syria" + DELIM: "🇸🇾", + DELIM + r"swaziland" + DELIM: "🇸🇿", + DELIM + r"tristan_da_cunha" + DELIM: "🇹🇦", + DELIM + r"turks_caicos_islands" + DELIM: "🇹🇨", + DELIM + r"chad" + DELIM: "🇹🇩", + DELIM + r"french_southern_territories" + DELIM: "🇹🇫", + DELIM + r"togo" + DELIM: "🇹🇬", + DELIM + r"thailand" + DELIM: "🇹🇭", + DELIM + r"tajikistan" + DELIM: "🇹🇯", + DELIM + r"tokelau" + DELIM: "🇹🇰", + DELIM + r"timor_leste" + DELIM: "🇹🇱", + DELIM + r"turkmenistan" + DELIM: "🇹🇲", + DELIM + r"tunisia" + DELIM: "🇹🇳", + DELIM + r"tonga" + DELIM: "🇹🇴", + DELIM + r"tr" + DELIM: "🇹🇷", + DELIM + r"trinidad_tobago" + DELIM: "🇹🇹", + DELIM + r"tuvalu" + DELIM: "🇹🇻", + DELIM + r"taiwan" + DELIM: "🇹🇼", + DELIM + r"tanzania" + DELIM: "🇹🇿", + DELIM + r"ukraine" + DELIM: "🇺🇦", + DELIM + r"uganda" + DELIM: "🇺🇬", + DELIM + r"us_outlying_islands" + DELIM: "🇺🇲", + DELIM + r"united_nations" + DELIM: "🇺🇳", + DELIM + r"us" + DELIM: "🇺🇸", + DELIM + r"uruguay" + DELIM: "🇺🇾", + DELIM + r"uzbekistan" + DELIM: "🇺🇿", + DELIM + r"vatican_city" + DELIM: "🇻🇦", + DELIM + r"st_vincent_grenadines" + DELIM: "🇻🇨", + DELIM + r"venezuela" + DELIM: "🇻🇪", + DELIM + r"british_virgin_islands" + DELIM: "🇻🇬", + DELIM + r"us_virgin_islands" + DELIM: "🇻🇮", + DELIM + r"vietnam" + DELIM: "🇻🇳", + DELIM + r"vanuatu" + DELIM: "🇻🇺", + DELIM + r"wallis_futuna" + DELIM: "🇼🇫", + DELIM + r"samoa" + 
DELIM: "🇼🇸", + DELIM + r"kosovo" + DELIM: "🇽🇰", + DELIM + r"yemen" + DELIM: "🇾🇪", + DELIM + r"mayotte" + DELIM: "🇾🇹", + DELIM + r"south_africa" + DELIM: "🇿🇦", + DELIM + r"zambia" + DELIM: "🇿🇲", + DELIM + r"zimbabwe" + DELIM: "🇿🇼", # # Subdivision Flag # - DELIM + r'england' + DELIM: 'ðŸ´ó §ó ¢ó ¥ó ®ó §ó ¿', - DELIM + r'scotland' + DELIM: 'ðŸ´ó §ó ¢ó ³ó £ó ´ó ¿', - DELIM + r'wales' + DELIM: 'ðŸ´ó §ó ¢ó ·ó ¬ó ³ó ¿', + DELIM + r"england" + DELIM: "ðŸ´ó §ó ¢ó ¥ó ®ó §ó ¿", + DELIM + r"scotland" + DELIM: "ðŸ´ó §ó ¢ó ³ó £ó ´ó ¿", + DELIM + r"wales" + DELIM: "ðŸ´ó §ó ¢ó ·ó ¬ó ³ó ¿", } # Define our singlton @@ -2249,10 +2151,8 @@ def apply_emojis(content): - """ - Takes the content and swaps any matched emoji's found with their - utf-8 encoded mapping - """ + """Takes the content and swaps any matched emoji's found with their utf-8 + encoded mapping.""" global EMOJI_COMPILED_MAP @@ -2260,14 +2160,13 @@ def apply_emojis(content): t_start = time.time() # Perform our compilation EMOJI_COMPILED_MAP = re.compile( - r'(' + '|'.join(EMOJI_MAP.keys()) + r')', - re.IGNORECASE) - logger.trace( - 'Emoji engine loaded in {:.4f}s'.format((time.time() - t_start))) + r"(" + "|".join(EMOJI_MAP.keys()) + r")", re.IGNORECASE + ) + logger.trace(f"Emoji engine loaded in {time.time() - t_start:.4f}s") try: return EMOJI_COMPILED_MAP.sub(lambda x: EMOJI_MAP[x.group()], content) except TypeError: # No change; but force string return - return '' + return "" diff --git a/libs/apprise/exception.py b/libs/apprise/exception.py index 216e5cc74f..aa1907722f 100644 --- a/libs/apprise/exception.py +++ b/libs/apprise/exception.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. 
-# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -29,33 +28,36 @@ class AppriseException(Exception): - """ - Base Apprise Exception Class - """ + """Base Apprise Exception Class.""" + def __init__(self, message, error_code=0): super().__init__(message) self.error_code = error_code class ApprisePluginException(AppriseException): - """ - Class object for handling exceptions raised from within a plugin - """ + """Class object for handling exceptions raised from within a plugin.""" + def __init__(self, message, error_code=600): super().__init__(message, error_code=error_code) class AppriseDiskIOError(AppriseException): - """ - Thrown when an disk i/o error occurs - """ + """Thrown when an disk i/o error occurs.""" + def __init__(self, message, error_code=errno.EIO): super().__init__(message, error_code=error_code) +class AppriseInvalidData(AppriseException): + """Thrown when bad data was passed into an internal function.""" + + def __init__(self, message, error_code=errno.EINVAL): + super().__init__(message, error_code=error_code) + + class AppriseFileNotFound(AppriseDiskIOError, FileNotFoundError): - """ - Thrown when a persistent write occured in MEMORY mode - """ + """Thrown when a persistent write occured in MEMORY mode.""" + def __init__(self, message): super().__init__(message, error_code=errno.ENOENT) diff --git a/libs/apprise/i18n/apprise.pot b/libs/apprise/i18n/apprise.pot new file mode 100644 index 0000000000..37194e5d02 --- /dev/null +++ b/libs/apprise/i18n/apprise.pot @@ -0,0 +1,1762 @@ +# Translations template for PROJECT. +# Copyright (C) 2026 ORGANIZATION +# This file is distributed under the same license as the PROJECT project. +# FIRST AUTHOR , 2026. 
+# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: PROJECT VERSION\n" +"Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" +"POT-Creation-Date: 2026-03-08 16:43-0400\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME \n" +"Language-Team: LANGUAGE \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=utf-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: Babel 2.18.0\n" + +#: apprise/attachment/base.py:96 apprise/url.py:141 +msgid "Verify SSL" +msgstr "" + +#: apprise/url.py:151 +msgid "Socket Read Timeout" +msgstr "" + +#: apprise/url.py:165 +msgid "Socket Connect Timeout" +msgstr "" + +#: apprise/attachment/base.py:82 +msgid "Cache Age" +msgstr "" + +#: apprise/attachment/base.py:88 +msgid "Forced Mime Type" +msgstr "" + +#: apprise/attachment/base.py:92 +msgid "Forced File Name" +msgstr "" + +#: apprise/attachment/file.py:41 apprise/config/file.py:41 +msgid "Local File" +msgstr "" + +#: apprise/attachment/http.py:46 apprise/config/http.py:54 +msgid "Web Based" +msgstr "" + +#: apprise/attachment/memory.py:44 apprise/config/memory.py:37 +msgid "Memory" +msgstr "" + +#: apprise/plugins/__init__.py:280 +msgid "Schema" +msgstr "" + +#: apprise/plugins/__init__.py:401 +msgid "No dependencies." +msgstr "" + +#: apprise/plugins/__init__.py:404 +msgid "Packages are required to function." +msgstr "" + +#: apprise/plugins/__init__.py:408 +msgid "Packages are recommended to improve functionality." 
+msgstr "" + +#: apprise/plugins/africas_talking.py:132 +msgid "App User Name" +msgstr "" + +#: apprise/plugins/africas_talking.py:138 apprise/plugins/brevo.py:112 +#: apprise/plugins/burstsms.py:104 apprise/plugins/clicksend.py:98 +#: apprise/plugins/dot.py:121 apprise/plugins/fcm/__init__.py:143 +#: apprise/plugins/httpsms.py:77 apprise/plugins/join.py:140 +#: apprise/plugins/kavenegar.py:115 apprise/plugins/kumulos.py:87 +#: apprise/plugins/mailgun.py:146 apprise/plugins/messagebird.py:78 +#: apprise/plugins/one_signal.py:113 apprise/plugins/opsgenie.py:236 +#: apprise/plugins/pagerduty.py:131 apprise/plugins/popcorn_notify.py:71 +#: apprise/plugins/prowl.py:121 apprise/plugins/resend.py:107 +#: apprise/plugins/sendgrid.py:116 apprise/plugins/seven.py:75 +#: apprise/plugins/simplepush.py:101 apprise/plugins/smsmanager.py:106 +#: apprise/plugins/smtp2go.py:118 apprise/plugins/sparkpost.py:169 +#: apprise/plugins/splunk.py:165 apprise/plugins/techuluspush.py:97 +#: apprise/plugins/twilio.py:197 apprise/plugins/vapid/__init__.py:152 +#: apprise/plugins/vonage.py:80 +msgid "API Key" +msgstr "" + +#: apprise/plugins/africas_talking.py:145 apprise/plugins/fortysixelks.py:106 +msgid "Target Phone" +msgstr "" + +#: apprise/plugins/africas_talking.py:150 apprise/plugins/aprs.py:187 +#: apprise/plugins/bark.py:159 apprise/plugins/brevo.py:129 +#: apprise/plugins/bulksms.py:137 apprise/plugins/bulkvs.py:110 +#: apprise/plugins/burstsms.py:131 apprise/plugins/clickatell.py:90 +#: apprise/plugins/clicksend.py:112 apprise/plugins/d7networks.py:110 +#: apprise/plugins/dapnet.py:138 apprise/plugins/dingtalk.py:111 +#: apprise/plugins/email/base.py:140 apprise/plugins/fcm/__init__.py:168 +#: apprise/plugins/flock.py:125 apprise/plugins/fortysixelks.py:111 +#: apprise/plugins/httpsms.py:97 apprise/plugins/irc/base.py:149 +#: apprise/plugins/join.py:172 apprise/plugins/kavenegar.py:135 +#: apprise/plugins/line.py:93 apprise/plugins/mailgun.py:157 +#: 
apprise/plugins/mastodon.py:190 apprise/plugins/matrix.py:262 +#: apprise/plugins/mattermost.py:185 apprise/plugins/messagebird.py:99 +#: apprise/plugins/mqtt.py:174 apprise/plugins/msg91.py:123 +#: apprise/plugins/nextcloud.py:148 apprise/plugins/nextcloudtalk.py:101 +#: apprise/plugins/notifiarr.py:103 apprise/plugins/notificationapi.py:187 +#: apprise/plugins/ntfy.py:241 apprise/plugins/office365.py:148 +#: apprise/plugins/one_signal.py:141 apprise/plugins/plivo.py:114 +#: apprise/plugins/popcorn_notify.py:89 apprise/plugins/pushbullet.py:105 +#: apprise/plugins/pushed.py:107 apprise/plugins/pushover.py:206 +#: apprise/plugins/pushsafer.py:376 apprise/plugins/pushy.py:97 +#: apprise/plugins/reddit.py:170 apprise/plugins/resend.py:124 +#: apprise/plugins/revolt.py:112 apprise/plugins/rocketchat.py:166 +#: apprise/plugins/sendgrid.py:133 apprise/plugins/sendpulse.py:136 +#: apprise/plugins/seven.py:88 apprise/plugins/sfr.py:113 +#: apprise/plugins/signal_api.py:138 apprise/plugins/sinch.py:140 +#: apprise/plugins/slack.py:248 apprise/plugins/smpp.py:128 +#: apprise/plugins/smseagle.py:172 apprise/plugins/smsmanager.py:119 +#: apprise/plugins/sns.py:138 apprise/plugins/telegram.py:359 +#: apprise/plugins/threema.py:115 apprise/plugins/twilio.py:174 +#: apprise/plugins/twist.py:123 apprise/plugins/twitter.py:168 +#: apprise/plugins/vapid/__init__.py:158 apprise/plugins/voipms.py:107 +#: apprise/plugins/vonage.py:108 apprise/plugins/whatsapp.py:126 +#: apprise/plugins/wxpusher.py:139 apprise/plugins/xmpp/base.py:118 +#: apprise/plugins/zulip.py:153 +msgid "Targets" +msgstr "" + +#: apprise/plugins/africas_talking.py:168 +msgid "From" +msgstr "" + +#: apprise/plugins/africas_talking.py:174 apprise/plugins/bulksms.py:170 +#: apprise/plugins/bulkvs.py:131 apprise/plugins/burstsms.py:166 +#: apprise/plugins/clicksend.py:130 apprise/plugins/d7networks.py:128 +#: apprise/plugins/dapnet.py:167 apprise/plugins/mailgun.py:194 +#: apprise/plugins/mastodon.py:215 
apprise/plugins/one_signal.py:161 +#: apprise/plugins/opsgenie.py:288 apprise/plugins/plivo.py:137 +#: apprise/plugins/popcorn_notify.py:104 apprise/plugins/signal_api.py:155 +#: apprise/plugins/smseagle.py:190 apprise/plugins/smsmanager.py:152 +#: apprise/plugins/smtp2go.py:151 apprise/plugins/sparkpost.py:209 +#: apprise/plugins/twitter.py:193 +msgid "Batch Mode" +msgstr "" + +#: apprise/plugins/africas_talking.py:179 +msgid "SMS Mode" +msgstr "" + +#: apprise/plugins/apprise_api.py:102 apprise/plugins/bark.py:134 +#: apprise/plugins/custom_form.py:121 apprise/plugins/custom_json.py:103 +#: apprise/plugins/custom_xml.py:103 apprise/plugins/emby.py:85 +#: apprise/plugins/enigma2.py:110 apprise/plugins/fluxer.py:159 +#: apprise/plugins/gotify.py:129 apprise/plugins/growl.py:140 +#: apprise/plugins/home_assistant.py:79 apprise/plugins/irc/base.py:117 +#: apprise/plugins/lametric.py:460 apprise/plugins/mastodon.py:168 +#: apprise/plugins/matrix.py:220 apprise/plugins/mattermost.py:151 +#: apprise/plugins/misskey.py:117 apprise/plugins/mqtt.py:147 +#: apprise/plugins/nextcloud.py:116 apprise/plugins/nextcloudtalk.py:74 +#: apprise/plugins/notica.py:126 apprise/plugins/ntfy.py:211 +#: apprise/plugins/parseplatform.py:90 apprise/plugins/pushdeer.py:75 +#: apprise/plugins/pushjet.py:71 apprise/plugins/rocketchat.py:120 +#: apprise/plugins/rsyslog.py:180 apprise/plugins/signal_api.py:97 +#: apprise/plugins/smseagle.py:135 apprise/plugins/synology.py:83 +#: apprise/plugins/workflows.py:125 apprise/plugins/xbmc.py:96 +#: apprise/plugins/xmpp/base.py:96 +msgid "Hostname" +msgstr "" + +#: apprise/plugins/apprise_api.py:107 apprise/plugins/bark.py:139 +#: apprise/plugins/custom_form.py:126 apprise/plugins/custom_json.py:108 +#: apprise/plugins/custom_xml.py:108 apprise/plugins/email/base.py:129 +#: apprise/plugins/emby.py:90 apprise/plugins/enigma2.py:115 +#: apprise/plugins/fluxer.py:163 apprise/plugins/gotify.py:140 +#: apprise/plugins/growl.py:145 
apprise/plugins/home_assistant.py:84 +#: apprise/plugins/irc/base.py:122 apprise/plugins/lametric.py:464 +#: apprise/plugins/mastodon.py:178 apprise/plugins/matrix.py:224 +#: apprise/plugins/mattermost.py:167 apprise/plugins/misskey.py:127 +#: apprise/plugins/mqtt.py:152 apprise/plugins/nextcloud.py:121 +#: apprise/plugins/nextcloudtalk.py:79 apprise/plugins/notica.py:130 +#: apprise/plugins/ntfy.py:215 apprise/plugins/parseplatform.py:95 +#: apprise/plugins/pushdeer.py:79 apprise/plugins/pushjet.py:76 +#: apprise/plugins/rocketchat.py:125 apprise/plugins/rsyslog.py:185 +#: apprise/plugins/signal_api.py:102 apprise/plugins/smpp.py:108 +#: apprise/plugins/smseagle.py:140 apprise/plugins/synology.py:88 +#: apprise/plugins/workflows.py:130 apprise/plugins/xbmc.py:101 +#: apprise/plugins/xmpp/base.py:101 +msgid "Port" +msgstr "" + +#: apprise/plugins/apprise_api.py:113 apprise/plugins/bark.py:145 +#: apprise/plugins/bluesky.py:119 apprise/plugins/custom_form.py:132 +#: apprise/plugins/custom_json.py:114 apprise/plugins/custom_xml.py:114 +#: apprise/plugins/emby.py:97 apprise/plugins/enigma2.py:121 +#: apprise/plugins/freemobile.py:78 apprise/plugins/home_assistant.py:90 +#: apprise/plugins/lametric.py:471 apprise/plugins/matrix.py:230 +#: apprise/plugins/nextcloud.py:127 apprise/plugins/nextcloudtalk.py:85 +#: apprise/plugins/notica.py:136 apprise/plugins/ntfy.py:221 +#: apprise/plugins/opsgenie.py:242 apprise/plugins/pushjet.py:88 +#: apprise/plugins/rocketchat.py:131 apprise/plugins/signal_api.py:108 +#: apprise/plugins/smpp.py:92 apprise/plugins/synology.py:94 +#: apprise/plugins/xbmc.py:107 +msgid "Username" +msgstr "" + +#: apprise/plugins/apprise_api.py:117 apprise/plugins/aprs.py:172 +#: apprise/plugins/bark.py:149 apprise/plugins/bluesky.py:124 +#: apprise/plugins/bulksms.py:117 apprise/plugins/bulkvs.py:90 +#: apprise/plugins/custom_form.py:136 apprise/plugins/custom_json.py:118 +#: apprise/plugins/custom_xml.py:118 apprise/plugins/dapnet.py:123 +#: 
apprise/plugins/email/base.py:119 apprise/plugins/emby.py:101 +#: apprise/plugins/enigma2.py:125 apprise/plugins/freemobile.py:83 +#: apprise/plugins/growl.py:151 apprise/plugins/home_assistant.py:94 +#: apprise/plugins/irc/base.py:132 apprise/plugins/matrix.py:234 +#: apprise/plugins/mqtt.py:163 apprise/plugins/nextcloud.py:131 +#: apprise/plugins/nextcloudtalk.py:90 apprise/plugins/notica.py:140 +#: apprise/plugins/ntfy.py:225 apprise/plugins/pushjet.py:92 +#: apprise/plugins/reddit.py:145 apprise/plugins/rocketchat.py:135 +#: apprise/plugins/signal_api.py:112 apprise/plugins/simplepush.py:108 +#: apprise/plugins/smpp.py:97 apprise/plugins/synology.py:98 +#: apprise/plugins/twist.py:101 apprise/plugins/voipms.py:88 +#: apprise/plugins/xbmc.py:111 apprise/plugins/xmpp/base.py:112 +msgid "Password" +msgstr "" + +#: apprise/plugins/apprise_api.py:122 apprise/plugins/chanify.py:74 +#: apprise/plugins/dingtalk.py:93 apprise/plugins/feishu.py:80 +#: apprise/plugins/gotify.py:123 apprise/plugins/mattermost.py:157 +#: apprise/plugins/notica.py:119 apprise/plugins/notifiarr.py:91 +#: apprise/plugins/ntfy.py:230 apprise/plugins/pushme.py:62 +#: apprise/plugins/ryver.py:99 apprise/plugins/serverchan.py:70 +#: apprise/plugins/slack.py:294 apprise/plugins/synology.py:103 +#: apprise/plugins/webexteams.py:116 apprise/plugins/zulip.py:136 +msgid "Token" +msgstr "" + +#: apprise/plugins/apprise_api.py:136 apprise/plugins/ntfy.py:288 +#: apprise/plugins/opsgenie.py:307 apprise/plugins/pagertree.py:133 +msgid "Tags" +msgstr "" + +#: apprise/plugins/apprise_api.py:140 +msgid "Query Method" +msgstr "" + +#: apprise/plugins/apprise_api.py:154 apprise/plugins/custom_form.py:165 +#: apprise/plugins/custom_json.py:141 apprise/plugins/custom_xml.py:141 +#: apprise/plugins/enigma2.py:153 apprise/plugins/nextcloud.py:181 +#: apprise/plugins/nextcloudtalk.py:122 apprise/plugins/notica.py:156 +#: apprise/plugins/pagertree.py:142 apprise/plugins/synology.py:128 +msgid "HTTP Header" +msgstr "" 
+ +#: apprise/plugins/aprs.py:167 apprise/plugins/bulksms.py:112 +#: apprise/plugins/bulkvs.py:85 apprise/plugins/clicksend.py:93 +#: apprise/plugins/dapnet.py:118 apprise/plugins/email/base.py:115 +#: apprise/plugins/mailgun.py:136 apprise/plugins/mqtt.py:158 +#: apprise/plugins/reddit.py:140 apprise/plugins/sendpulse.py:108 +#: apprise/plugins/smtp2go.py:108 apprise/plugins/sparkpost.py:159 +msgid "User Name" +msgstr "" + +#: apprise/plugins/aprs.py:178 apprise/plugins/aprs.py:199 +#: apprise/plugins/dapnet.py:129 apprise/plugins/dapnet.py:150 +msgid "Target Callsign" +msgstr "" + +#: apprise/plugins/aprs.py:204 +msgid "Resend Delay" +msgstr "" + +#: apprise/plugins/aprs.py:211 +msgid "Locale" +msgstr "" + +#: apprise/plugins/bark.py:154 apprise/plugins/fcm/__init__.py:157 +#: apprise/plugins/pushbullet.py:89 apprise/plugins/pushover.py:200 +#: apprise/plugins/pushsafer.py:366 apprise/plugins/pushy.py:85 +msgid "Target Device" +msgstr "" + +#: apprise/plugins/bark.py:174 apprise/plugins/lametric.py:516 +#: apprise/plugins/macosx.py:124 apprise/plugins/pushover.py:223 +#: apprise/plugins/pushsafer.py:392 apprise/plugins/pushy.py:110 +msgid "Sound" +msgstr "" + +#: apprise/plugins/bark.py:179 +msgid "Level" +msgstr "" + +#: apprise/plugins/bark.py:184 +msgid "Volume" +msgstr "" + +#: apprise/plugins/bark.py:190 apprise/plugins/ntfy.py:270 +#: apprise/plugins/pagerduty.py:172 +msgid "Click" +msgstr "" + +#: apprise/plugins/bark.py:194 apprise/plugins/pushy.py:114 +msgid "Badge" +msgstr "" + +#: apprise/plugins/bark.py:199 +msgid "Category" +msgstr "" + +#: apprise/plugins/bark.py:203 apprise/plugins/join.py:158 +#: apprise/plugins/pagerduty.py:163 +msgid "Group" +msgstr "" + +#: apprise/plugins/bark.py:207 apprise/plugins/dbus.py:225 +#: apprise/plugins/discord.py:196 apprise/plugins/fcm/__init__.py:197 +#: apprise/plugins/flock.py:136 apprise/plugins/fluxer.py:250 +#: apprise/plugins/glib.py:187 apprise/plugins/gnome.py:154 +#: apprise/plugins/growl.py:175 
apprise/plugins/join.py:182 +#: apprise/plugins/line.py:108 apprise/plugins/macosx.py:115 +#: apprise/plugins/matrix.py:273 apprise/plugins/mattermost.py:211 +#: apprise/plugins/msteams.py:200 apprise/plugins/notifiarr.py:125 +#: apprise/plugins/ntfy.py:256 apprise/plugins/one_signal.py:155 +#: apprise/plugins/pagerduty.py:191 apprise/plugins/ryver.py:124 +#: apprise/plugins/slack.py:259 apprise/plugins/telegram.py:370 +#: apprise/plugins/vapid/__init__.py:204 apprise/plugins/windows.py:106 +#: apprise/plugins/workflows.py:162 apprise/plugins/xbmc.py:129 +msgid "Include Image" +msgstr "" + +#: apprise/plugins/bark.py:213 apprise/plugins/mattermost.py:207 +#: apprise/plugins/revolt.py:128 +msgid "Icon URL" +msgstr "" + +#: apprise/plugins/bark.py:217 apprise/plugins/streamlabs.py:119 +msgid "Call" +msgstr "" + +#: apprise/plugins/base.py:192 +msgid "Overflow Mode" +msgstr "" + +#: apprise/plugins/base.py:207 +msgid "Notify Format" +msgstr "" + +#: apprise/plugins/base.py:217 +msgid "Interpret Emojis" +msgstr "" + +#: apprise/plugins/base.py:227 +msgid "Persistent Storage" +msgstr "" + +#: apprise/plugins/base.py:237 +msgid "Timezone" +msgstr "" + +#: apprise/plugins/brevo.py:119 apprise/plugins/resend.py:114 +#: apprise/plugins/sendgrid.py:123 +msgid "Source Email" +msgstr "" + +#: apprise/plugins/brevo.py:124 apprise/plugins/email/base.py:135 +#: apprise/plugins/mailgun.py:152 apprise/plugins/notificationapi.py:172 +#: apprise/plugins/office365.py:143 apprise/plugins/one_signal.py:124 +#: apprise/plugins/popcorn_notify.py:84 apprise/plugins/pushbullet.py:100 +#: apprise/plugins/pushsafer.py:371 apprise/plugins/resend.py:119 +#: apprise/plugins/sendgrid.py:128 apprise/plugins/sendpulse.py:131 +#: apprise/plugins/slack.py:231 apprise/plugins/threema.py:105 +msgid "Target Email" +msgstr "" + +#: apprise/plugins/brevo.py:143 apprise/plugins/email/base.py:165 +#: apprise/plugins/mailgun.py:186 apprise/plugins/notificationapi.py:218 +#: apprise/plugins/office365.py:162 
apprise/plugins/resend.py:138 +#: apprise/plugins/sendgrid.py:147 apprise/plugins/sendpulse.py:152 +#: apprise/plugins/ses.py:206 apprise/plugins/smtp2go.py:143 +#: apprise/plugins/sparkpost.py:201 +msgid "Carbon Copy" +msgstr "" + +#: apprise/plugins/brevo.py:147 apprise/plugins/email/base.py:169 +#: apprise/plugins/mailgun.py:190 apprise/plugins/notificationapi.py:222 +#: apprise/plugins/office365.py:166 apprise/plugins/resend.py:142 +#: apprise/plugins/sendgrid.py:151 apprise/plugins/sendpulse.py:156 +#: apprise/plugins/ses.py:210 apprise/plugins/smtp2go.py:147 +#: apprise/plugins/sparkpost.py:205 +msgid "Blind Carbon Copy" +msgstr "" + +#: apprise/plugins/brevo.py:151 apprise/plugins/ses.py:196 +msgid "Reply To Email" +msgstr "" + +#: apprise/plugins/bulksms.py:123 apprise/plugins/bulkvs.py:103 +#: apprise/plugins/burstsms.py:124 apprise/plugins/clickatell.py:83 +#: apprise/plugins/clicksend.py:105 apprise/plugins/d7networks.py:103 +#: apprise/plugins/dingtalk.py:106 apprise/plugins/httpsms.py:90 +#: apprise/plugins/kavenegar.py:128 apprise/plugins/messagebird.py:92 +#: apprise/plugins/msg91.py:116 apprise/plugins/plivo.py:107 +#: apprise/plugins/popcorn_notify.py:77 apprise/plugins/seven.py:81 +#: apprise/plugins/signal_api.py:124 apprise/plugins/sinch.py:127 +#: apprise/plugins/smpp.py:121 apprise/plugins/smseagle.py:151 +#: apprise/plugins/smsmanager.py:112 apprise/plugins/sns.py:125 +#: apprise/plugins/threema.py:98 apprise/plugins/twilio.py:161 +#: apprise/plugins/voipms.py:100 apprise/plugins/vonage.py:101 +#: apprise/plugins/whatsapp.py:119 +msgid "Target Phone No" +msgstr "" + +#: apprise/plugins/bulksms.py:130 apprise/plugins/nextcloud.py:142 +msgid "Target Group" +msgstr "" + +#: apprise/plugins/bulksms.py:152 apprise/plugins/bulkvs.py:96 +#: apprise/plugins/bulkvs.py:125 apprise/plugins/clickatell.py:78 +#: apprise/plugins/fortysixelks.py:100 apprise/plugins/httpsms.py:83 +#: apprise/plugins/httpsms.py:115 apprise/plugins/signal_api.py:117 +#: 
apprise/plugins/sinch.py:120 apprise/plugins/smpp.py:114 +#: apprise/plugins/smsmanager.py:137 apprise/plugins/twilio.py:154 +#: apprise/plugins/voipms.py:94 apprise/plugins/vonage.py:94 +msgid "From Phone No" +msgstr "" + +#: apprise/plugins/bulksms.py:158 +msgid "Route Group" +msgstr "" + +#: apprise/plugins/bulksms.py:165 apprise/plugins/d7networks.py:123 +msgid "Unicode Characters" +msgstr "" + +#: apprise/plugins/burstsms.py:111 apprise/plugins/threema.py:92 +#: apprise/plugins/vonage.py:87 +msgid "API Secret" +msgstr "" + +#: apprise/plugins/burstsms.py:118 +msgid "Sender ID" +msgstr "" + +#: apprise/plugins/burstsms.py:155 +msgid "Country" +msgstr "" + +#: apprise/plugins/burstsms.py:164 +msgid "validity" +msgstr "" + +#: apprise/plugins/chanify.py:47 +msgid "Chanify" +msgstr "" + +#: apprise/plugins/clickatell.py:45 +msgid "Clickatell" +msgstr "" + +#: apprise/plugins/clickatell.py:72 apprise/plugins/rocketchat.py:140 +msgid "API Token" +msgstr "" + +#: apprise/plugins/custom_form.py:148 apprise/plugins/custom_json.py:130 +#: apprise/plugins/custom_xml.py:130 +msgid "Fetch Method" +msgstr "" + +#: apprise/plugins/custom_form.py:154 +msgid "Attach File As" +msgstr "" + +#: apprise/plugins/custom_form.py:169 apprise/plugins/custom_json.py:145 +#: apprise/plugins/custom_xml.py:145 apprise/plugins/pagertree.py:146 +msgid "Payload Extras" +msgstr "" + +#: apprise/plugins/custom_form.py:173 apprise/plugins/custom_json.py:149 +#: apprise/plugins/custom_xml.py:149 +msgid "GET Params" +msgstr "" + +#: apprise/plugins/d7networks.py:97 +msgid "API Access Token" +msgstr "" + +#: apprise/plugins/d7networks.py:141 apprise/plugins/seven.py:107 +msgid "Originating Address" +msgstr "" + +#: apprise/plugins/dapnet.py:155 apprise/plugins/gotify.py:153 +#: apprise/plugins/growl.py:163 apprise/plugins/join.py:188 +#: apprise/plugins/lametric.py:494 apprise/plugins/ntfy.py:282 +#: apprise/plugins/opsgenie.py:293 apprise/plugins/prowl.py:141 +#: apprise/plugins/pushover.py:217 
apprise/plugins/pushsafer.py:387 +#: apprise/plugins/smseagle.py:210 +msgid "Priority" +msgstr "" + +#: apprise/plugins/dapnet.py:161 +msgid "Transmitter Groups" +msgstr "" + +#: apprise/plugins/dbus.py:153 +msgid "libdbus-1.so.x must be installed." +msgstr "" + +#: apprise/plugins/dbus.py:157 apprise/plugins/glib.py:126 +msgid "DBus Notification" +msgstr "" + +#: apprise/plugins/dbus.py:201 apprise/plugins/glib.py:163 +#: apprise/plugins/gnome.py:142 apprise/plugins/pagertree.py:128 +msgid "Urgency" +msgstr "" + +#: apprise/plugins/dbus.py:213 apprise/plugins/glib.py:175 +msgid "X-Axis" +msgstr "" + +#: apprise/plugins/dbus.py:219 apprise/plugins/glib.py:181 +msgid "Y-Axis" +msgstr "" + +#: apprise/plugins/dingtalk.py:100 apprise/plugins/signl4.py:76 +msgid "Secret" +msgstr "" + +#: apprise/plugins/discord.py:125 apprise/plugins/flock.py:106 +#: apprise/plugins/fluxer.py:169 apprise/plugins/ryver.py:106 +#: apprise/plugins/slack.py:186 apprise/plugins/viber.py:95 +#: apprise/plugins/zulip.py:124 +msgid "Bot Name" +msgstr "" + +#: apprise/plugins/discord.py:130 apprise/plugins/fluxer.py:174 +#: apprise/plugins/ifttt.py:103 +msgid "Webhook ID" +msgstr "" + +#: apprise/plugins/discord.py:136 apprise/plugins/fluxer.py:181 +#: apprise/plugins/google_chat.py:118 +msgid "Webhook Token" +msgstr "" + +#: apprise/plugins/discord.py:149 apprise/plugins/fluxer.py:201 +msgid "Text To Speech" +msgstr "" + +#: apprise/plugins/discord.py:154 apprise/plugins/fluxer.py:206 +msgid "Avatar Image" +msgstr "" + +#: apprise/plugins/discord.py:159 apprise/plugins/fluxer.py:211 +#: apprise/plugins/ntfy.py:262 +msgid "Avatar URL" +msgstr "" + +#: apprise/plugins/discord.py:163 apprise/plugins/fluxer.py:215 +#: apprise/plugins/pushover.py:229 +msgid "URL" +msgstr "" + +#: apprise/plugins/discord.py:172 apprise/plugins/fluxer.py:222 +msgid "Thread ID" +msgstr "" + +#: apprise/plugins/discord.py:176 apprise/plugins/fluxer.py:230 +msgid "Display Footer" +msgstr "" + +#: 
apprise/plugins/discord.py:181 apprise/plugins/fluxer.py:235 +msgid "Footer Logo" +msgstr "" + +#: apprise/plugins/discord.py:186 apprise/plugins/fluxer.py:240 +msgid "Use Fields" +msgstr "" + +#: apprise/plugins/discord.py:191 apprise/plugins/fluxer.py:245 +msgid "Discord Flags" +msgstr "" + +#: apprise/plugins/discord.py:205 apprise/plugins/fluxer.py:256 +msgid "Ping Users/Roles" +msgstr "" + +#: apprise/plugins/dot.py:127 +msgid "Device Serial Number" +msgstr "" + +#: apprise/plugins/dot.py:133 +msgid "API Mode" +msgstr "" + +#: apprise/plugins/dot.py:147 +msgid "Refresh Now" +msgstr "" + +#: apprise/plugins/dot.py:153 +msgid "Text Signature" +msgstr "" + +#: apprise/plugins/dot.py:157 +msgid "Icon Base64 (Text API)" +msgstr "" + +#: apprise/plugins/dot.py:161 +msgid "Image Base64 (Image API)" +msgstr "" + +#: apprise/plugins/dot.py:166 +msgid "Link" +msgstr "" + +#: apprise/plugins/dot.py:170 +msgid "Border" +msgstr "" + +#: apprise/plugins/dot.py:177 +msgid "Dither Type" +msgstr "" + +#: apprise/plugins/dot.py:183 +msgid "Dither Kernel" +msgstr "" + +#: apprise/plugins/emby.py:112 +msgid "Modal" +msgstr "" + +#: apprise/plugins/enigma2.py:130 apprise/plugins/gotify.py:134 +#: apprise/plugins/mattermost.py:163 apprise/plugins/notica.py:145 +msgid "Path" +msgstr "" + +#: apprise/plugins/enigma2.py:140 +msgid "Server Timeout" +msgstr "" + +#: apprise/plugins/feishu.py:49 +msgid "Feishu" +msgstr "" + +#: apprise/plugins/flock.py:99 apprise/plugins/twitter.py:150 +msgid "Access Key" +msgstr "" + +#: apprise/plugins/flock.py:111 +msgid "To User ID" +msgstr "" + +#: apprise/plugins/flock.py:118 +msgid "To Channel ID" +msgstr "" + +#: apprise/plugins/fcm/__init__.py:182 apprise/plugins/fcm/__init__.py:188 +#: apprise/plugins/fluxer.py:195 apprise/plugins/lametric.py:510 +#: apprise/plugins/mattermost.py:223 apprise/plugins/notificationapi.py:209 +#: apprise/plugins/ntfy.py:296 apprise/plugins/vapid/__init__.py:169 +msgid "Mode" +msgstr "" + +#: 
apprise/plugins/fluxer.py:226 +msgid "Thread Name" +msgstr "" + +#: apprise/plugins/fortysixelks.py:58 +msgid "46elks" +msgstr "" + +#: apprise/plugins/fortysixelks.py:89 +msgid "API Username" +msgstr "" + +#: apprise/plugins/fortysixelks.py:94 +msgid "API Password" +msgstr "" + +#: apprise/plugins/freemobile.py:48 +msgid "Free-Mobile" +msgstr "" + +#: apprise/plugins/glib.py:122 +msgid "libdbus-1.so.x or libdbus-2.so.x must be installed." +msgstr "" + +#: apprise/plugins/gnome.py:100 +msgid "A local Gnome environment is required." +msgstr "" + +#: apprise/plugins/gnome.py:104 +msgid "Gnome Notification" +msgstr "" + +#: apprise/plugins/google_chat.py:106 +msgid "Workspace" +msgstr "" + +#: apprise/plugins/google_chat.py:112 +msgid "Webhook Key" +msgstr "" + +#: apprise/plugins/google_chat.py:124 +msgid "Thread Key" +msgstr "" + +#: apprise/plugins/growl.py:169 apprise/plugins/mqtt.py:195 +#: apprise/plugins/msteams.py:206 apprise/plugins/nextcloud.py:163 +msgid "Version" +msgstr "" + +#: apprise/plugins/growl.py:181 +msgid "Sticky" +msgstr "" + +#: apprise/plugins/home_assistant.py:99 +msgid "Long-Lived Access Token" +msgstr "" + +#: apprise/plugins/home_assistant.py:113 +msgid "Notification ID" +msgstr "" + +#: apprise/plugins/ifttt.py:109 +msgid "Events" +msgstr "" + +#: apprise/plugins/ifttt.py:129 +msgid "Add Tokens" +msgstr "" + +#: apprise/plugins/ifttt.py:133 +msgid "Remove Tokens" +msgstr "" + +#: apprise/plugins/join.py:147 +msgid "Device ID" +msgstr "" + +#: apprise/plugins/join.py:153 +msgid "Device Name" +msgstr "" + +#: apprise/plugins/kavenegar.py:122 apprise/plugins/messagebird.py:85 +#: apprise/plugins/plivo.py:100 +msgid "Source Phone No" +msgstr "" + +#: apprise/plugins/kumulos.py:99 +msgid "Server Key" +msgstr "" + +#: apprise/plugins/lametric.py:436 +msgid "Device API Key" +msgstr "" + +#: apprise/plugins/lametric.py:442 apprise/plugins/one_signal.py:102 +#: apprise/plugins/parseplatform.py:101 +msgid "App ID" +msgstr "" + +#: 
apprise/plugins/lametric.py:448 +msgid "App Version" +msgstr "" + +#: apprise/plugins/lametric.py:455 +msgid "App Access Token" +msgstr "" + +#: apprise/plugins/lametric.py:500 +msgid "Custom Icon" +msgstr "" + +#: apprise/plugins/lametric.py:504 +msgid "Icon Type" +msgstr "" + +#: apprise/plugins/lametric.py:521 +msgid "Cycles" +msgstr "" + +#: apprise/plugins/lark.py:47 +msgid "Lark (Feishu)" +msgstr "" + +#: apprise/plugins/lark.py:67 apprise/plugins/revolt.py:98 +#: apprise/plugins/telegram.py:344 +msgid "Bot Token" +msgstr "" + +#: apprise/plugins/line.py:82 apprise/plugins/mastodon.py:173 +#: apprise/plugins/matrix.py:239 apprise/plugins/misskey.py:122 +#: apprise/plugins/pushbullet.py:83 apprise/plugins/pushover.py:194 +#: apprise/plugins/smseagle.py:146 apprise/plugins/spugpush.py:68 +#: apprise/plugins/streamlabs.py:105 apprise/plugins/whatsapp.py:99 +msgid "Access Token" +msgstr "" + +#: apprise/plugins/irc/base.py:137 apprise/plugins/line.py:88 +#: apprise/plugins/mastodon.py:184 apprise/plugins/matrix.py:244 +#: apprise/plugins/nextcloud.py:136 apprise/plugins/one_signal.py:129 +#: apprise/plugins/opsgenie.py:258 apprise/plugins/pushed.py:95 +#: apprise/plugins/rocketchat.py:155 apprise/plugins/slack.py:236 +#: apprise/plugins/twitter.py:162 apprise/plugins/zulip.py:143 +msgid "Target User" +msgstr "" + +#: apprise/plugins/macosx.py:65 +msgid "" +"Only works with Mac OS X 10.8 and higher. Additionally requires that " +"/usr/local/bin/terminal-notifier is locally accessible." 
+msgstr "" + +#: apprise/plugins/macosx.py:72 +msgid "MacOSX Notification" +msgstr "" + +#: apprise/plugins/macosx.py:128 +msgid "Open/Click URL" +msgstr "" + +#: apprise/plugins/email/base.py:124 apprise/plugins/mailgun.py:141 +#: apprise/plugins/sendpulse.py:112 apprise/plugins/smtp2go.py:113 +#: apprise/plugins/sparkpost.py:164 +msgid "Domain" +msgstr "" + +#: apprise/plugins/email/base.py:160 apprise/plugins/mailgun.py:168 +#: apprise/plugins/resend.py:154 apprise/plugins/ses.py:201 +#: apprise/plugins/smtp2go.py:135 apprise/plugins/sparkpost.py:186 +msgid "From Name" +msgstr "" + +#: apprise/plugins/mailgun.py:176 apprise/plugins/notificationapi.py:203 +#: apprise/plugins/opsgenie.py:281 apprise/plugins/pagerduty.py:176 +#: apprise/plugins/sparkpost.py:191 +msgid "Region Name" +msgstr "" + +#: apprise/plugins/email/base.py:209 apprise/plugins/mailgun.py:204 +#: apprise/plugins/smtp2go.py:161 apprise/plugins/sparkpost.py:219 +msgid "Email Header" +msgstr "" + +#: apprise/plugins/mailgun.py:208 apprise/plugins/msteams.py:222 +#: apprise/plugins/notificationapi.py:246 apprise/plugins/sparkpost.py:223 +#: apprise/plugins/workflows.py:201 +msgid "Template Tokens" +msgstr "" + +#: apprise/plugins/mastodon.py:204 apprise/plugins/misskey.py:143 +msgid "Visibility" +msgstr "" + +#: apprise/plugins/mastodon.py:210 apprise/plugins/twitter.py:185 +msgid "Cache Results" +msgstr "" + +#: apprise/plugins/mastodon.py:220 +msgid "Sensitive Attachments" +msgstr "" + +#: apprise/plugins/mastodon.py:225 +msgid "Spoiler Text" +msgstr "" + +#: apprise/plugins/mastodon.py:229 +msgid "Idempotency-Key" +msgstr "" + +#: apprise/plugins/mastodon.py:233 +msgid "Language Code" +msgstr "" + +#: apprise/plugins/matrix.py:250 apprise/plugins/rocketchat.py:161 +msgid "Target Room ID" +msgstr "" + +#: apprise/plugins/matrix.py:256 +msgid "Target Room Alias" +msgstr "" + +#: apprise/plugins/matrix.py:279 +msgid "Server Discovery" +msgstr "" + +#: apprise/plugins/matrix.py:284 +msgid "Force Home 
Server on Room IDs" +msgstr "" + +#: apprise/plugins/matrix.py:289 apprise/plugins/rocketchat.py:177 +#: apprise/plugins/ryver.py:118 +msgid "Webhook Mode" +msgstr "" + +#: apprise/plugins/matrix.py:295 +msgid "Matrix API Verion" +msgstr "" + +#: apprise/plugins/matrix.py:301 apprise/plugins/notificationapi.py:154 +msgid "Message Type" +msgstr "" + +#: apprise/plugins/irc/base.py:128 apprise/plugins/mattermost.py:147 +#: apprise/plugins/xmpp/base.py:107 +msgid "User" +msgstr "" + +#: apprise/plugins/irc/base.py:143 apprise/plugins/mattermost.py:173 +#: apprise/plugins/notifiarr.py:97 apprise/plugins/pushbullet.py:94 +#: apprise/plugins/pushed.py:101 apprise/plugins/rocketchat.py:149 +#: apprise/plugins/slack.py:242 apprise/plugins/twist.py:112 +msgid "Target Channel" +msgstr "" + +#: apprise/plugins/mattermost.py:179 apprise/plugins/twist.py:118 +msgid "Target Channel ID" +msgstr "" + +#: apprise/plugins/mqtt.py:169 +msgid "Target Queue" +msgstr "" + +#: apprise/plugins/mqtt.py:188 +msgid "QOS" +msgstr "" + +#: apprise/plugins/mqtt.py:201 apprise/plugins/notificationapi.py:161 +#: apprise/plugins/office365.py:130 apprise/plugins/sendpulse.py:117 +msgid "Client ID" +msgstr "" + +#: apprise/plugins/mqtt.py:205 +msgid "Use Session" +msgstr "" + +#: apprise/plugins/mqtt.py:210 +msgid "Retain Messages" +msgstr "" + +#: apprise/plugins/msg91.py:102 apprise/plugins/sendpulse.py:161 +msgid "Template ID" +msgstr "" + +#: apprise/plugins/msg91.py:109 +msgid "Authentication Key" +msgstr "" + +#: apprise/plugins/msg91.py:138 +msgid "Short URL" +msgstr "" + +#: apprise/plugins/msg91.py:148 apprise/plugins/whatsapp.py:168 +msgid "Template Mapping" +msgstr "" + +#: apprise/plugins/msteams.py:151 +msgid "Team Name" +msgstr "" + +#: apprise/plugins/msteams.py:159 apprise/plugins/slack.py:203 +msgid "Token A" +msgstr "" + +#: apprise/plugins/msteams.py:168 apprise/plugins/slack.py:211 +msgid "Token B" +msgstr "" + +#: apprise/plugins/msteams.py:177 apprise/plugins/slack.py:219 
+msgid "Token C" +msgstr "" + +#: apprise/plugins/msteams.py:186 +msgid "Token D" +msgstr "" + +#: apprise/plugins/msteams.py:212 apprise/plugins/workflows.py:180 +msgid "Template Path" +msgstr "" + +#: apprise/plugins/nextcloud.py:169 apprise/plugins/nextcloudtalk.py:113 +msgid "URL Prefix" +msgstr "" + +#: apprise/plugins/nextcloudtalk.py:43 +msgid "Nextcloud Talk" +msgstr "" + +#: apprise/plugins/nextcloudtalk.py:96 +msgid "Room ID" +msgstr "" + +#: apprise/plugins/notifiarr.py:121 +msgid "Discord Event ID" +msgstr "" + +#: apprise/plugins/notifiarr.py:131 apprise/plugins/pagerduty.py:145 +msgid "Source" +msgstr "" + +#: apprise/plugins/notificationapi.py:166 apprise/plugins/office365.py:137 +#: apprise/plugins/sendpulse.py:124 +msgid "Client Secret" +msgstr "" + +#: apprise/plugins/notificationapi.py:177 +msgid "Target ID" +msgstr "" + +#: apprise/plugins/notificationapi.py:182 +msgid "Target SMS" +msgstr "" + +#: apprise/plugins/notificationapi.py:198 +msgid "Channels" +msgstr "" + +#: apprise/plugins/email/base.py:185 apprise/plugins/notificationapi.py:226 +#: apprise/plugins/resend.py:146 +msgid "Reply To" +msgstr "" + +#: apprise/plugins/email/base.py:155 apprise/plugins/notificationapi.py:231 +#: apprise/plugins/sendpulse.py:147 apprise/plugins/ses.py:154 +msgid "From Email" +msgstr "" + +#: apprise/plugins/fcm/__init__.py:153 apprise/plugins/notifico.py:124 +msgid "Project ID" +msgstr "" + +#: apprise/plugins/notifico.py:133 +msgid "Message Hook" +msgstr "" + +#: apprise/plugins/notifico.py:148 +msgid "IRC Colors" +msgstr "" + +#: apprise/plugins/notifico.py:154 +msgid "Prefix" +msgstr "" + +#: apprise/plugins/ntfy.py:235 +msgid "Topic" +msgstr "" + +#: apprise/plugins/ntfy.py:252 +msgid "Attach" +msgstr "" + +#: apprise/plugins/ntfy.py:266 +msgid "Attach Filename" +msgstr "" + +#: apprise/plugins/ntfy.py:274 +msgid "Delay" +msgstr "" + +#: apprise/plugins/ntfy.py:278 apprise/plugins/twist.py:107 +msgid "Email" +msgstr "" + +#: apprise/plugins/ntfy.py:292 
+msgid "Actions" +msgstr "" + +#: apprise/plugins/ntfy.py:305 +msgid "Authentication Type" +msgstr "" + +#: apprise/plugins/office365.py:118 +msgid "Tenant Domain" +msgstr "" + +#: apprise/plugins/office365.py:125 +msgid "Account Email or Object ID" +msgstr "" + +#: apprise/plugins/one_signal.py:108 apprise/plugins/sendgrid.py:157 +msgid "Template" +msgstr "" + +#: apprise/plugins/one_signal.py:119 +msgid "Target Player ID" +msgstr "" + +#: apprise/plugins/one_signal.py:135 +msgid "Include Segment" +msgstr "" + +#: apprise/plugins/one_signal.py:166 +msgid "Enable Contents" +msgstr "" + +#: apprise/plugins/one_signal.py:172 +msgid "Decode Template Args" +msgstr "" + +#: apprise/plugins/one_signal.py:181 +msgid "Subtitle" +msgstr "" + +#: apprise/plugins/one_signal.py:185 apprise/plugins/sfr.py:125 +#: apprise/plugins/whatsapp.py:130 +msgid "Language" +msgstr "" + +#: apprise/plugins/one_signal.py:195 +msgid "Custom Data" +msgstr "" + +#: apprise/plugins/one_signal.py:199 +msgid "Postback Data" +msgstr "" + +#: apprise/plugins/opsgenie.py:246 +msgid "Target Escalation" +msgstr "" + +#: apprise/plugins/opsgenie.py:252 +msgid "Target Schedule" +msgstr "" + +#: apprise/plugins/opsgenie.py:264 +msgid "Target Team" +msgstr "" + +#: apprise/plugins/opsgenie.py:270 +msgid "Targets " +msgstr "" + +#: apprise/plugins/opsgenie.py:299 +msgid "Entity" +msgstr "" + +#: apprise/plugins/opsgenie.py:303 +msgid "Alias" +msgstr "" + +#: apprise/plugins/opsgenie.py:314 apprise/plugins/pagertree.py:118 +#: apprise/plugins/splunk.py:202 +msgid "Action" +msgstr "" + +#: apprise/plugins/opsgenie.py:325 +msgid "Details" +msgstr "" + +#: apprise/plugins/opsgenie.py:329 apprise/plugins/splunk.py:213 +msgid "Action Mapping" +msgstr "" + +#: apprise/plugins/pagerduty.py:138 apprise/plugins/spike.py:68 +msgid "Integration Key" +msgstr "" + +#: apprise/plugins/pagerduty.py:151 +msgid "Component" +msgstr "" + +#: apprise/plugins/pagerduty.py:167 +msgid "Class" +msgstr "" + +#: 
apprise/plugins/pagerduty.py:185 +msgid "Severity" +msgstr "" + +#: apprise/plugins/pagerduty.py:202 +msgid "Custom Details" +msgstr "" + +#: apprise/plugins/pagertree.py:105 +msgid "Integration ID" +msgstr "" + +#: apprise/plugins/pagertree.py:124 +msgid "Third Party ID" +msgstr "" + +#: apprise/plugins/pagertree.py:150 +msgid "Meta Extras" +msgstr "" + +#: apprise/plugins/parseplatform.py:107 +msgid "Master Key" +msgstr "" + +#: apprise/plugins/parseplatform.py:120 +msgid "Device" +msgstr "" + +#: apprise/plugins/plivo.py:88 +msgid "Auth ID" +msgstr "" + +#: apprise/plugins/plivo.py:94 apprise/plugins/sinch.py:113 +#: apprise/plugins/twilio.py:147 +msgid "Auth Token" +msgstr "" + +#: apprise/plugins/prowl.py:128 +msgid "Provider Key" +msgstr "" + +#: apprise/plugins/pushdeer.py:85 +msgid "Pushkey" +msgstr "" + +#: apprise/plugins/pushed.py:83 +msgid "Application Key" +msgstr "" + +#: apprise/plugins/pushed.py:89 apprise/plugins/reddit.py:158 +msgid "Application Secret" +msgstr "" + +#: apprise/plugins/pushjet.py:82 +msgid "Secret Key" +msgstr "" + +#: apprise/plugins/pushme.py:81 apprise/plugins/signal_api.py:160 +#: apprise/plugins/smseagle.py:195 +msgid "Show Status" +msgstr "" + +#: apprise/plugins/pushover.py:188 +msgid "User Key" +msgstr "" + +#: apprise/plugins/pushover.py:234 +msgid "URL Title" +msgstr "" + +#: apprise/plugins/pushover.py:239 +msgid "Retry" +msgstr "" + +#: apprise/plugins/pushover.py:245 +msgid "Expire" +msgstr "" + +#: apprise/plugins/pushplus.py:48 +msgid "Pushplus" +msgstr "" + +#: apprise/plugins/pushplus.py:68 apprise/plugins/qq.py:66 +msgid "User Token" +msgstr "" + +#: apprise/plugins/pushsafer.py:360 +msgid "Private Key" +msgstr "" + +#: apprise/plugins/pushsafer.py:397 +msgid "Vibration" +msgstr "" + +#: apprise/plugins/pushy.py:79 +msgid "Secret API Key" +msgstr "" + +#: apprise/plugins/fcm/__init__.py:162 apprise/plugins/pushy.py:91 +#: apprise/plugins/sns.py:131 apprise/plugins/wxpusher.py:128 +msgid "Target Topic" +msgstr "" 
+ +#: apprise/plugins/qq.py:46 +msgid "QQ Push" +msgstr "" + +#: apprise/plugins/reddit.py:151 +msgid "Application ID" +msgstr "" + +#: apprise/plugins/reddit.py:165 +msgid "Target Subreddit" +msgstr "" + +#: apprise/plugins/reddit.py:185 +msgid "Kind" +msgstr "" + +#: apprise/plugins/reddit.py:191 +msgid "Flair ID" +msgstr "" + +#: apprise/plugins/reddit.py:196 +msgid "Flair Text" +msgstr "" + +#: apprise/plugins/reddit.py:201 +msgid "NSFW" +msgstr "" + +#: apprise/plugins/reddit.py:207 +msgid "Is Ad?" +msgstr "" + +#: apprise/plugins/reddit.py:213 +msgid "Send Replies" +msgstr "" + +#: apprise/plugins/reddit.py:219 +msgid "Is Spoiler" +msgstr "" + +#: apprise/plugins/reddit.py:225 +msgid "Resubmit Flag" +msgstr "" + +#: apprise/plugins/revolt.py:104 +msgid "Channel ID" +msgstr "" + +#: apprise/plugins/revolt.py:130 +msgid "Embed URL" +msgstr "" + +#: apprise/plugins/rocketchat.py:145 +msgid "Webhook" +msgstr "" + +#: apprise/plugins/rocketchat.py:182 +msgid "Use Avatar" +msgstr "" + +#: apprise/plugins/rsyslog.py:173 apprise/plugins/syslog.py:144 +msgid "Facility" +msgstr "" + +#: apprise/plugins/rsyslog.py:203 apprise/plugins/syslog.py:161 +msgid "Log PID" +msgstr "" + +#: apprise/plugins/ryver.py:93 apprise/plugins/zulip.py:130 +msgid "Organization" +msgstr "" + +#: apprise/plugins/sendgrid.py:166 apprise/plugins/sendpulse.py:175 +msgid "Template Data" +msgstr "" + +#: apprise/plugins/ses.py:160 apprise/plugins/sns.py:106 +msgid "Access Key ID" +msgstr "" + +#: apprise/plugins/ses.py:166 apprise/plugins/sns.py:112 +msgid "Secret Access Key" +msgstr "" + +#: apprise/plugins/ses.py:172 apprise/plugins/sinch.py:160 +#: apprise/plugins/sns.py:118 +msgid "Region" +msgstr "" + +#: apprise/plugins/ses.py:179 apprise/plugins/smtp2go.py:124 +#: apprise/plugins/sparkpost.py:175 +msgid "Target Emails" +msgstr "" + +#: apprise/plugins/seven.py:115 apprise/plugins/smseagle.py:205 +msgid "Flash" +msgstr "" + +#: apprise/plugins/seven.py:119 +msgid "Label" +msgstr "" + +#: 
apprise/plugins/sfr.py:58 +msgid "Société Française du Radiotéléphone" +msgstr "" + +#: apprise/plugins/sfr.py:90 +msgid "Service ID" +msgstr "" + +#: apprise/plugins/sfr.py:95 +msgid "Service Password" +msgstr "" + +#: apprise/plugins/sfr.py:101 +msgid "Space ID" +msgstr "" + +#: apprise/plugins/sfr.py:107 +msgid "Recipient Phone Number" +msgstr "" + +#: apprise/plugins/sfr.py:131 +msgid "Sender Name" +msgstr "" + +#: apprise/plugins/sfr.py:138 +msgid "Media Type" +msgstr "" + +#: apprise/plugins/sfr.py:145 +msgid "Timeout" +msgstr "" + +#: apprise/plugins/sfr.py:151 +msgid "TTS Voice" +msgstr "" + +#: apprise/plugins/signal_api.py:131 apprise/plugins/smseagle.py:158 +msgid "Target Group ID" +msgstr "" + +#: apprise/plugins/signl4.py:86 +msgid "Service" +msgstr "" + +#: apprise/plugins/signl4.py:90 +msgid "Location" +msgstr "" + +#: apprise/plugins/signl4.py:94 +msgid "Alerting Scenario" +msgstr "" + +#: apprise/plugins/signl4.py:98 +msgid "Filtering" +msgstr "" + +#: apprise/plugins/signl4.py:103 +msgid "External ID" +msgstr "" + +#: apprise/plugins/signl4.py:107 +msgid "Status" +msgstr "" + +#: apprise/plugins/simplepush.py:113 +msgid "Salt" +msgstr "" + +#: apprise/plugins/simplepush.py:126 +msgid "Event" +msgstr "" + +#: apprise/plugins/sinch.py:106 apprise/plugins/twilio.py:140 +msgid "Account SID" +msgstr "" + +#: apprise/plugins/sinch.py:134 apprise/plugins/twilio.py:168 +msgid "Target Short Code" +msgstr "" + +#: apprise/plugins/slack.py:194 +msgid "OAuth Access Token" +msgstr "" + +#: apprise/plugins/slack.py:225 +msgid "Target Encoded ID" +msgstr "" + +#: apprise/plugins/slack.py:265 +msgid "Include Footer" +msgstr "" + +#: apprise/plugins/slack.py:273 +msgid "Use Blocks" +msgstr "" + +#: apprise/plugins/slack.py:282 +msgid "Include Timestamp" +msgstr "" + +#: apprise/plugins/slack.py:288 apprise/plugins/twitter.py:179 +msgid "Message Mode" +msgstr "" + +#: apprise/plugins/smpp.py:61 +msgid "SMPP" +msgstr "" + +#: apprise/plugins/smpp.py:103 +msgid 
"Host" +msgstr "" + +#: apprise/plugins/smseagle.py:165 +msgid "Target Contact" +msgstr "" + +#: apprise/plugins/smseagle.py:200 +msgid "Test Only" +msgstr "" + +#: apprise/plugins/smsmanager.py:146 +msgid "Gateway" +msgstr "" + +#: apprise/plugins/spike.py:48 +msgid "Spike.sh" +msgstr "" + +#: apprise/plugins/splunk.py:117 +msgid "Splunk On-Call" +msgstr "" + +#: apprise/plugins/splunk.py:172 +msgid "Target Routing Key" +msgstr "" + +#: apprise/plugins/splunk.py:179 +msgid "Entity ID" +msgstr "" + +#: apprise/plugins/spugpush.py:48 +msgid "SpugPush" +msgstr "" + +#: apprise/plugins/streamlabs.py:125 +msgid "Alert Type" +msgstr "" + +#: apprise/plugins/streamlabs.py:131 +msgid "Image Link" +msgstr "" + +#: apprise/plugins/streamlabs.py:136 +msgid "Sound Link" +msgstr "" + +#: apprise/plugins/streamlabs.py:141 apprise/plugins/windows.py:100 +#: apprise/plugins/xbmc.py:123 +msgid "Duration" +msgstr "" + +#: apprise/plugins/streamlabs.py:147 +msgid "Special Text Color" +msgstr "" + +#: apprise/plugins/streamlabs.py:153 +msgid "Amount" +msgstr "" + +#: apprise/plugins/streamlabs.py:159 +msgid "Currency" +msgstr "" + +#: apprise/plugins/streamlabs.py:165 +msgid "Name" +msgstr "" + +#: apprise/plugins/streamlabs.py:171 +msgid "Identifier" +msgstr "" + +#: apprise/plugins/synology.py:116 +msgid "Upload" +msgstr "" + +#: apprise/plugins/syslog.py:167 +msgid "Log to STDERR" +msgstr "" + +#: apprise/plugins/telegram.py:353 +msgid "Target Chat ID" +msgstr "" + +#: apprise/plugins/telegram.py:376 +msgid "Detect Bot Owner" +msgstr "" + +#: apprise/plugins/telegram.py:382 +msgid "Silent Notification" +msgstr "" + +#: apprise/plugins/telegram.py:387 +msgid "Web Page Preview" +msgstr "" + +#: apprise/plugins/telegram.py:392 +msgid "Topic Thread ID" +msgstr "" + +#: apprise/plugins/telegram.py:399 +msgid "Markdown Version" +msgstr "" + +#: apprise/plugins/telegram.py:408 +msgid "Content Placement" +msgstr "" + +#: apprise/plugins/threema.py:85 +msgid "Gateway ID" +msgstr "" + +#: 
apprise/plugins/threema.py:110 +msgid "Target Threema ID" +msgstr "" + +#: apprise/plugins/twilio.py:203 +msgid "Notification Method: sms or call" +msgstr "" + +#: apprise/plugins/twitter.py:138 +msgid "Consumer Key" +msgstr "" + +#: apprise/plugins/twitter.py:144 +msgid "Consumer Secret" +msgstr "" + +#: apprise/plugins/twitter.py:156 +msgid "Access Secret" +msgstr "" + +#: apprise/plugins/viber.py:49 +msgid "Viber" +msgstr "" + +#: apprise/plugins/viber.py:81 +msgid "Authentication Token" +msgstr "" + +#: apprise/plugins/viber.py:87 +msgid "Receiver IDs" +msgstr "" + +#: apprise/plugins/viber.py:101 +msgid "Bot Avatar URL" +msgstr "" + +#: apprise/plugins/voipms.py:83 +msgid "User Email" +msgstr "" + +#: apprise/plugins/vapid/__init__.py:179 apprise/plugins/vonage.py:136 +msgid "ttl" +msgstr "" + +#: apprise/plugins/wecombot.py:99 +msgid "Bot Webhook Key" +msgstr "" + +#: apprise/plugins/whatsapp.py:106 +msgid "Template Name" +msgstr "" + +#: apprise/plugins/whatsapp.py:112 +msgid "From Phone ID" +msgstr "" + +#: apprise/plugins/windows.py:62 +msgid "A local Microsoft Windows environment is required." 
+msgstr "" + +#: apprise/plugins/workflows.py:137 +msgid "Workflow ID" +msgstr "" + +#: apprise/plugins/workflows.py:145 +msgid "Signature" +msgstr "" + +#: apprise/plugins/workflows.py:168 +msgid "Use Power Automate URL" +msgstr "" + +#: apprise/plugins/workflows.py:175 +msgid "Wrap Text" +msgstr "" + +#: apprise/plugins/workflows.py:190 +msgid "API Version" +msgstr "" + +#: apprise/plugins/wxpusher.py:121 +msgid "App Token" +msgstr "" + +#: apprise/plugins/wxpusher.py:133 +msgid "Target User ID" +msgstr "" + +#: apprise/plugins/zulip.py:148 +msgid "Target Stream" +msgstr "" + +#: apprise/plugins/email/base.py:150 +msgid "To Email" +msgstr "" + +#: apprise/plugins/email/base.py:173 +msgid "SMTP Server" +msgstr "" + +#: apprise/plugins/email/base.py:178 apprise/plugins/xmpp/base.py:129 +msgid "Secure Mode" +msgstr "" + +#: apprise/plugins/email/base.py:190 +msgid "PGP Encryption" +msgstr "" + +#: apprise/plugins/email/base.py:196 +msgid "PGP Public Key Path" +msgstr "" + +#: apprise/plugins/fcm/__init__.py:148 +msgid "OAuth2 KeyFile" +msgstr "" + +#: apprise/plugins/fcm/__init__.py:193 +msgid "Custom Image URL" +msgstr "" + +#: apprise/plugins/fcm/__init__.py:205 +msgid "Notification Color" +msgstr "" + +#: apprise/plugins/fcm/__init__.py:215 +msgid "Data Entries" +msgstr "" + +#: apprise/plugins/irc/base.py:159 +msgid "Real Name" +msgstr "" + +#: apprise/plugins/irc/base.py:160 +msgid "Nickname" +msgstr "" + +#: apprise/plugins/irc/base.py:162 +msgid "Join Channels" +msgstr "" + +#: apprise/plugins/irc/base.py:167 +msgid "Auth Mode" +msgstr "" + +#: apprise/plugins/vapid/__init__.py:193 +msgid "PEM Private KeyFile" +msgstr "" + +#: apprise/plugins/vapid/__init__.py:199 +msgid "Subscripion File" +msgstr "" + +#: apprise/plugins/xmpp/base.py:136 +msgid "Get Roster" +msgstr "" + +#: apprise/plugins/xmpp/base.py:141 +msgid "Use Subject" +msgstr "" + +#: apprise/plugins/xmpp/base.py:146 +msgid "Keep Connection Alive" +msgstr "" + diff --git 
a/libs/apprise/i18n/en/LC_MESSAGES/apprise.mo b/libs/apprise/i18n/en/LC_MESSAGES/apprise.mo index 8eafd65129..01f1f728d3 100644 Binary files a/libs/apprise/i18n/en/LC_MESSAGES/apprise.mo and b/libs/apprise/i18n/en/LC_MESSAGES/apprise.mo differ diff --git a/libs/apprise/i18n/en/LC_MESSAGES/apprise.po b/libs/apprise/i18n/en/LC_MESSAGES/apprise.po new file mode 100644 index 0000000000..22a1923350 --- /dev/null +++ b/libs/apprise/i18n/en/LC_MESSAGES/apprise.po @@ -0,0 +1,1882 @@ +# English translations for apprise. +# Copyright (C) 2026 Chris Caron +# This file is distributed under the same license as the apprise project. +# Chris Caron , 2026. +# +msgid "" +msgstr "" +"Project-Id-Version: apprise 1.9.8\n" +"Report-Msgid-Bugs-To: lead2gold@gmail.com\n" +"POT-Creation-Date: 2026-03-08 16:43-0400\n" +"PO-Revision-Date: 2019-05-24 20:00-0400\n" +"Last-Translator: Chris Caron \n" +"Language: en\n" +"Language-Team: en \n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=utf-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: Babel 2.18.0\n" + +#: apprise/attachment/base.py:96 apprise/url.py:141 +msgid "Verify SSL" +msgstr "Verify SSL" + +#: apprise/url.py:151 +#, fuzzy +msgid "Socket Read Timeout" +msgstr "Server Timeout" + +#: apprise/url.py:165 +#, fuzzy +msgid "Socket Connect Timeout" +msgstr "Server Timeout" + +#: apprise/attachment/base.py:82 +msgid "Cache Age" +msgstr "" + +#: apprise/attachment/base.py:88 +msgid "Forced Mime Type" +msgstr "" + +#: apprise/attachment/base.py:92 +msgid "Forced File Name" +msgstr "" + +#: apprise/attachment/file.py:41 apprise/config/file.py:41 +msgid "Local File" +msgstr "" + +#: apprise/attachment/http.py:46 apprise/config/http.py:54 +msgid "Web Based" +msgstr "" + +#: apprise/attachment/memory.py:44 apprise/config/memory.py:37 +msgid "Memory" +msgstr "" + +#: apprise/plugins/__init__.py:280 +msgid "Schema" +msgstr "Schema" + +#: apprise/plugins/__init__.py:401 +msgid "No 
dependencies." +msgstr "" + +#: apprise/plugins/__init__.py:404 +msgid "Packages are required to function." +msgstr "" + +#: apprise/plugins/__init__.py:408 +msgid "Packages are recommended to improve functionality." +msgstr "" + +#: apprise/plugins/africas_talking.py:132 +#, fuzzy +msgid "App User Name" +msgstr "User Name" + +#: apprise/plugins/africas_talking.py:138 apprise/plugins/brevo.py:112 +#: apprise/plugins/burstsms.py:104 apprise/plugins/clicksend.py:98 +#: apprise/plugins/dot.py:121 apprise/plugins/fcm/__init__.py:143 +#: apprise/plugins/httpsms.py:77 apprise/plugins/join.py:140 +#: apprise/plugins/kavenegar.py:115 apprise/plugins/kumulos.py:87 +#: apprise/plugins/mailgun.py:146 apprise/plugins/messagebird.py:78 +#: apprise/plugins/one_signal.py:113 apprise/plugins/opsgenie.py:236 +#: apprise/plugins/pagerduty.py:131 apprise/plugins/popcorn_notify.py:71 +#: apprise/plugins/prowl.py:121 apprise/plugins/resend.py:107 +#: apprise/plugins/sendgrid.py:116 apprise/plugins/seven.py:75 +#: apprise/plugins/simplepush.py:101 apprise/plugins/smsmanager.py:106 +#: apprise/plugins/smtp2go.py:118 apprise/plugins/sparkpost.py:169 +#: apprise/plugins/splunk.py:165 apprise/plugins/techuluspush.py:97 +#: apprise/plugins/twilio.py:197 apprise/plugins/vapid/__init__.py:152 +#: apprise/plugins/vonage.py:80 +msgid "API Key" +msgstr "API Key" + +#: apprise/plugins/africas_talking.py:145 apprise/plugins/fortysixelks.py:106 +#, fuzzy +msgid "Target Phone" +msgstr "Target Phone No" + +#: apprise/plugins/africas_talking.py:150 apprise/plugins/aprs.py:187 +#: apprise/plugins/bark.py:159 apprise/plugins/brevo.py:129 +#: apprise/plugins/bulksms.py:137 apprise/plugins/bulkvs.py:110 +#: apprise/plugins/burstsms.py:131 apprise/plugins/clickatell.py:90 +#: apprise/plugins/clicksend.py:112 apprise/plugins/d7networks.py:110 +#: apprise/plugins/dapnet.py:138 apprise/plugins/dingtalk.py:111 +#: apprise/plugins/email/base.py:140 apprise/plugins/fcm/__init__.py:168 +#: 
apprise/plugins/flock.py:125 apprise/plugins/fortysixelks.py:111 +#: apprise/plugins/httpsms.py:97 apprise/plugins/irc/base.py:149 +#: apprise/plugins/join.py:172 apprise/plugins/kavenegar.py:135 +#: apprise/plugins/line.py:93 apprise/plugins/mailgun.py:157 +#: apprise/plugins/mastodon.py:190 apprise/plugins/matrix.py:262 +#: apprise/plugins/mattermost.py:185 apprise/plugins/messagebird.py:99 +#: apprise/plugins/mqtt.py:174 apprise/plugins/msg91.py:123 +#: apprise/plugins/nextcloud.py:148 apprise/plugins/nextcloudtalk.py:101 +#: apprise/plugins/notifiarr.py:103 apprise/plugins/notificationapi.py:187 +#: apprise/plugins/ntfy.py:241 apprise/plugins/office365.py:148 +#: apprise/plugins/one_signal.py:141 apprise/plugins/plivo.py:114 +#: apprise/plugins/popcorn_notify.py:89 apprise/plugins/pushbullet.py:105 +#: apprise/plugins/pushed.py:107 apprise/plugins/pushover.py:206 +#: apprise/plugins/pushsafer.py:376 apprise/plugins/pushy.py:97 +#: apprise/plugins/reddit.py:170 apprise/plugins/resend.py:124 +#: apprise/plugins/revolt.py:112 apprise/plugins/rocketchat.py:166 +#: apprise/plugins/sendgrid.py:133 apprise/plugins/sendpulse.py:136 +#: apprise/plugins/seven.py:88 apprise/plugins/sfr.py:113 +#: apprise/plugins/signal_api.py:138 apprise/plugins/sinch.py:140 +#: apprise/plugins/slack.py:248 apprise/plugins/smpp.py:128 +#: apprise/plugins/smseagle.py:172 apprise/plugins/smsmanager.py:119 +#: apprise/plugins/sns.py:138 apprise/plugins/telegram.py:359 +#: apprise/plugins/threema.py:115 apprise/plugins/twilio.py:174 +#: apprise/plugins/twist.py:123 apprise/plugins/twitter.py:168 +#: apprise/plugins/vapid/__init__.py:158 apprise/plugins/voipms.py:107 +#: apprise/plugins/vonage.py:108 apprise/plugins/whatsapp.py:126 +#: apprise/plugins/wxpusher.py:139 apprise/plugins/xmpp/base.py:118 +#: apprise/plugins/zulip.py:153 +msgid "Targets" +msgstr "Targets" + +#: apprise/plugins/africas_talking.py:168 +#, fuzzy +msgid "From" +msgstr "Rooms" + +#: apprise/plugins/africas_talking.py:174 
apprise/plugins/bulksms.py:170 +#: apprise/plugins/bulkvs.py:131 apprise/plugins/burstsms.py:166 +#: apprise/plugins/clicksend.py:130 apprise/plugins/d7networks.py:128 +#: apprise/plugins/dapnet.py:167 apprise/plugins/mailgun.py:194 +#: apprise/plugins/mastodon.py:215 apprise/plugins/one_signal.py:161 +#: apprise/plugins/opsgenie.py:288 apprise/plugins/plivo.py:137 +#: apprise/plugins/popcorn_notify.py:104 apprise/plugins/signal_api.py:155 +#: apprise/plugins/smseagle.py:190 apprise/plugins/smsmanager.py:152 +#: apprise/plugins/smtp2go.py:151 apprise/plugins/sparkpost.py:209 +#: apprise/plugins/twitter.py:193 +#, fuzzy +msgid "Batch Mode" +msgstr "Webhook Mode" + +#: apprise/plugins/africas_talking.py:179 +#, fuzzy +msgid "SMS Mode" +msgstr "Secure Mode" + +#: apprise/plugins/apprise_api.py:102 apprise/plugins/bark.py:134 +#: apprise/plugins/custom_form.py:121 apprise/plugins/custom_json.py:103 +#: apprise/plugins/custom_xml.py:103 apprise/plugins/emby.py:85 +#: apprise/plugins/enigma2.py:110 apprise/plugins/fluxer.py:159 +#: apprise/plugins/gotify.py:129 apprise/plugins/growl.py:140 +#: apprise/plugins/home_assistant.py:79 apprise/plugins/irc/base.py:117 +#: apprise/plugins/lametric.py:460 apprise/plugins/mastodon.py:168 +#: apprise/plugins/matrix.py:220 apprise/plugins/mattermost.py:151 +#: apprise/plugins/misskey.py:117 apprise/plugins/mqtt.py:147 +#: apprise/plugins/nextcloud.py:116 apprise/plugins/nextcloudtalk.py:74 +#: apprise/plugins/notica.py:126 apprise/plugins/ntfy.py:211 +#: apprise/plugins/parseplatform.py:90 apprise/plugins/pushdeer.py:75 +#: apprise/plugins/pushjet.py:71 apprise/plugins/rocketchat.py:120 +#: apprise/plugins/rsyslog.py:180 apprise/plugins/signal_api.py:97 +#: apprise/plugins/smseagle.py:135 apprise/plugins/synology.py:83 +#: apprise/plugins/workflows.py:125 apprise/plugins/xbmc.py:96 +#: apprise/plugins/xmpp/base.py:96 +msgid "Hostname" +msgstr "Hostname" + +#: apprise/plugins/apprise_api.py:107 apprise/plugins/bark.py:139 +#: 
apprise/plugins/custom_form.py:126 apprise/plugins/custom_json.py:108 +#: apprise/plugins/custom_xml.py:108 apprise/plugins/email/base.py:129 +#: apprise/plugins/emby.py:90 apprise/plugins/enigma2.py:115 +#: apprise/plugins/fluxer.py:163 apprise/plugins/gotify.py:140 +#: apprise/plugins/growl.py:145 apprise/plugins/home_assistant.py:84 +#: apprise/plugins/irc/base.py:122 apprise/plugins/lametric.py:464 +#: apprise/plugins/mastodon.py:178 apprise/plugins/matrix.py:224 +#: apprise/plugins/mattermost.py:167 apprise/plugins/misskey.py:127 +#: apprise/plugins/mqtt.py:152 apprise/plugins/nextcloud.py:121 +#: apprise/plugins/nextcloudtalk.py:79 apprise/plugins/notica.py:130 +#: apprise/plugins/ntfy.py:215 apprise/plugins/parseplatform.py:95 +#: apprise/plugins/pushdeer.py:79 apprise/plugins/pushjet.py:76 +#: apprise/plugins/rocketchat.py:125 apprise/plugins/rsyslog.py:185 +#: apprise/plugins/signal_api.py:102 apprise/plugins/smpp.py:108 +#: apprise/plugins/smseagle.py:140 apprise/plugins/synology.py:88 +#: apprise/plugins/workflows.py:130 apprise/plugins/xbmc.py:101 +#: apprise/plugins/xmpp/base.py:101 +msgid "Port" +msgstr "Port" + +#: apprise/plugins/apprise_api.py:113 apprise/plugins/bark.py:145 +#: apprise/plugins/bluesky.py:119 apprise/plugins/custom_form.py:132 +#: apprise/plugins/custom_json.py:114 apprise/plugins/custom_xml.py:114 +#: apprise/plugins/emby.py:97 apprise/plugins/enigma2.py:121 +#: apprise/plugins/freemobile.py:78 apprise/plugins/home_assistant.py:90 +#: apprise/plugins/lametric.py:471 apprise/plugins/matrix.py:230 +#: apprise/plugins/nextcloud.py:127 apprise/plugins/nextcloudtalk.py:85 +#: apprise/plugins/notica.py:136 apprise/plugins/ntfy.py:221 +#: apprise/plugins/opsgenie.py:242 apprise/plugins/pushjet.py:88 +#: apprise/plugins/rocketchat.py:131 apprise/plugins/signal_api.py:108 +#: apprise/plugins/smpp.py:92 apprise/plugins/synology.py:94 +#: apprise/plugins/xbmc.py:107 +msgid "Username" +msgstr "Username" + +#: 
apprise/plugins/apprise_api.py:117 apprise/plugins/aprs.py:172 +#: apprise/plugins/bark.py:149 apprise/plugins/bluesky.py:124 +#: apprise/plugins/bulksms.py:117 apprise/plugins/bulkvs.py:90 +#: apprise/plugins/custom_form.py:136 apprise/plugins/custom_json.py:118 +#: apprise/plugins/custom_xml.py:118 apprise/plugins/dapnet.py:123 +#: apprise/plugins/email/base.py:119 apprise/plugins/emby.py:101 +#: apprise/plugins/enigma2.py:125 apprise/plugins/freemobile.py:83 +#: apprise/plugins/growl.py:151 apprise/plugins/home_assistant.py:94 +#: apprise/plugins/irc/base.py:132 apprise/plugins/matrix.py:234 +#: apprise/plugins/mqtt.py:163 apprise/plugins/nextcloud.py:131 +#: apprise/plugins/nextcloudtalk.py:90 apprise/plugins/notica.py:140 +#: apprise/plugins/ntfy.py:225 apprise/plugins/pushjet.py:92 +#: apprise/plugins/reddit.py:145 apprise/plugins/rocketchat.py:135 +#: apprise/plugins/signal_api.py:112 apprise/plugins/simplepush.py:108 +#: apprise/plugins/smpp.py:97 apprise/plugins/synology.py:98 +#: apprise/plugins/twist.py:101 apprise/plugins/voipms.py:88 +#: apprise/plugins/xbmc.py:111 apprise/plugins/xmpp/base.py:112 +msgid "Password" +msgstr "Password" + +#: apprise/plugins/apprise_api.py:122 apprise/plugins/chanify.py:74 +#: apprise/plugins/dingtalk.py:93 apprise/plugins/feishu.py:80 +#: apprise/plugins/gotify.py:123 apprise/plugins/mattermost.py:157 +#: apprise/plugins/notica.py:119 apprise/plugins/notifiarr.py:91 +#: apprise/plugins/ntfy.py:230 apprise/plugins/pushme.py:62 +#: apprise/plugins/ryver.py:99 apprise/plugins/serverchan.py:70 +#: apprise/plugins/slack.py:294 apprise/plugins/synology.py:103 +#: apprise/plugins/webexteams.py:116 apprise/plugins/zulip.py:136 +msgid "Token" +msgstr "Token" + +#: apprise/plugins/apprise_api.py:136 apprise/plugins/ntfy.py:288 +#: apprise/plugins/opsgenie.py:307 apprise/plugins/pagertree.py:133 +#, fuzzy +msgid "Tags" +msgstr "Targets" + +#: apprise/plugins/apprise_api.py:140 +msgid "Query Method" +msgstr "" + +#: 
apprise/plugins/apprise_api.py:154 apprise/plugins/custom_form.py:165 +#: apprise/plugins/custom_json.py:141 apprise/plugins/custom_xml.py:141 +#: apprise/plugins/enigma2.py:153 apprise/plugins/nextcloud.py:181 +#: apprise/plugins/nextcloudtalk.py:122 apprise/plugins/notica.py:156 +#: apprise/plugins/pagertree.py:142 apprise/plugins/synology.py:128 +msgid "HTTP Header" +msgstr "HTTP Header" + +#: apprise/plugins/aprs.py:167 apprise/plugins/bulksms.py:112 +#: apprise/plugins/bulkvs.py:85 apprise/plugins/clicksend.py:93 +#: apprise/plugins/dapnet.py:118 apprise/plugins/email/base.py:115 +#: apprise/plugins/mailgun.py:136 apprise/plugins/mqtt.py:158 +#: apprise/plugins/reddit.py:140 apprise/plugins/sendpulse.py:108 +#: apprise/plugins/smtp2go.py:108 apprise/plugins/sparkpost.py:159 +msgid "User Name" +msgstr "User Name" + +#: apprise/plugins/aprs.py:178 apprise/plugins/aprs.py:199 +#: apprise/plugins/dapnet.py:129 apprise/plugins/dapnet.py:150 +#, fuzzy +msgid "Target Callsign" +msgstr "Target Emails" + +#: apprise/plugins/aprs.py:204 +msgid "Resend Delay" +msgstr "" + +#: apprise/plugins/aprs.py:211 +msgid "Locale" +msgstr "" + +#: apprise/plugins/bark.py:154 apprise/plugins/fcm/__init__.py:157 +#: apprise/plugins/pushbullet.py:89 apprise/plugins/pushover.py:200 +#: apprise/plugins/pushsafer.py:366 apprise/plugins/pushy.py:85 +msgid "Target Device" +msgstr "Target Device" + +#: apprise/plugins/bark.py:174 apprise/plugins/lametric.py:516 +#: apprise/plugins/macosx.py:124 apprise/plugins/pushover.py:223 +#: apprise/plugins/pushsafer.py:392 apprise/plugins/pushy.py:110 +msgid "Sound" +msgstr "Sound" + +#: apprise/plugins/bark.py:179 +msgid "Level" +msgstr "" + +#: apprise/plugins/bark.py:184 +msgid "Volume" +msgstr "" + +#: apprise/plugins/bark.py:190 apprise/plugins/ntfy.py:270 +#: apprise/plugins/pagerduty.py:172 +msgid "Click" +msgstr "" + +#: apprise/plugins/bark.py:194 apprise/plugins/pushy.py:114 +msgid "Badge" +msgstr "" + +#: apprise/plugins/bark.py:199 +msgid 
"Category" +msgstr "" + +#: apprise/plugins/bark.py:203 apprise/plugins/join.py:158 +#: apprise/plugins/pagerduty.py:163 +msgid "Group" +msgstr "Group" + +#: apprise/plugins/bark.py:207 apprise/plugins/dbus.py:225 +#: apprise/plugins/discord.py:196 apprise/plugins/fcm/__init__.py:197 +#: apprise/plugins/flock.py:136 apprise/plugins/fluxer.py:250 +#: apprise/plugins/glib.py:187 apprise/plugins/gnome.py:154 +#: apprise/plugins/growl.py:175 apprise/plugins/join.py:182 +#: apprise/plugins/line.py:108 apprise/plugins/macosx.py:115 +#: apprise/plugins/matrix.py:273 apprise/plugins/mattermost.py:211 +#: apprise/plugins/msteams.py:200 apprise/plugins/notifiarr.py:125 +#: apprise/plugins/ntfy.py:256 apprise/plugins/one_signal.py:155 +#: apprise/plugins/pagerduty.py:191 apprise/plugins/ryver.py:124 +#: apprise/plugins/slack.py:259 apprise/plugins/telegram.py:370 +#: apprise/plugins/vapid/__init__.py:204 apprise/plugins/windows.py:106 +#: apprise/plugins/workflows.py:162 apprise/plugins/xbmc.py:129 +msgid "Include Image" +msgstr "Include Image" + +#: apprise/plugins/bark.py:213 apprise/plugins/mattermost.py:207 +#: apprise/plugins/revolt.py:128 +msgid "Icon URL" +msgstr "" + +#: apprise/plugins/bark.py:217 apprise/plugins/streamlabs.py:119 +msgid "Call" +msgstr "" + +#: apprise/plugins/base.py:192 +msgid "Overflow Mode" +msgstr "Overflow Mode" + +#: apprise/plugins/base.py:207 +msgid "Notify Format" +msgstr "Notify Format" + +#: apprise/plugins/base.py:217 +#, fuzzy +msgid "Interpret Emojis" +msgstr "Target Emails" + +#: apprise/plugins/base.py:227 +msgid "Persistent Storage" +msgstr "" + +#: apprise/plugins/base.py:237 +#, fuzzy +msgid "Timezone" +msgstr "Server Timeout" + +#: apprise/plugins/brevo.py:119 apprise/plugins/resend.py:114 +#: apprise/plugins/sendgrid.py:123 +#, fuzzy +msgid "Source Email" +msgstr "Source JID" + +#: apprise/plugins/brevo.py:124 apprise/plugins/email/base.py:135 +#: apprise/plugins/mailgun.py:152 apprise/plugins/notificationapi.py:172 +#: 
apprise/plugins/office365.py:143 apprise/plugins/one_signal.py:124 +#: apprise/plugins/popcorn_notify.py:84 apprise/plugins/pushbullet.py:100 +#: apprise/plugins/pushsafer.py:371 apprise/plugins/resend.py:119 +#: apprise/plugins/sendgrid.py:128 apprise/plugins/sendpulse.py:131 +#: apprise/plugins/slack.py:231 apprise/plugins/threema.py:105 +msgid "Target Email" +msgstr "Target Email" + +#: apprise/plugins/brevo.py:143 apprise/plugins/email/base.py:165 +#: apprise/plugins/mailgun.py:186 apprise/plugins/notificationapi.py:218 +#: apprise/plugins/office365.py:162 apprise/plugins/resend.py:138 +#: apprise/plugins/sendgrid.py:147 apprise/plugins/sendpulse.py:152 +#: apprise/plugins/ses.py:206 apprise/plugins/smtp2go.py:143 +#: apprise/plugins/sparkpost.py:201 +msgid "Carbon Copy" +msgstr "" + +#: apprise/plugins/brevo.py:147 apprise/plugins/email/base.py:169 +#: apprise/plugins/mailgun.py:190 apprise/plugins/notificationapi.py:222 +#: apprise/plugins/office365.py:166 apprise/plugins/resend.py:142 +#: apprise/plugins/sendgrid.py:151 apprise/plugins/sendpulse.py:156 +#: apprise/plugins/ses.py:210 apprise/plugins/smtp2go.py:147 +#: apprise/plugins/sparkpost.py:205 +msgid "Blind Carbon Copy" +msgstr "" + +#: apprise/plugins/brevo.py:151 apprise/plugins/ses.py:196 +#, fuzzy +msgid "Reply To Email" +msgstr "To Email" + +#: apprise/plugins/bulksms.py:123 apprise/plugins/bulkvs.py:103 +#: apprise/plugins/burstsms.py:124 apprise/plugins/clickatell.py:83 +#: apprise/plugins/clicksend.py:105 apprise/plugins/d7networks.py:103 +#: apprise/plugins/dingtalk.py:106 apprise/plugins/httpsms.py:90 +#: apprise/plugins/kavenegar.py:128 apprise/plugins/messagebird.py:92 +#: apprise/plugins/msg91.py:116 apprise/plugins/plivo.py:107 +#: apprise/plugins/popcorn_notify.py:77 apprise/plugins/seven.py:81 +#: apprise/plugins/signal_api.py:124 apprise/plugins/sinch.py:127 +#: apprise/plugins/smpp.py:121 apprise/plugins/smseagle.py:151 +#: apprise/plugins/smsmanager.py:112 apprise/plugins/sns.py:125 
+#: apprise/plugins/threema.py:98 apprise/plugins/twilio.py:161 +#: apprise/plugins/voipms.py:100 apprise/plugins/vonage.py:101 +#: apprise/plugins/whatsapp.py:119 +msgid "Target Phone No" +msgstr "Target Phone No" + +#: apprise/plugins/bulksms.py:130 apprise/plugins/nextcloud.py:142 +#, fuzzy +msgid "Target Group" +msgstr "Target Topic" + +#: apprise/plugins/bulksms.py:152 apprise/plugins/bulkvs.py:96 +#: apprise/plugins/bulkvs.py:125 apprise/plugins/clickatell.py:78 +#: apprise/plugins/fortysixelks.py:100 apprise/plugins/httpsms.py:83 +#: apprise/plugins/httpsms.py:115 apprise/plugins/signal_api.py:117 +#: apprise/plugins/sinch.py:120 apprise/plugins/smpp.py:114 +#: apprise/plugins/smsmanager.py:137 apprise/plugins/twilio.py:154 +#: apprise/plugins/voipms.py:94 apprise/plugins/vonage.py:94 +msgid "From Phone No" +msgstr "From Phone No" + +#: apprise/plugins/bulksms.py:158 +#, fuzzy +msgid "Route Group" +msgstr "Group" + +#: apprise/plugins/bulksms.py:165 apprise/plugins/d7networks.py:123 +msgid "Unicode Characters" +msgstr "" + +#: apprise/plugins/burstsms.py:111 apprise/plugins/threema.py:92 +#: apprise/plugins/vonage.py:87 +#, fuzzy +msgid "API Secret" +msgstr "Application Secret" + +#: apprise/plugins/burstsms.py:118 +#, fuzzy +msgid "Sender ID" +msgstr "To User ID" + +#: apprise/plugins/burstsms.py:155 +msgid "Country" +msgstr "" + +#: apprise/plugins/burstsms.py:164 +msgid "validity" +msgstr "" + +#: apprise/plugins/chanify.py:47 +msgid "Chanify" +msgstr "" + +#: apprise/plugins/clickatell.py:45 +msgid "Clickatell" +msgstr "" + +#: apprise/plugins/clickatell.py:72 apprise/plugins/rocketchat.py:140 +#, fuzzy +msgid "API Token" +msgstr "API Key" + +#: apprise/plugins/custom_form.py:148 apprise/plugins/custom_json.py:130 +#: apprise/plugins/custom_xml.py:130 +msgid "Fetch Method" +msgstr "" + +#: apprise/plugins/custom_form.py:154 +msgid "Attach File As" +msgstr "" + +#: apprise/plugins/custom_form.py:169 apprise/plugins/custom_json.py:145 +#: 
apprise/plugins/custom_xml.py:145 apprise/plugins/pagertree.py:146 +msgid "Payload Extras" +msgstr "" + +#: apprise/plugins/custom_form.py:173 apprise/plugins/custom_json.py:149 +#: apprise/plugins/custom_xml.py:149 +msgid "GET Params" +msgstr "" + +#: apprise/plugins/d7networks.py:97 +#, fuzzy +msgid "API Access Token" +msgstr "Access Token" + +#: apprise/plugins/d7networks.py:141 apprise/plugins/seven.py:107 +msgid "Originating Address" +msgstr "" + +#: apprise/plugins/dapnet.py:155 apprise/plugins/gotify.py:153 +#: apprise/plugins/growl.py:163 apprise/plugins/join.py:188 +#: apprise/plugins/lametric.py:494 apprise/plugins/ntfy.py:282 +#: apprise/plugins/opsgenie.py:293 apprise/plugins/prowl.py:141 +#: apprise/plugins/pushover.py:217 apprise/plugins/pushsafer.py:387 +#: apprise/plugins/smseagle.py:210 +msgid "Priority" +msgstr "Priority" + +#: apprise/plugins/dapnet.py:161 +msgid "Transmitter Groups" +msgstr "" + +#: apprise/plugins/dbus.py:153 +msgid "libdbus-1.so.x must be installed." 
+msgstr "" + +#: apprise/plugins/dbus.py:157 apprise/plugins/glib.py:126 +msgid "DBus Notification" +msgstr "" + +#: apprise/plugins/dbus.py:201 apprise/plugins/glib.py:163 +#: apprise/plugins/gnome.py:142 apprise/plugins/pagertree.py:128 +msgid "Urgency" +msgstr "Urgency" + +#: apprise/plugins/dbus.py:213 apprise/plugins/glib.py:175 +msgid "X-Axis" +msgstr "X-Axis" + +#: apprise/plugins/dbus.py:219 apprise/plugins/glib.py:181 +msgid "Y-Axis" +msgstr "Y-Axis" + +#: apprise/plugins/dingtalk.py:100 apprise/plugins/signl4.py:76 +#, fuzzy +msgid "Secret" +msgstr "Secret Key" + +#: apprise/plugins/discord.py:125 apprise/plugins/flock.py:106 +#: apprise/plugins/fluxer.py:169 apprise/plugins/ryver.py:106 +#: apprise/plugins/slack.py:186 apprise/plugins/viber.py:95 +#: apprise/plugins/zulip.py:124 +msgid "Bot Name" +msgstr "Bot Name" + +#: apprise/plugins/discord.py:130 apprise/plugins/fluxer.py:174 +#: apprise/plugins/ifttt.py:103 +msgid "Webhook ID" +msgstr "Webhook ID" + +#: apprise/plugins/discord.py:136 apprise/plugins/fluxer.py:181 +#: apprise/plugins/google_chat.py:118 +msgid "Webhook Token" +msgstr "Webhook Token" + +#: apprise/plugins/discord.py:149 apprise/plugins/fluxer.py:201 +msgid "Text To Speech" +msgstr "Text To Speech" + +#: apprise/plugins/discord.py:154 apprise/plugins/fluxer.py:206 +msgid "Avatar Image" +msgstr "Avatar Image" + +#: apprise/plugins/discord.py:159 apprise/plugins/fluxer.py:211 +#: apprise/plugins/ntfy.py:262 +#, fuzzy +msgid "Avatar URL" +msgstr "Avatar Image" + +#: apprise/plugins/discord.py:163 apprise/plugins/fluxer.py:215 +#: apprise/plugins/pushover.py:229 +msgid "URL" +msgstr "" + +#: apprise/plugins/discord.py:172 apprise/plugins/fluxer.py:222 +msgid "Thread ID" +msgstr "" + +#: apprise/plugins/discord.py:176 apprise/plugins/fluxer.py:230 +msgid "Display Footer" +msgstr "Display Footer" + +#: apprise/plugins/discord.py:181 apprise/plugins/fluxer.py:235 +msgid "Footer Logo" +msgstr "Footer Logo" + +#: apprise/plugins/discord.py:186 
apprise/plugins/fluxer.py:240 +#, fuzzy +msgid "Use Fields" +msgstr "To User ID" + +#: apprise/plugins/discord.py:191 apprise/plugins/fluxer.py:245 +msgid "Discord Flags" +msgstr "" + +#: apprise/plugins/discord.py:205 apprise/plugins/fluxer.py:256 +msgid "Ping Users/Roles" +msgstr "" + +#: apprise/plugins/dot.py:127 +#, fuzzy +msgid "Device Serial Number" +msgstr "Device ID" + +#: apprise/plugins/dot.py:133 +#, fuzzy +msgid "API Mode" +msgstr "API Key" + +#: apprise/plugins/dot.py:147 +msgid "Refresh Now" +msgstr "" + +#: apprise/plugins/dot.py:153 +msgid "Text Signature" +msgstr "" + +#: apprise/plugins/dot.py:157 +msgid "Icon Base64 (Text API)" +msgstr "" + +#: apprise/plugins/dot.py:161 +msgid "Image Base64 (Image API)" +msgstr "" + +#: apprise/plugins/dot.py:166 +msgid "Link" +msgstr "" + +#: apprise/plugins/dot.py:170 +#, fuzzy +msgid "Border" +msgstr "Modal" + +#: apprise/plugins/dot.py:177 +msgid "Dither Type" +msgstr "" + +#: apprise/plugins/dot.py:183 +msgid "Dither Kernel" +msgstr "" + +#: apprise/plugins/emby.py:112 +msgid "Modal" +msgstr "Modal" + +#: apprise/plugins/enigma2.py:130 apprise/plugins/gotify.py:134 +#: apprise/plugins/mattermost.py:163 apprise/plugins/notica.py:145 +msgid "Path" +msgstr "" + +#: apprise/plugins/enigma2.py:140 +msgid "Server Timeout" +msgstr "Server Timeout" + +#: apprise/plugins/feishu.py:49 +msgid "Feishu" +msgstr "" + +#: apprise/plugins/flock.py:99 apprise/plugins/twitter.py:150 +msgid "Access Key" +msgstr "Access Key" + +#: apprise/plugins/flock.py:111 +msgid "To User ID" +msgstr "To User ID" + +#: apprise/plugins/flock.py:118 +msgid "To Channel ID" +msgstr "To Channel ID" + +#: apprise/plugins/fcm/__init__.py:182 apprise/plugins/fcm/__init__.py:188 +#: apprise/plugins/fluxer.py:195 apprise/plugins/lametric.py:510 +#: apprise/plugins/mattermost.py:223 apprise/plugins/notificationapi.py:209 +#: apprise/plugins/ntfy.py:296 apprise/plugins/vapid/__init__.py:169 +#, fuzzy +msgid "Mode" +msgstr "Modal" + +#: 
apprise/plugins/fluxer.py:226 +#, fuzzy +msgid "Thread Name" +msgstr "Bot Name" + +#: apprise/plugins/fortysixelks.py:58 +msgid "46elks" +msgstr "" + +#: apprise/plugins/fortysixelks.py:89 +#, fuzzy +msgid "API Username" +msgstr "User Name" + +#: apprise/plugins/fortysixelks.py:94 +#, fuzzy +msgid "API Password" +msgstr "Password" + +#: apprise/plugins/freemobile.py:48 +msgid "Free-Mobile" +msgstr "" + +#: apprise/plugins/glib.py:122 +msgid "libdbus-1.so.x or libdbus-2.so.x must be installed." +msgstr "" + +#: apprise/plugins/gnome.py:100 +msgid "A local Gnome environment is required." +msgstr "" + +#: apprise/plugins/gnome.py:104 +msgid "Gnome Notification" +msgstr "" + +#: apprise/plugins/google_chat.py:106 +msgid "Workspace" +msgstr "" + +#: apprise/plugins/google_chat.py:112 +#, fuzzy +msgid "Webhook Key" +msgstr "Webhook Token" + +#: apprise/plugins/google_chat.py:124 +#, fuzzy +msgid "Thread Key" +msgstr "Secret Key" + +#: apprise/plugins/growl.py:169 apprise/plugins/mqtt.py:195 +#: apprise/plugins/msteams.py:206 apprise/plugins/nextcloud.py:163 +msgid "Version" +msgstr "Version" + +#: apprise/plugins/growl.py:181 +msgid "Sticky" +msgstr "" + +#: apprise/plugins/home_assistant.py:99 +#, fuzzy +msgid "Long-Lived Access Token" +msgstr "Access Token" + +#: apprise/plugins/home_assistant.py:113 +msgid "Notification ID" +msgstr "" + +#: apprise/plugins/ifttt.py:109 +msgid "Events" +msgstr "Events" + +#: apprise/plugins/ifttt.py:129 +msgid "Add Tokens" +msgstr "Add Tokens" + +#: apprise/plugins/ifttt.py:133 +msgid "Remove Tokens" +msgstr "Remove Tokens" + +#: apprise/plugins/join.py:147 +msgid "Device ID" +msgstr "Device ID" + +#: apprise/plugins/join.py:153 +#, fuzzy +msgid "Device Name" +msgstr "Device ID" + +#: apprise/plugins/kavenegar.py:122 apprise/plugins/messagebird.py:85 +#: apprise/plugins/plivo.py:100 +#, fuzzy +msgid "Source Phone No" +msgstr "Target Phone No" + +#: apprise/plugins/kumulos.py:99 +#, fuzzy +msgid "Server Key" +msgstr "Secret Key" + +#: 
apprise/plugins/lametric.py:436 +#, fuzzy +msgid "Device API Key" +msgstr "Device ID" + +#: apprise/plugins/lametric.py:442 apprise/plugins/one_signal.py:102 +#: apprise/plugins/parseplatform.py:101 +msgid "App ID" +msgstr "" + +#: apprise/plugins/lametric.py:448 +#, fuzzy +msgid "App Version" +msgstr "Version" + +#: apprise/plugins/lametric.py:455 +#, fuzzy +msgid "App Access Token" +msgstr "Access Token" + +#: apprise/plugins/lametric.py:500 +msgid "Custom Icon" +msgstr "" + +#: apprise/plugins/lametric.py:504 +msgid "Icon Type" +msgstr "" + +#: apprise/plugins/lametric.py:521 +msgid "Cycles" +msgstr "" + +#: apprise/plugins/lark.py:47 +msgid "Lark (Feishu)" +msgstr "" + +#: apprise/plugins/lark.py:67 apprise/plugins/revolt.py:98 +#: apprise/plugins/telegram.py:344 +msgid "Bot Token" +msgstr "Bot Token" + +#: apprise/plugins/line.py:82 apprise/plugins/mastodon.py:173 +#: apprise/plugins/matrix.py:239 apprise/plugins/misskey.py:122 +#: apprise/plugins/pushbullet.py:83 apprise/plugins/pushover.py:194 +#: apprise/plugins/smseagle.py:146 apprise/plugins/spugpush.py:68 +#: apprise/plugins/streamlabs.py:105 apprise/plugins/whatsapp.py:99 +msgid "Access Token" +msgstr "Access Token" + +#: apprise/plugins/irc/base.py:137 apprise/plugins/line.py:88 +#: apprise/plugins/mastodon.py:184 apprise/plugins/matrix.py:244 +#: apprise/plugins/nextcloud.py:136 apprise/plugins/one_signal.py:129 +#: apprise/plugins/opsgenie.py:258 apprise/plugins/pushed.py:95 +#: apprise/plugins/rocketchat.py:155 apprise/plugins/slack.py:236 +#: apprise/plugins/twitter.py:162 apprise/plugins/zulip.py:143 +msgid "Target User" +msgstr "Target User" + +#: apprise/plugins/macosx.py:65 +msgid "" +"Only works with Mac OS X 10.8 and higher. Additionally requires that /usr/" +"local/bin/terminal-notifier is locally accessible." 
+msgstr "" + +#: apprise/plugins/macosx.py:72 +msgid "MacOSX Notification" +msgstr "" + +#: apprise/plugins/macosx.py:128 +msgid "Open/Click URL" +msgstr "" + +#: apprise/plugins/email/base.py:124 apprise/plugins/mailgun.py:141 +#: apprise/plugins/sendpulse.py:112 apprise/plugins/smtp2go.py:113 +#: apprise/plugins/sparkpost.py:164 +msgid "Domain" +msgstr "Domain" + +#: apprise/plugins/email/base.py:160 apprise/plugins/mailgun.py:168 +#: apprise/plugins/resend.py:154 apprise/plugins/ses.py:201 +#: apprise/plugins/smtp2go.py:135 apprise/plugins/sparkpost.py:186 +msgid "From Name" +msgstr "From Name" + +#: apprise/plugins/mailgun.py:176 apprise/plugins/notificationapi.py:203 +#: apprise/plugins/opsgenie.py:281 apprise/plugins/pagerduty.py:176 +#: apprise/plugins/sparkpost.py:191 +msgid "Region Name" +msgstr "Region Name" + +#: apprise/plugins/email/base.py:209 apprise/plugins/mailgun.py:204 +#: apprise/plugins/smtp2go.py:161 apprise/plugins/sparkpost.py:219 +#, fuzzy +msgid "Email Header" +msgstr "HTTP Header" + +#: apprise/plugins/mailgun.py:208 apprise/plugins/msteams.py:222 +#: apprise/plugins/notificationapi.py:246 apprise/plugins/sparkpost.py:223 +#: apprise/plugins/workflows.py:201 +#, fuzzy +msgid "Template Tokens" +msgstr "Remove Tokens" + +#: apprise/plugins/mastodon.py:204 apprise/plugins/misskey.py:143 +msgid "Visibility" +msgstr "" + +#: apprise/plugins/mastodon.py:210 apprise/plugins/twitter.py:185 +msgid "Cache Results" +msgstr "" + +#: apprise/plugins/mastodon.py:220 +msgid "Sensitive Attachments" +msgstr "" + +#: apprise/plugins/mastodon.py:225 +msgid "Spoiler Text" +msgstr "" + +#: apprise/plugins/mastodon.py:229 +msgid "Idempotency-Key" +msgstr "" + +#: apprise/plugins/mastodon.py:233 +msgid "Language Code" +msgstr "" + +#: apprise/plugins/matrix.py:250 apprise/plugins/rocketchat.py:161 +msgid "Target Room ID" +msgstr "Target Room ID" + +#: apprise/plugins/matrix.py:256 +msgid "Target Room Alias" +msgstr "Target Room Alias" + +#: 
apprise/plugins/matrix.py:279 +#, fuzzy +msgid "Server Discovery" +msgstr "Server Timeout" + +#: apprise/plugins/matrix.py:284 +msgid "Force Home Server on Room IDs" +msgstr "" + +#: apprise/plugins/matrix.py:289 apprise/plugins/rocketchat.py:177 +#: apprise/plugins/ryver.py:118 +msgid "Webhook Mode" +msgstr "Webhook Mode" + +#: apprise/plugins/matrix.py:295 +msgid "Matrix API Verion" +msgstr "" + +#: apprise/plugins/matrix.py:301 apprise/plugins/notificationapi.py:154 +msgid "Message Type" +msgstr "" + +#: apprise/plugins/irc/base.py:128 apprise/plugins/mattermost.py:147 +#: apprise/plugins/xmpp/base.py:107 +#, fuzzy +msgid "User" +msgstr "Username" + +#: apprise/plugins/irc/base.py:143 apprise/plugins/mattermost.py:173 +#: apprise/plugins/notifiarr.py:97 apprise/plugins/pushbullet.py:94 +#: apprise/plugins/pushed.py:101 apprise/plugins/rocketchat.py:149 +#: apprise/plugins/slack.py:242 apprise/plugins/twist.py:112 +msgid "Target Channel" +msgstr "Target Channel" + +#: apprise/plugins/mattermost.py:179 apprise/plugins/twist.py:118 +#, fuzzy +msgid "Target Channel ID" +msgstr "Target Channel" + +#: apprise/plugins/mqtt.py:169 +#, fuzzy +msgid "Target Queue" +msgstr "Target User" + +#: apprise/plugins/mqtt.py:188 +msgid "QOS" +msgstr "" + +#: apprise/plugins/mqtt.py:201 apprise/plugins/notificationapi.py:161 +#: apprise/plugins/office365.py:130 apprise/plugins/sendpulse.py:117 +#, fuzzy +msgid "Client ID" +msgstr "Account SID" + +#: apprise/plugins/mqtt.py:205 +msgid "Use Session" +msgstr "" + +#: apprise/plugins/mqtt.py:210 +msgid "Retain Messages" +msgstr "" + +#: apprise/plugins/msg91.py:102 apprise/plugins/sendpulse.py:161 +msgid "Template ID" +msgstr "" + +#: apprise/plugins/msg91.py:109 +#, fuzzy +msgid "Authentication Key" +msgstr "Application Key" + +#: apprise/plugins/msg91.py:138 +msgid "Short URL" +msgstr "" + +#: apprise/plugins/msg91.py:148 apprise/plugins/whatsapp.py:168 +msgid "Template Mapping" +msgstr "" + +#: apprise/plugins/msteams.py:151 +#, 
fuzzy +msgid "Team Name" +msgstr "Bot Name" + +#: apprise/plugins/msteams.py:159 apprise/plugins/slack.py:203 +msgid "Token A" +msgstr "Token A" + +#: apprise/plugins/msteams.py:168 apprise/plugins/slack.py:211 +msgid "Token B" +msgstr "Token B" + +#: apprise/plugins/msteams.py:177 apprise/plugins/slack.py:219 +msgid "Token C" +msgstr "Token C" + +#: apprise/plugins/msteams.py:186 +#, fuzzy +msgid "Token D" +msgstr "Token C" + +#: apprise/plugins/msteams.py:212 apprise/plugins/workflows.py:180 +msgid "Template Path" +msgstr "" + +#: apprise/plugins/nextcloud.py:169 apprise/plugins/nextcloudtalk.py:113 +msgid "URL Prefix" +msgstr "" + +#: apprise/plugins/nextcloudtalk.py:43 +msgid "Nextcloud Talk" +msgstr "" + +#: apprise/plugins/nextcloudtalk.py:96 +#, fuzzy +msgid "Room ID" +msgstr "Target Room ID" + +#: apprise/plugins/notifiarr.py:121 +msgid "Discord Event ID" +msgstr "" + +#: apprise/plugins/notifiarr.py:131 apprise/plugins/pagerduty.py:145 +#, fuzzy +msgid "Source" +msgstr "Source JID" + +#: apprise/plugins/notificationapi.py:166 apprise/plugins/office365.py:137 +#: apprise/plugins/sendpulse.py:124 +#, fuzzy +msgid "Client Secret" +msgstr "Access Secret" + +#: apprise/plugins/notificationapi.py:177 +#, fuzzy +msgid "Target ID" +msgstr "Target User" + +#: apprise/plugins/notificationapi.py:182 +#, fuzzy +msgid "Target SMS" +msgstr "Targets" + +#: apprise/plugins/notificationapi.py:198 +msgid "Channels" +msgstr "Channels" + +#: apprise/plugins/email/base.py:185 apprise/plugins/notificationapi.py:226 +#: apprise/plugins/resend.py:146 +msgid "Reply To" +msgstr "" + +#: apprise/plugins/email/base.py:155 apprise/plugins/notificationapi.py:231 +#: apprise/plugins/sendpulse.py:147 apprise/plugins/ses.py:154 +msgid "From Email" +msgstr "From Email" + +#: apprise/plugins/fcm/__init__.py:153 apprise/plugins/notifico.py:124 +#, fuzzy +msgid "Project ID" +msgstr "Target JID" + +#: apprise/plugins/notifico.py:133 +msgid "Message Hook" +msgstr "" + +#: 
apprise/plugins/notifico.py:148 +msgid "IRC Colors" +msgstr "" + +#: apprise/plugins/notifico.py:154 +msgid "Prefix" +msgstr "" + +#: apprise/plugins/ntfy.py:235 +msgid "Topic" +msgstr "" + +#: apprise/plugins/ntfy.py:252 +msgid "Attach" +msgstr "" + +#: apprise/plugins/ntfy.py:266 +msgid "Attach Filename" +msgstr "" + +#: apprise/plugins/ntfy.py:274 +msgid "Delay" +msgstr "" + +#: apprise/plugins/ntfy.py:278 apprise/plugins/twist.py:107 +#, fuzzy +msgid "Email" +msgstr "To Email" + +#: apprise/plugins/ntfy.py:292 +#, fuzzy +msgid "Actions" +msgstr "Duration" + +#: apprise/plugins/ntfy.py:305 +#, fuzzy +msgid "Authentication Type" +msgstr "Authorization Token" + +#: apprise/plugins/office365.py:118 +#, fuzzy +msgid "Tenant Domain" +msgstr "Domain" + +#: apprise/plugins/office365.py:125 +msgid "Account Email or Object ID" +msgstr "" + +#: apprise/plugins/one_signal.py:108 apprise/plugins/sendgrid.py:157 +msgid "Template" +msgstr "" + +#: apprise/plugins/one_signal.py:119 +#, fuzzy +msgid "Target Player ID" +msgstr "Target Tag ID" + +#: apprise/plugins/one_signal.py:135 +#, fuzzy +msgid "Include Segment" +msgstr "Include Image" + +#: apprise/plugins/one_signal.py:166 +msgid "Enable Contents" +msgstr "" + +#: apprise/plugins/one_signal.py:172 +msgid "Decode Template Args" +msgstr "" + +#: apprise/plugins/one_signal.py:181 +msgid "Subtitle" +msgstr "" + +#: apprise/plugins/one_signal.py:185 apprise/plugins/sfr.py:125 +#: apprise/plugins/whatsapp.py:130 +msgid "Language" +msgstr "" + +#: apprise/plugins/one_signal.py:195 +msgid "Custom Data" +msgstr "" + +#: apprise/plugins/one_signal.py:199 +msgid "Postback Data" +msgstr "" + +#: apprise/plugins/opsgenie.py:246 +#, fuzzy +msgid "Target Escalation" +msgstr "Target Chat ID" + +#: apprise/plugins/opsgenie.py:252 +#, fuzzy +msgid "Target Schedule" +msgstr "Target Channel" + +#: apprise/plugins/opsgenie.py:264 +#, fuzzy +msgid "Target Team" +msgstr "Target Email" + +#: apprise/plugins/opsgenie.py:270 +#, fuzzy +msgid 
"Targets " +msgstr "Targets" + +#: apprise/plugins/opsgenie.py:299 +msgid "Entity" +msgstr "" + +#: apprise/plugins/opsgenie.py:303 +msgid "Alias" +msgstr "" + +#: apprise/plugins/opsgenie.py:314 apprise/plugins/pagertree.py:118 +#: apprise/plugins/splunk.py:202 +#, fuzzy +msgid "Action" +msgstr "Duration" + +#: apprise/plugins/opsgenie.py:325 +#, fuzzy +msgid "Details" +msgstr "Target Emails" + +#: apprise/plugins/opsgenie.py:329 apprise/plugins/splunk.py:213 +msgid "Action Mapping" +msgstr "" + +#: apprise/plugins/pagerduty.py:138 apprise/plugins/spike.py:68 +#, fuzzy +msgid "Integration Key" +msgstr "Application Key" + +#: apprise/plugins/pagerduty.py:151 +#, fuzzy +msgid "Component" +msgstr "From Phone No" + +#: apprise/plugins/pagerduty.py:167 +msgid "Class" +msgstr "" + +#: apprise/plugins/pagerduty.py:185 +msgid "Severity" +msgstr "" + +#: apprise/plugins/pagerduty.py:202 +#, fuzzy +msgid "Custom Details" +msgstr "To Email" + +#: apprise/plugins/pagertree.py:105 +msgid "Integration ID" +msgstr "" + +#: apprise/plugins/pagertree.py:124 +msgid "Third Party ID" +msgstr "" + +#: apprise/plugins/pagertree.py:150 +msgid "Meta Extras" +msgstr "" + +#: apprise/plugins/parseplatform.py:107 +#, fuzzy +msgid "Master Key" +msgstr "User Key" + +#: apprise/plugins/parseplatform.py:120 +#, fuzzy +msgid "Device" +msgstr "Device ID" + +#: apprise/plugins/plivo.py:88 +#, fuzzy +msgid "Auth ID" +msgstr "Account SID" + +#: apprise/plugins/plivo.py:94 apprise/plugins/sinch.py:113 +#: apprise/plugins/twilio.py:147 +msgid "Auth Token" +msgstr "Auth Token" + +#: apprise/plugins/prowl.py:128 +msgid "Provider Key" +msgstr "Provider Key" + +#: apprise/plugins/pushdeer.py:85 +#, fuzzy +msgid "Pushkey" +msgstr "User Key" + +#: apprise/plugins/pushed.py:83 +msgid "Application Key" +msgstr "Application Key" + +#: apprise/plugins/pushed.py:89 apprise/plugins/reddit.py:158 +msgid "Application Secret" +msgstr "Application Secret" + +#: apprise/plugins/pushjet.py:82 +msgid "Secret Key" 
+msgstr "Secret Key" + +#: apprise/plugins/pushme.py:81 apprise/plugins/signal_api.py:160 +#: apprise/plugins/smseagle.py:195 +msgid "Show Status" +msgstr "" + +#: apprise/plugins/pushover.py:188 +msgid "User Key" +msgstr "User Key" + +#: apprise/plugins/pushover.py:234 +msgid "URL Title" +msgstr "" + +#: apprise/plugins/pushover.py:239 +msgid "Retry" +msgstr "" + +#: apprise/plugins/pushover.py:245 +msgid "Expire" +msgstr "" + +#: apprise/plugins/pushplus.py:48 +msgid "Pushplus" +msgstr "" + +#: apprise/plugins/pushplus.py:68 apprise/plugins/qq.py:66 +#, fuzzy +msgid "User Token" +msgstr "User Key" + +#: apprise/plugins/pushsafer.py:360 +#, fuzzy +msgid "Private Key" +msgstr "Provider Key" + +#: apprise/plugins/pushsafer.py:397 +#, fuzzy +msgid "Vibration" +msgstr "Duration" + +#: apprise/plugins/pushy.py:79 +#, fuzzy +msgid "Secret API Key" +msgstr "Secret Key" + +#: apprise/plugins/fcm/__init__.py:162 apprise/plugins/pushy.py:91 +#: apprise/plugins/sns.py:131 apprise/plugins/wxpusher.py:128 +msgid "Target Topic" +msgstr "Target Topic" + +#: apprise/plugins/qq.py:46 +msgid "QQ Push" +msgstr "" + +#: apprise/plugins/reddit.py:151 +#, fuzzy +msgid "Application ID" +msgstr "Application Key" + +#: apprise/plugins/reddit.py:165 +#, fuzzy +msgid "Target Subreddit" +msgstr "Target User" + +#: apprise/plugins/reddit.py:185 +msgid "Kind" +msgstr "" + +#: apprise/plugins/reddit.py:191 +msgid "Flair ID" +msgstr "" + +#: apprise/plugins/reddit.py:196 +msgid "Flair Text" +msgstr "" + +#: apprise/plugins/reddit.py:201 +msgid "NSFW" +msgstr "" + +#: apprise/plugins/reddit.py:207 +msgid "Is Ad?" 
+msgstr "" + +#: apprise/plugins/reddit.py:213 +msgid "Send Replies" +msgstr "" + +#: apprise/plugins/reddit.py:219 +msgid "Is Spoiler" +msgstr "" + +#: apprise/plugins/reddit.py:225 +msgid "Resubmit Flag" +msgstr "" + +#: apprise/plugins/revolt.py:104 +#, fuzzy +msgid "Channel ID" +msgstr "To Channel ID" + +#: apprise/plugins/revolt.py:130 +msgid "Embed URL" +msgstr "" + +#: apprise/plugins/rocketchat.py:145 +msgid "Webhook" +msgstr "Webhook" + +#: apprise/plugins/rocketchat.py:182 +msgid "Use Avatar" +msgstr "Use Avatar" + +#: apprise/plugins/rsyslog.py:173 apprise/plugins/syslog.py:144 +msgid "Facility" +msgstr "" + +#: apprise/plugins/rsyslog.py:203 apprise/plugins/syslog.py:161 +msgid "Log PID" +msgstr "" + +#: apprise/plugins/ryver.py:93 apprise/plugins/zulip.py:130 +msgid "Organization" +msgstr "Organization" + +#: apprise/plugins/sendgrid.py:166 apprise/plugins/sendpulse.py:175 +msgid "Template Data" +msgstr "" + +#: apprise/plugins/ses.py:160 apprise/plugins/sns.py:106 +msgid "Access Key ID" +msgstr "Access Key ID" + +#: apprise/plugins/ses.py:166 apprise/plugins/sns.py:112 +msgid "Secret Access Key" +msgstr "Secret Access Key" + +#: apprise/plugins/ses.py:172 apprise/plugins/sinch.py:160 +#: apprise/plugins/sns.py:118 +msgid "Region" +msgstr "Region" + +#: apprise/plugins/ses.py:179 apprise/plugins/smtp2go.py:124 +#: apprise/plugins/sparkpost.py:175 +msgid "Target Emails" +msgstr "Target Emails" + +#: apprise/plugins/seven.py:115 apprise/plugins/smseagle.py:205 +msgid "Flash" +msgstr "" + +#: apprise/plugins/seven.py:119 +msgid "Label" +msgstr "" + +#: apprise/plugins/sfr.py:58 +msgid "Société Française du Radiotéléphone" +msgstr "" + +#: apprise/plugins/sfr.py:90 +#, fuzzy +msgid "Service ID" +msgstr "Device ID" + +#: apprise/plugins/sfr.py:95 +#, fuzzy +msgid "Service Password" +msgstr "Password" + +#: apprise/plugins/sfr.py:101 +#, fuzzy +msgid "Space ID" +msgstr "Source JID" + +#: apprise/plugins/sfr.py:107 +msgid "Recipient Phone Number" +msgstr "" + 
+#: apprise/plugins/sfr.py:131 +#, fuzzy +msgid "Sender Name" +msgstr "User Name" + +#: apprise/plugins/sfr.py:138 +msgid "Media Type" +msgstr "" + +#: apprise/plugins/sfr.py:145 +#, fuzzy +msgid "Timeout" +msgstr "Server Timeout" + +#: apprise/plugins/sfr.py:151 +#, fuzzy +msgid "TTS Voice" +msgstr "Target Device" + +#: apprise/plugins/signal_api.py:131 apprise/plugins/smseagle.py:158 +#, fuzzy +msgid "Target Group ID" +msgstr "Target Room ID" + +#: apprise/plugins/signl4.py:86 +#, fuzzy +msgid "Service" +msgstr "Device ID" + +#: apprise/plugins/signl4.py:90 +#, fuzzy +msgid "Location" +msgstr "Duration" + +#: apprise/plugins/signl4.py:94 +msgid "Alerting Scenario" +msgstr "" + +#: apprise/plugins/signl4.py:98 +msgid "Filtering" +msgstr "" + +#: apprise/plugins/signl4.py:103 +#, fuzzy +msgid "External ID" +msgstr "To User ID" + +#: apprise/plugins/signl4.py:107 +#, fuzzy +msgid "Status" +msgstr "Targets" + +#: apprise/plugins/simplepush.py:113 +msgid "Salt" +msgstr "" + +#: apprise/plugins/simplepush.py:126 +#, fuzzy +msgid "Event" +msgstr "Events" + +#: apprise/plugins/sinch.py:106 apprise/plugins/twilio.py:140 +msgid "Account SID" +msgstr "Account SID" + +#: apprise/plugins/sinch.py:134 apprise/plugins/twilio.py:168 +msgid "Target Short Code" +msgstr "Target Short Code" + +#: apprise/plugins/slack.py:194 +#, fuzzy +msgid "OAuth Access Token" +msgstr "Access Token" + +#: apprise/plugins/slack.py:225 +msgid "Target Encoded ID" +msgstr "Target Encoded ID" + +#: apprise/plugins/slack.py:265 +#, fuzzy +msgid "Include Footer" +msgstr "Include Image" + +#: apprise/plugins/slack.py:273 +msgid "Use Blocks" +msgstr "" + +#: apprise/plugins/slack.py:282 +#, fuzzy +msgid "Include Timestamp" +msgstr "Include Image" + +#: apprise/plugins/slack.py:288 apprise/plugins/twitter.py:179 +#, fuzzy +msgid "Message Mode" +msgstr "Secure Mode" + +#: apprise/plugins/smpp.py:61 +msgid "SMPP" +msgstr "" + +#: apprise/plugins/smpp.py:103 +#, fuzzy +msgid "Host" +msgstr "Hostname" + +#: 
apprise/plugins/smseagle.py:165 +#, fuzzy +msgid "Target Contact" +msgstr "Target Chat ID" + +#: apprise/plugins/smseagle.py:200 +msgid "Test Only" +msgstr "" + +#: apprise/plugins/smsmanager.py:146 +msgid "Gateway" +msgstr "" + +#: apprise/plugins/spike.py:48 +msgid "Spike.sh" +msgstr "" + +#: apprise/plugins/splunk.py:117 +msgid "Splunk On-Call" +msgstr "" + +#: apprise/plugins/splunk.py:172 +#, fuzzy +msgid "Target Routing Key" +msgstr "Target Tag ID" + +#: apprise/plugins/splunk.py:179 +msgid "Entity ID" +msgstr "" + +#: apprise/plugins/spugpush.py:48 +msgid "SpugPush" +msgstr "" + +#: apprise/plugins/streamlabs.py:125 +msgid "Alert Type" +msgstr "" + +#: apprise/plugins/streamlabs.py:131 +msgid "Image Link" +msgstr "" + +#: apprise/plugins/streamlabs.py:136 +#, fuzzy +msgid "Sound Link" +msgstr "Sound" + +#: apprise/plugins/streamlabs.py:141 apprise/plugins/windows.py:100 +#: apprise/plugins/xbmc.py:123 +msgid "Duration" +msgstr "Duration" + +#: apprise/plugins/streamlabs.py:147 +msgid "Special Text Color" +msgstr "" + +#: apprise/plugins/streamlabs.py:153 +msgid "Amount" +msgstr "" + +#: apprise/plugins/streamlabs.py:159 +#, fuzzy +msgid "Currency" +msgstr "Urgency" + +#: apprise/plugins/streamlabs.py:165 +#, fuzzy +msgid "Name" +msgstr "Username" + +#: apprise/plugins/streamlabs.py:171 +msgid "Identifier" +msgstr "" + +#: apprise/plugins/synology.py:116 +msgid "Upload" +msgstr "" + +#: apprise/plugins/syslog.py:167 +msgid "Log to STDERR" +msgstr "" + +#: apprise/plugins/telegram.py:353 +msgid "Target Chat ID" +msgstr "Target Chat ID" + +#: apprise/plugins/telegram.py:376 +msgid "Detect Bot Owner" +msgstr "Detect Bot Owner" + +#: apprise/plugins/telegram.py:382 +msgid "Silent Notification" +msgstr "" + +#: apprise/plugins/telegram.py:387 +msgid "Web Page Preview" +msgstr "" + +#: apprise/plugins/telegram.py:392 +msgid "Topic Thread ID" +msgstr "" + +#: apprise/plugins/telegram.py:399 +#, fuzzy +msgid "Markdown Version" +msgstr "Version" + +#: 
apprise/plugins/telegram.py:408 +msgid "Content Placement" +msgstr "" + +#: apprise/plugins/threema.py:85 +msgid "Gateway ID" +msgstr "" + +#: apprise/plugins/threema.py:110 +#, fuzzy +msgid "Target Threema ID" +msgstr "Target Tag ID" + +#: apprise/plugins/twilio.py:203 +msgid "Notification Method: sms or call" +msgstr "" + +#: apprise/plugins/twitter.py:138 +msgid "Consumer Key" +msgstr "Consumer Key" + +#: apprise/plugins/twitter.py:144 +msgid "Consumer Secret" +msgstr "Consumer Secret" + +#: apprise/plugins/twitter.py:156 +msgid "Access Secret" +msgstr "Access Secret" + +#: apprise/plugins/viber.py:49 +msgid "Viber" +msgstr "" + +#: apprise/plugins/viber.py:81 +#, fuzzy +msgid "Authentication Token" +msgstr "Application Key" + +#: apprise/plugins/viber.py:87 +msgid "Receiver IDs" +msgstr "" + +#: apprise/plugins/viber.py:101 +#, fuzzy +msgid "Bot Avatar URL" +msgstr "Avatar Image" + +#: apprise/plugins/voipms.py:83 +#, fuzzy +msgid "User Email" +msgstr "From Email" + +#: apprise/plugins/vapid/__init__.py:179 apprise/plugins/vonage.py:136 +msgid "ttl" +msgstr "" + +#: apprise/plugins/wecombot.py:99 +#, fuzzy +msgid "Bot Webhook Key" +msgstr "Webhook Token" + +#: apprise/plugins/whatsapp.py:106 +msgid "Template Name" +msgstr "" + +#: apprise/plugins/whatsapp.py:112 +#, fuzzy +msgid "From Phone ID" +msgstr "From Phone No" + +#: apprise/plugins/windows.py:62 +msgid "A local Microsoft Windows environment is required." 
+msgstr "" + +#: apprise/plugins/workflows.py:137 +#, fuzzy +msgid "Workflow ID" +msgstr "Overflow Mode" + +#: apprise/plugins/workflows.py:145 +msgid "Signature" +msgstr "" + +#: apprise/plugins/workflows.py:168 +msgid "Use Power Automate URL" +msgstr "" + +#: apprise/plugins/workflows.py:175 +msgid "Wrap Text" +msgstr "" + +#: apprise/plugins/workflows.py:190 +#, fuzzy +msgid "API Version" +msgstr "Version" + +#: apprise/plugins/wxpusher.py:121 +#, fuzzy +msgid "App Token" +msgstr "Auth Token" + +#: apprise/plugins/wxpusher.py:133 +#, fuzzy +msgid "Target User ID" +msgstr "Target User" + +#: apprise/plugins/zulip.py:148 +#, fuzzy +msgid "Target Stream" +msgstr "Target User" + +#: apprise/plugins/email/base.py:150 +msgid "To Email" +msgstr "To Email" + +#: apprise/plugins/email/base.py:173 +msgid "SMTP Server" +msgstr "SMTP Server" + +#: apprise/plugins/email/base.py:178 apprise/plugins/xmpp/base.py:129 +msgid "Secure Mode" +msgstr "Secure Mode" + +#: apprise/plugins/email/base.py:190 +msgid "PGP Encryption" +msgstr "" + +#: apprise/plugins/email/base.py:196 +msgid "PGP Public Key Path" +msgstr "" + +#: apprise/plugins/fcm/__init__.py:148 +msgid "OAuth2 KeyFile" +msgstr "" + +#: apprise/plugins/fcm/__init__.py:193 +msgid "Custom Image URL" +msgstr "" + +#: apprise/plugins/fcm/__init__.py:205 +msgid "Notification Color" +msgstr "" + +#: apprise/plugins/fcm/__init__.py:215 +msgid "Data Entries" +msgstr "" + +#: apprise/plugins/irc/base.py:159 +#, fuzzy +msgid "Real Name" +msgstr "Bot Name" + +#: apprise/plugins/irc/base.py:160 +#, fuzzy +msgid "Nickname" +msgstr "Username" + +#: apprise/plugins/irc/base.py:162 +#, fuzzy +msgid "Join Channels" +msgstr "Channels" + +#: apprise/plugins/irc/base.py:167 +#, fuzzy +msgid "Auth Mode" +msgstr "Webhook Mode" + +#: apprise/plugins/vapid/__init__.py:193 +msgid "PEM Private KeyFile" +msgstr "" + +#: apprise/plugins/vapid/__init__.py:199 +msgid "Subscripion File" +msgstr "" + +#: apprise/plugins/xmpp/base.py:136 +#, fuzzy +msgid 
"Get Roster" +msgstr "Target User" + +#: apprise/plugins/xmpp/base.py:141 +msgid "Use Subject" +msgstr "" + +#: apprise/plugins/xmpp/base.py:146 +#, fuzzy +msgid "Keep Connection Alive" +msgstr "Server Timeout" diff --git a/libs/apprise/locale.py b/libs/apprise/locale.py index f113cf5601..c06aff1cc5 100644 --- a/libs/apprise/locale.py +++ b/libs/apprise/locale.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. -# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -26,16 +25,15 @@ # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. +import contextlib import ctypes import locale -import contextlib import os +from os.path import abspath, dirname, join import re -from os.path import join -from os.path import dirname -from os.path import abspath -from .logger import logger +from typing import Union +from .logger import logger # This gets toggled to True if we succeed GETTEXT_LOADED = False @@ -54,39 +52,37 @@ class AppriseLocale: - """ - A wrapper class to gettext so that we can manipulate multiple lanaguages - on the fly if required. 
- - """ + """A wrapper class to gettext so that we can manipulate multiple lanaguages + on the fly if required.""" # Define our translation domain - _domain = 'apprise' + _domain = "apprise" # The path to our translations - _locale_dir = abspath(join(dirname(__file__), 'i18n')) + _locale_dir = abspath(join(dirname(__file__), "i18n")) # Locale regular expression _local_re = re.compile( - r'^((?PC)|(?P([a-z]{2}))([_:](?P[a-z]{2}))?)' - r'(\.(?P[a-z0-9-]+))?$', re.IGNORECASE) + r"^((?PC)|(?P([a-z]{2}))([_:](?P[a-z]{2}))?)" + r"(\.(?P[a-z0-9-]+))?$", + re.IGNORECASE, + ) # Define our default encoding - _default_encoding = 'utf-8' + _default_encoding = "utf-8" # The function to assign `_` by default - _fn = 'gettext' + _fn = "gettext" # The language we should fall back to if all else fails - _default_language = 'en' + _default_language = "en" def __init__(self, language=None): - """ - Initializes our object, if a language is specified, then we - initialize ourselves to that, otherwise we use whatever we detect - from the local operating system. If all else fails, we resort to the - defined default_language. + """Initializes our object, if a language is specified, then we + initialize ourselves to that, otherwise we use whatever we detect from + the local operating system. + If all else fails, we resort to the defined default_language. 
""" # Cache previously loaded translations @@ -106,16 +102,17 @@ def __init__(self, language=None): self.add(self.lang) def add(self, lang=None, set_default=True): - """ - Add a language to our list - """ + """Add a language to our list.""" lang = lang if lang else self._default_language if lang not in self._gtobjs: # Load our gettext object and install our language try: self._gtobjs[lang] = gettext.translation( - self._domain, localedir=self._locale_dir, languages=[lang], - fallback=False) + self._domain, + localedir=self._locale_dir, + languages=[lang], + fallback=False, + ) # The non-intrusive method of applying the gettext change to # the global namespace only @@ -124,8 +121,9 @@ def add(self, lang=None, set_default=True): except FileNotFoundError: # The translation directory does not exist logger.debug( - 'Could not load translation path: %s', - join(self._locale_dir, lang)) + "Could not load translation path: %s", + join(self._locale_dir, lang), + ) # Fallback (handle case where self.lang does not exist) if self.lang not in self._gtobjs: @@ -134,10 +132,10 @@ def add(self, lang=None, set_default=True): return False - logger.trace('Loaded language %s', lang) + logger.trace("Loaded language %s", lang) if set_default: - logger.debug('Language set to %s', lang) + logger.debug("Language set to %s", lang) self.lang = lang return True @@ -172,8 +170,7 @@ def lang_at(self, lang, mapto=_fn): @property def gettext(self): - """ - Return the current language gettext() function + """Return the current language gettext() function. Useful for assigning to `_` """ @@ -181,9 +178,7 @@ def gettext(self): @staticmethod def detect_language(lang=None, detect_fallback=True): - """ - Returns the language (if it's retrievable) - """ + """Returns the language (if it's retrievable)""" # We want to only use the 2 character version of this language # hence en_CA becomes en, en_US becomes en. 
if not isinstance(lang, str): @@ -194,19 +189,20 @@ def detect_language(lang=None, detect_fallback=True): # Posix lookup lookup = os.environ.get localename = None - for variable in ('LC_ALL', 'LC_CTYPE', 'LANG', 'LANGUAGE'): + for variable in ("LC_ALL", "LC_CTYPE", "LANG", "LANGUAGE"): localename = lookup(variable, None) if localename: result = AppriseLocale._local_re.match(localename) - if result and result.group('lang'): - return result.group('lang').lower() + if result and result.group("lang"): + return result.group("lang").lower() # Windows handling - if hasattr(ctypes, 'windll'): + if hasattr(ctypes, "windll"): windll = ctypes.windll.kernel32 try: lang = locale.windows_locale[ - windll.GetUserDefaultUILanguage()] + windll.GetUserDefaultUILanguage() + ] # Our detected windows language return lang[0:2].lower() @@ -220,7 +216,7 @@ def detect_language(lang=None, detect_fallback=True): # Acquire our locale lang = locale.getlocale()[0] # Compatibility for Python >= 3.12 - if lang == 'C': + if lang == "C": lang = AppriseLocale._default_language except (ValueError, TypeError) as e: @@ -229,32 +225,27 @@ def detect_language(lang=None, detect_fallback=True): # case, we want to better notify the end user of this. Users # receiving this error should check their environment # variables. - logger.warning( - 'Language detection failure / {}'.format(str(e))) + logger.warning(f"Language detection failure / {e!s}") return None return None if not lang else lang[0:2].lower() def __getstate__(self): - """ - Pickle Support dumps() - """ + """Pickle Support dumps()""" state = self.__dict__.copy() # Remove the unpicklable entries. 
- del state['_gtobjs'] - del state['_AppriseLocale__fn_map'] + del state["_gtobjs"] + del state["_AppriseLocale__fn_map"] return state def __setstate__(self, state): - """ - Pickle Support loads() - """ + """Pickle Support loads()""" self.__dict__.update(state) # Our mapping to our _fn self.__fn_map = None self._gtobjs = {} - self.add(state['lang'], set_default=True) + self.add(state["lang"], set_default=True) # @@ -264,15 +255,11 @@ def __setstate__(self, state): class LazyTranslation: - """ - Doesn't translate anything until str() or unicode() references - are made. + """Doesn't translate anything until str() or unicode() references are + made.""" - """ def __init__(self, text, *args, **kwargs): - """ - Store our text - """ + """Store our text.""" self.text = text super().__init__(*args, **kwargs) @@ -283,7 +270,10 @@ def __str__(self): # Lazy translation handling def gettext_lazy(text): - """ - A dummy function that can be referenced - """ + """A dummy function that can be referenced.""" + return LazyTranslation(text=text) + + +# Identify our Translatable content +Translatable = Union[str, LazyTranslation] diff --git a/libs/apprise/logger.py b/libs/apprise/logger.py index c8c9d67103..89ef90725d 100644 --- a/libs/apprise/logger.py +++ b/libs/apprise/logger.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. -# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -26,12 +25,13 @@ # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
-import os -import logging +import contextlib from io import StringIO +import logging +import os # The root identifier needed to monitor 'apprise' logging -LOGGER_NAME = 'apprise' +LOGGER_NAME = "apprise" # Define a verbosity level that is a noisier then debug mode logging.TRACE = logging.DEBUG - 1 @@ -54,9 +54,7 @@ def trace(self, message, *args, **kwargs): def deprecate(self, message, *args, **kwargs): - """ - Deprication Warning Logging - """ + """Deprication Warning Logging.""" if self.isEnabledFor(logging.DEPRECATE): self._log(logging.DEPRECATE, message, args, **kwargs) @@ -70,25 +68,29 @@ def deprecate(self, message, *args, **kwargs): class LogCapture: + """A class used to allow one to instantiate loggers that write to memory + for temporary purposes. e.g.: + + 1. with LogCapture() as captured: + 2. + 3. # Send our notification(s) + 4. aobj.notify("hello world") + 5. + 6. # retrieve our logs produced by the above call via our + 7. # `captured` StringIO object we have access to within the `with` + 8. # block here: + 9. print(captured.getvalue()) """ - A class used to allow one to instantiate loggers that write to - memory for temporary purposes. e.g.: - - 1. with LogCapture() as captured: - 2. - 3. # Send our notification(s) - 4. aobj.notify("hello world") - 5. - 6. # retrieve our logs produced by the above call via our - 7. # `captured` StringIO object we have access to within the `with` - 8. # block here: - 9. print(captured.getvalue()) - """ - def __init__(self, path=None, level=None, name=LOGGER_NAME, delete=True, - fmt='%(asctime)s - %(levelname)s - %(message)s'): - """ - Instantiate a temporary log capture object + def __init__( + self, + path=None, + level=None, + name=LOGGER_NAME, + delete=True, + fmt="%(asctime)s - %(levelname)s - %(message)s", + ): + """Instantiate a temporary log capture object. If a path is specified, then log content is sent to that file instead of a StringIO object. 
@@ -99,7 +101,6 @@ def __init__(self, path=None, level=None, name=LOGGER_NAME, delete=True, they are not automatically cleaned up afterwards. Optionally over-ride the fmt as well if you wish. - """ # Our memory buffer placeholder self.__buffer_ptr = StringIO() @@ -117,23 +118,25 @@ def __init__(self, path=None, level=None, name=LOGGER_NAME, delete=True, self.__logger = logging.getLogger(name) # Prepare our handler - self.__handler = logging.StreamHandler(self.__buffer_ptr) \ - if not self.__path else logging.FileHandler( - self.__path, mode='a', encoding='utf-8') + self.__handler = ( + logging.StreamHandler(self.__buffer_ptr) + if not self.__path + else logging.FileHandler(self.__path, mode="a", encoding="utf-8") + ) # Use the specified level, otherwise take on the already # effective level of our logger self.__handler.setLevel( - self.__level if self.__level is not None - else self.__logger.getEffectiveLevel()) + self.__level + if self.__level is not None + else self.__logger.getEffectiveLevel() + ) # Prepare our formatter self.__handler.setFormatter(logging.Formatter(fmt)) def __enter__(self): - """ - Allows logger manipulation within a 'with' block - """ + """Allows logger manipulation within a 'with' block.""" if self.__level is not None: # Temporary adjust our log level if required @@ -153,11 +156,11 @@ def __enter__(self): if self.__path: # If a path has been identified, ensure we can write to the path # and that the file exists - with open(self.__path, 'a'): + with open(self.__path, "a"): os.utime(self.__path, None) # Update our buffer pointer - self.__buffer_ptr = open(self.__path, 'r') + self.__buffer_ptr = open(self.__path) # Add our handler self.__logger.addHandler(self.__handler) @@ -166,9 +169,7 @@ def __enter__(self): return self.__buffer_ptr def __exit__(self, exc_type, exc_value, tb): - """ - removes the handler gracefully when the with block has completed - """ + """Removes the handler gracefully when the with block has completed.""" # Flush our 
content self.__handler.flush() @@ -186,16 +187,8 @@ def __exit__(self, exc_type, exc_value, tb): self.__buffer_ptr.close() self.__handler.close() if self.__delete: - try: + with contextlib.suppress(OSError): # Always remove file afterwards os.unlink(self.__path) - except OSError: - # It's okay if the file does not exist - pass - - if exc_type is not None: - # pass exception on if one was generated - return False - - return True + return exc_type is None diff --git a/libs/apprise/manager.py b/libs/apprise/manager.py index d44357be4a..5e8fdb5d12 100644 --- a/libs/apprise/manager.py +++ b/libs/apprise/manager.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. -# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -26,52 +25,47 @@ # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. +import contextlib +import hashlib +import inspect import os +from os.path import abspath, dirname, join import re import sys -import time -import hashlib -import inspect import threading -from .utils.module import import_module -from .utils.singleton import Singleton -from .utils.parse import parse_list -from .utils.disk import path_decode -from os.path import dirname -from os.path import abspath -from os.path import join +import time from .logger import logger +from .utils.disk import path_decode +from .utils.module import import_module +from .utils.parse import parse_list +from .utils.singleton import Singleton class PluginManager(metaclass=Singleton): - """ - Designed to be a singleton object to maintain all initialized loading - of modules in memory. 
- """ + """Designed to be a singleton object to maintain all initialized loading of + modules in memory.""" # Description (used for logging) - name = 'Singleton Plugin' + name = "Singleton Plugin" # Memory Space - _id = 'undefined' + _id = "undefined" # Our Module Python path name - module_name_prefix = f'apprise.{_id}' + module_name_prefix = f"apprise.{_id}" # The module path to scan module_path = join(abspath(dirname(__file__)), _id) # For filtering our result when scanning a module - module_filter_re = re.compile(r'^(?P((?!_)[A-Za-z0-9]+))$') + module_filter_re = re.compile(r"^(?P((?!_)[A-Za-z0-9]+))$") # thread safe loading _lock = threading.Lock() def __init__(self, *args, **kwargs): - """ - Over-ride our class instantiation to provide a singleton - """ + """Over-ride our class instantiation to provide a singleton.""" self._module_map = None self._schema_map = None @@ -115,22 +109,22 @@ def __init__(self, *args, **kwargs): self._loaded = set() def unload_modules(self, disable_native=False): - """ - Reset our object and unload all modules - """ + """Reset our object and unload all modules.""" with self._lock: if self._custom_module_map: # Handle Custom Module Assignments for meta in self._custom_module_map.values(): - if meta['name'] not in self._module_map: + if meta["name"] not in self._module_map: # Nothing to remove continue # For the purpose of tidying up un-used modules in memory - loaded = [m for m in sys.modules.keys() - if m.startswith( - self._module_map[meta['name']]['path'])] + loaded = [ + m + for m in sys.modules + if m.startswith(self._module_map[meta["name"]]["path"]) + ] for module_path in loaded: del sys.modules[module_path] @@ -154,9 +148,7 @@ def unload_modules(self, disable_native=False): self._paths_previously_scanned = set() def load_modules(self, path=None, name=None, force=False): - """ - Load our modules into memory - """ + """Load our modules into memory.""" # Default value module_name_prefix = self.module_name_prefix if name is None 
else name @@ -181,8 +173,8 @@ def load_modules(self, path=None, name=None, force=False): # The .py extension is optional as we support loading directories # too module_re = re.compile( - r'^(?P(?!base|_)[a-z0-9_]+)(\.py)?$', - re.I) + r"^(?P(?!base|_)[a-z0-9_]+)(\.py)?$", re.I + ) t_start = time.time() for f in os.listdir(module_path): @@ -193,62 +185,73 @@ def load_modules(self, path=None, name=None, force=False): continue # Store our notification/plugin name: - module_name = match.group('name') - module_pyname = '{}.{}'.format(module_name_prefix, module_name) + module_name = match.group("name") + module_pyname = f"{module_name_prefix}.{module_name}" if module_name in self._module_map: logger.warning( "%s(s) (%s) already loaded; ignoring %s", - self.name, module_name, os.path.join(module_path, f)) + self.name, + module_name, + os.path.join(module_path, f), + ) continue try: module = __import__( module_pyname, - globals(), locals(), - fromlist=[module_name]) + globals(), + locals(), + fromlist=[module_name], + ) except ImportError: # No problem, we can try again another way... 
module = import_module( - os.path.join(module_path, f), module_pyname) + os.path.join(module_path, f), module_pyname + ) if not module: # logging found in import_module and not needed here continue module_class = None - for m_class in [obj for obj in dir(module) - if self.module_filter_re.match(obj)]: + for m_class in [ + obj + for obj in dir(module) + if self.module_filter_re.match(obj) + ]: # Get our plugin plugin = getattr(module, m_class) - if not hasattr(plugin, 'app_id'): + if not hasattr(plugin, "app_id"): # Filter out non-notification modules logger.trace( "(%s.%s) import failed; no app_id defined in %s", - self.name, m_class, os.path.join(module_path, f)) + self.name, + m_class, + os.path.join(module_path, f), + ) continue # Add our plugin name to our module map self._module_map[module_name] = { - 'plugin': set([plugin]), - 'module': module, - 'path': '{}.{}'.format( - module_name_prefix, module_name), - 'native': True, + "plugin": {plugin}, + "module": module, + "path": f"{module_name_prefix}.{module_name}", + "native": True, } - fn = getattr(plugin, 'schemas', None) - schemas = set([]) if not callable(fn) else fn(plugin) + fn = getattr(plugin, "schemas", None) + schemas = set() if not callable(fn) else fn(plugin) # map our schema to our plugin for schema in schemas: if schema in self._schema_map: logger.error( - "{} schema ({}) mismatch detected -" - ' {} already maps to {}' - .format(self.name, schema, - self._schema_map[schema], - plugin)) + f"{self.name} schema ({schema}) mismatch" + " detected -" + f" {self._schema_map[schema]} already maps to" + f" {plugin}" + ) continue # Assign plugin @@ -265,42 +268,45 @@ def load_modules(self, path=None, name=None, force=False): logger.trace( "%s (%s) import failed; no filename/Class " "match found in %s", - self.name, module_name, os.path.join(module_path, f)) + self.name, + module_name, + os.path.join(module_path, f), + ) continue logger.trace( - '{} {} loaded in {:.6f}s'.format( - self.name, module_name, 
(time.time() - tl_start))) + f"{self.name} {module_name} loaded in" + f" {time.time() - tl_start:.6f}s" + ) # Track the directory loaded so we never load it again self._loaded.add(module_path) logger.debug( - '{} {}(s) and {} Schema(s) loaded in {:.4f}s' - .format( - self.name, - len(self._module_map) - module_count, - len(self._schema_map) - schema_count, - (time.time() - t_start))) + f"{len(self._module_map) - module_count} {self.name}(s) and" + f" {len(self._schema_map) - schema_count} Schema(s) loaded in" + f" {time.time() - t_start:.4f}s" + ) def module_detection(self, paths, cache=True): - """ - Leverage the @notify decorator and load all objects found matching - this. - """ + """Leverage the @notify decorator and load all objects found matching + this.""" # A simple restriction that we don't allow periods in the filename at # all so it can't be hidden (Linux OS's) and it won't conflict with # Python path naming. This also prevents us from loading any python # file that starts with an underscore or dash # We allow for __init__.py as well module_re = re.compile( - r'^(?P[_a-z0-9][a-z0-9._-]+)?(\.py)?$', re.I) + r"^(?P[_a-z0-9][a-z0-9._-]+)?(\.py)?$", re.I + ) # Validate if we're a loadable Python file or not - valid_python_file_re = re.compile(r'.+\.py(o|c)?$', re.IGNORECASE) + valid_python_file_re = re.compile(r".+\.py(o|c)?$", re.IGNORECASE) if isinstance(paths, str): - paths = [paths, ] + paths = [ + paths, + ] if not paths or not isinstance(paths, (tuple, list)): # We're done @@ -312,19 +318,18 @@ def _import_module(path): # another's namespace if not (path and valid_python_file_re.match(path)): # Ignore file/module type - logger.trace('Plugin Scan: Skipping %s', path) + logger.trace("Plugin Scan: Skipping %s", path) return t_start = time.time() - module_name = hashlib.sha1(path.encode('utf-8')).hexdigest() + module_name = hashlib.sha1(path.encode("utf-8")).hexdigest() module_pyname = "{prefix}.{name}".format( - prefix='apprise.custom.module', 
name=module_name) + prefix="apprise.custom.module", name=module_name + ) if module_pyname in self._custom_module_map: # First clear out existing entries - for schema in \ - self._custom_module_map[module_pyname]['notify']\ - .keys(): + for schema in self._custom_module_map[module_pyname]["notify"]: # Remove any mapped modules to this file del self._schema_map[schema] @@ -336,35 +341,39 @@ def _import_module(path): module = import_module(path, module_pyname) if not module: # No problem, we can't use this object - logger.warning('Failed to load custom module: %s', _path) + logger.warning("Failed to load custom module: %s", path_) return # Print our loaded modules if any if module_pyname in self._custom_module_map: logger.debug( - 'Custom module %s - %d schema(s) (name=%s) ' - 'loaded in %.6fs', _path, - len(self._custom_module_map[module_pyname]['notify']), - module_name, (time.time() - t_start)) + "Custom module %s - %d schema(s) (name=%s) " + "loaded in %.6fs", + path_, + len(self._custom_module_map[module_pyname]["notify"]), + module_name, + (time.time() - t_start), + ) # Add our plugin name to our module map self._module_map[module_name] = { - 'plugin': set(), - 'module': module, - 'path': module_pyname, - 'native': False, + "plugin": set(), + "module": module, + "path": module_pyname, + "native": False, } - for schema, meta in\ - self._custom_module_map[module_pyname]['notify']\ - .items(): + for schema, _meta in self._custom_module_map[module_pyname][ + "notify" + ].items(): # For mapping purposes; map our element in our main list - self._module_map[module_name]['plugin'].add( - self._schema_map[schema]) + self._module_map[module_name]["plugin"].add( + self._schema_map[schema] + ) # Log our success - logger.info('Loaded custom notification: %s://', schema) + logger.info("Loaded custom notification: %s://", schema) else: # The code reaches here if we successfully loaded the Python # module but no hooks/triggers were found. 
So we can safely @@ -375,40 +384,44 @@ def _import_module(path): # end of _import_module() return - for _path in paths: - path = path_decode(_path) - if (cache and path in self._paths_previously_scanned) \ - or not os.path.exists(path): + for path_ in paths: + path = path_decode(path_) + if ( + cache and path in self._paths_previously_scanned + ) or not os.path.exists(path): # We're done as we've already scanned this continue # Store our path as a way of hashing it has been handled self._paths_previously_scanned.add(path) - if os.path.isdir(path) and not \ - os.path.isfile(os.path.join(path, '__init__.py')): + if os.path.isdir(path) and not os.path.isfile( + os.path.join(path, "__init__.py") + ): - logger.debug('Scanning for custom plugins in: %s', path) + logger.debug("Scanning for custom plugins in: %s", path) for entry in os.listdir(path): re_match = module_re.match(entry) if not re_match: # keep going - logger.trace('Plugin Scan: Ignoring %s', entry) + logger.trace("Plugin Scan: Ignoring %s", entry) continue new_path = os.path.join(path, entry) if os.path.isdir(new_path): # Update our path - new_path = os.path.join(path, entry, '__init__.py') + new_path = os.path.join(path, entry, "__init__.py") if not os.path.isfile(new_path): logger.trace( - 'Plugin Scan: Ignoring %s', - os.path.join(path, entry)) + "Plugin Scan: Ignoring %s", + os.path.join(path, entry), + ) continue - if not cache or \ - (cache and new_path not in - self._paths_previously_scanned): + if not cache or ( + cache + and new_path not in self._paths_previously_scanned + ): # Load our module _import_module(new_path) @@ -419,7 +432,7 @@ def _import_module(path): # This logic is safe to apply because we already # validated the directories state above; update our # path - path = os.path.join(path, '__init__.py') + path = os.path.join(path, "__init__.py") if cache and path in self._paths_previously_scanned: continue @@ -430,7 +443,7 @@ def _import_module(path): # must be a match and must have a .py 
extension if not re_match or not re_match.group(1): # keep going - logger.trace('Plugin Scan: Ignoring %s', path) + logger.trace("Plugin Scan: Ignoring %s", path) continue # Load our module @@ -438,10 +451,8 @@ def _import_module(path): return None - def add(self, plugin, schemas=None, url=None, send_func=None): - """ - Ability to manually add Notification services to our stack - """ + def add(self, plugin, schemas=None, url=None, send_func=None, force=False): + """Ability to manually add Notification services to our stack.""" if not self: # Lazy load @@ -450,7 +461,9 @@ def add(self, plugin, schemas=None, url=None, send_func=None): # Acquire a list of schemas p_schemas = parse_list(plugin.secure_protocol, plugin.protocol) if isinstance(schemas, str): - schemas = [schemas, ] + schemas = [ + schemas, + ] elif schemas is None: # Default @@ -459,26 +472,57 @@ def add(self, plugin, schemas=None, url=None, send_func=None): if not schemas or not isinstance(schemas, (set, tuple, list)): # We're done logger.error( - 'The schemas provided (type %s) is unsupported; ' - 'loaded from %s.', + "The schemas provided (type %s) is unsupported; " + "loaded from %s.", type(schemas), - send_func.__name__ if send_func else plugin.__class__.__name__) + send_func.__name__ if send_func else plugin.__class__.__name__, + ) return False # Convert our schemas into a set - schemas = set([s.lower() for s in schemas]) | set(p_schemas) + schemas = {s.lower() for s in schemas} | set(p_schemas) # Valdation conflict = [s for s in schemas if s in self] if conflict: - # we're already handling this schema - logger.warning( - 'The schema(s) (%s) are already defined and could not be ' - 'loaded from %s%s.', - ', '.join(conflict), - 'custom notify function ' if send_func else '', - send_func.__name__ if send_func else plugin.__class__.__name__) - return False + if force: + # Force implies that we unmap any conflicting schema entries + # at the Apprise level, but we do not unload any previously + # 
imported modules. This ensures other classes can safely + # subclass from prior notify classes. + logger.debug( + "The schema(s) (%s) are already defined and will be " + "force loaded; overriding %s%s.", + ", ".join(conflict), + "custom notify function " if send_func else "", + send_func.__name__ if send_func + else plugin.__class__.__name__, + ) + self.remove(*conflict, unload=False) + + else: + logger.warning( + "The schema(s) (%s) are already defined and could not be " + "loaded from %s%s.", + ", ".join(conflict), + "custom notify function " if send_func else "", + send_func.__name__ if send_func + else plugin.__class__.__name__, + ) + return False + + # Re-check for conflicts after unmapping + conflict = [s for s in schemas if s in self] + if conflict: + logger.warning( + "The schema(s) (%s) are already defined and could not be " + "loaded from %s%s.", + ", ".join(conflict), + "custom notify function " if send_func else "", + send_func.__name__ if send_func + else plugin.__class__.__name__, + ) + return False if send_func: # Acquire the function name @@ -495,37 +539,37 @@ def add(self, plugin, schemas=None, url=None, send_func=None): self._custom_module_map[module_name] = { # Name can be useful for indexing back into the # _module_map object; this is the key to do it with: - 'name': module_name.split('.')[-1], - + "name": module_name.split(".")[-1], # The path to the module loaded - 'path': path, - + "path": path, # Initialize our template - 'notify': {}, + "notify": {}, } for schema in schemas: - self._custom_module_map[module_name]['notify'][schema] = { + self._custom_module_map[module_name]["notify"][schema] = { # The name of the send function the @notify decorator # wrapped - 'fn_name': fn_name, + "fn_name": fn_name, # The URL that was provided in the @notify decorator call # associated with the 'on=' - 'url': url, + "url": url, } else: module_name = hashlib.sha1( - ''.join(schemas).encode('utf-8')).hexdigest() + "".join(schemas).encode("utf-8") + 
).hexdigest() module_pyname = "{prefix}.{name}".format( - prefix='apprise.adhoc.module', name=module_name) + prefix="apprise.adhoc.module", name=module_name + ) # Add our plugin name to our module map self._module_map[module_name] = { - 'plugin': set([plugin]), - 'module': None, - 'path': module_pyname, - 'native': False, + "plugin": {plugin}, + "module": None, + "path": module_pyname, + "native": False, } for schema in schemas: @@ -534,38 +578,30 @@ def add(self, plugin, schemas=None, url=None, send_func=None): return True - def remove(self, *schemas): - """ - Removes a loaded element (if defined) - """ + def remove(self, *schemas, unload=True): + """Removes a loaded element (if defined)""" if not self: # Lazy load self.load_modules() for schema in schemas: - try: - del self[schema] - - except KeyError: - pass + with contextlib.suppress(KeyError): + self._unmap_schema(schema, unload=unload) def plugins(self, include_disabled=True): - """ - Return all of our loaded plugins - """ + """Return all of our loaded plugins.""" if not self: # Lazy load self.load_modules() for module in self._module_map.values(): - for plugin in module['plugin']: + for plugin in module["plugin"]: if not include_disabled and not plugin.enabled: continue yield plugin def schemas(self, include_disabled=True): - """ - Return all of our loaded schemas + """Return all of our loaded schemas. 
if include_disabled == True, then even disabled notifications are returned @@ -575,13 +611,14 @@ def schemas(self, include_disabled=True): self.load_modules() # Return our list - return list(self._schema_map.keys()) if include_disabled else \ - [s for s in self._schema_map.keys() if self._schema_map[s].enabled] + return ( + list(self._schema_map.keys()) + if include_disabled + else [s for s in self._schema_map if self._schema_map[s].enabled] + ) def disable(self, *schemas): - """ - Disables the modules associated with the specified schemas - """ + """Disables the modules associated with the specified schemas.""" if not self: # Lazy load self.load_modules() @@ -598,9 +635,7 @@ def disable(self, *schemas): self._disabled.add(schema) def enable_only(self, *schemas): - """ - Disables the modules associated with the specified schemas - """ + """Disables the modules associated with the specified schemas.""" if not self: # Lazy load self.load_modules() @@ -611,7 +646,8 @@ def enable_only(self, *schemas): for plugin in self.plugins(): # Get our plugin's schema list p_schemas = set( - parse_list(plugin.secure_protocol, plugin.protocol)) + parse_list(plugin.secure_protocol, plugin.protocol) + ) if not schemas & p_schemas: if plugin.enabled: @@ -629,9 +665,7 @@ def enable_only(self, *schemas): plugin.enabled = True def __contains__(self, schema): - """ - Checks if a schema exists - """ + """Checks if a schema exists.""" if not self: # Lazy load self.load_modules() @@ -639,31 +673,76 @@ def __contains__(self, schema): return schema in self._schema_map def __delitem__(self, schema): + """ + removes schema map and also unloads it from memory + """ + self._unmap_schema(schema, unload=True) + + def __setitem__(self, schema, plugin): + """Support fast assigning of Plugin/Notification Objects.""" if not self: # Lazy load self.load_modules() - # Get our plugin (otherwise we throw a KeyError) which is - # intended on del action that doesn't align + # Set default values if not 
otherwise set + if not plugin.service_name: + # Assign service name if one doesn't exist + plugin.service_name = f"{schema}://" + + p_schemas = set(parse_list(plugin.secure_protocol, plugin.protocol)) + if not p_schemas: + # Assign our protocol + plugin.secure_protocol = schema + p_schemas.add(schema) + + elif schema not in p_schemas: + # Add our others (if defined) + plugin.secure_protocol = { + schema, + *parse_list(plugin.secure_protocol), + } + p_schemas.add(schema) + + if not self.add(plugin, schemas=p_schemas): + raise KeyError("Conflicting Assignment") + + def _unmap_schema(self, schema, *, unload=True): + """Unmap a schema entry without necessarily unloading modules. + + This function removes the schema mapping and updates internal cross + references. When unload is True (default), modules are removed from + sys.modules when they are no longer referenced by Apprise. When unload + is False, the unmapping is performed but any imported modules remain + intact in sys.modules. + """ + + if not self: + # Lazy load + self.load_modules() + + # Get our plugin (otherwise we throw a KeyError) which is intended on + # unmap action that doesn't align. 
plugin = self._schema_map[schema] # Our list of all schema entries - p_schemas = set([schema]) + p_schemas = {schema} for key in list(self._module_map.keys()): - if plugin in self._module_map[key]['plugin']: + if plugin in self._module_map[key]["plugin"]: # Remove our plugin - self._module_map[key]['plugin'].remove(plugin) + self._module_map[key]["plugin"].remove(plugin) # Custom Plugin Entry; Clean up cross reference - module_pyname = self._module_map[key]['path'] - if not self._module_map[key]['native'] and \ - module_pyname in self._custom_module_map: + module_pyname = self._module_map[key]["path"] + if ( + not self._module_map[key]["native"] + and module_pyname in self._custom_module_map + ): - del self.\ - _custom_module_map[module_pyname]['notify'][schema] + del self._custom_module_map[module_pyname][ + "notify"][schema] - if not self._custom_module_map[module_pyname]['notify']: + if not self._custom_module_map[module_pyname]["notify"]: # # Last custom loaded element # @@ -671,61 +750,34 @@ def __delitem__(self, schema): # Free up custom object entry del self._custom_module_map[module_pyname] - if not self._module_map[key]['plugin']: + if not self._module_map[key]["plugin"]: # # Last element # - if self._module_map[key]['native']: + if self._module_map[key]["native"]: # Get our plugin's schema list - p_schemas = \ - set([s for s in parse_list( - plugin.secure_protocol, plugin.protocol) - if s in self._schema_map]) - - # free system memory - if self._module_map[key]['module']: - del sys.modules[self._module_map[key]['path']] - - # free last remaining pointer in module map + p_schemas = { + s + for s in parse_list( + plugin.secure_protocol, plugin.protocol + ) + if s in self._schema_map + } + + # Free system memory only when unload=True + if unload and self._module_map[key]["module"]: + with contextlib.suppress(KeyError): + del sys.modules[self._module_map[key]["path"]] + + # Free last remaining pointer in module map del self._module_map[key] for schema in 
p_schemas: - # Final Tidy + # Final tidy del self._schema_map[schema] - def __setitem__(self, schema, plugin): - """ - Support fast assigning of Plugin/Notification Objects - """ - if not self: - # Lazy load - self.load_modules() - - # Set default values if not otherwise set - if not plugin.service_name: - # Assign service name if one doesn't exist - plugin.service_name = f'{schema}://' - - p_schemas = set( - parse_list(plugin.secure_protocol, plugin.protocol)) - if not p_schemas: - # Assign our protocol - plugin.secure_protocol = schema - p_schemas.add(schema) - - elif schema not in p_schemas: - # Add our others (if defined) - plugin.secure_protocol = \ - set([schema] + parse_list(plugin.secure_protocol)) - p_schemas.add(schema) - - if not self.add(plugin, schemas=p_schemas): - raise KeyError('Conflicting Assignment') - def __getitem__(self, schema): - """ - Returns the indexed plugin identified by the schema specified - """ + """Returns the indexed plugin identified by the schema specified.""" if not self: # Lazy load self.load_modules() @@ -733,9 +785,7 @@ def __getitem__(self, schema): return self._schema_map[schema] def __iter__(self): - """ - Returns an iterator so we can iterate over our loaded modules - """ + """Returns an iterator so we can iterate over our loaded modules.""" if not self: # Lazy load self.load_modules() @@ -743,9 +793,7 @@ def __iter__(self): return iter(self._module_map.values()) def __len__(self): - """ - Returns the number of modules/plugins loaded - """ + """Returns the number of modules/plugins loaded.""" if not self: # Lazy load self.load_modules() @@ -753,7 +801,5 @@ def __len__(self): return len(self._module_map) def __bool__(self): - """ - Determines if object has loaded or not - """ - return True if self._loaded and self._module_map is not None else False + """Determines if object has loaded or not.""" + return bool(self._loaded and self._module_map is not None) diff --git a/libs/apprise/manager_attachment.py 
b/libs/apprise/manager_attachment.py index ffd31ebc95..3e612f14e4 100644 --- a/libs/apprise/manager_attachment.py +++ b/libs/apprise/manager_attachment.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. -# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -26,34 +25,32 @@ # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. +from os.path import abspath, dirname, join import re -from os.path import dirname -from os.path import abspath -from os.path import join + from .manager import PluginManager class AttachmentManager(PluginManager): - """ - Designed to be a singleton object to maintain all initialized - attachment plugins/modules in memory. - """ + """Designed to be a singleton object to maintain all initialized attachment + plugins/modules in memory.""" # Description (used for logging) - name = 'Attachment Plugin' + name = "Attachment Plugin" # Filename Prefix to filter on - fname_prefix = 'Attach' + fname_prefix = "Attach" # Memory Space - _id = 'attachment' + _id = "attachment" # Our Module Python path name - module_name_prefix = f'apprise.{_id}' + module_name_prefix = f"apprise.{_id}" # The module path to scan module_path = join(abspath(dirname(__file__)), _id) # For filtering our result set module_filter_re = re.compile( - r'^(?P' + fname_prefix + r'(?!Base)[A-Za-z0-9]+)$') + r"^(?P" + fname_prefix + r"(?!Base)[A-Za-z0-9]+)$" + ) diff --git a/libs/apprise/manager_config.py b/libs/apprise/manager_config.py index d7b0fe0834..0544a65004 100644 --- a/libs/apprise/manager_config.py +++ b/libs/apprise/manager_config.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. 
-# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -26,34 +25,32 @@ # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. +from os.path import abspath, dirname, join import re -from os.path import dirname -from os.path import abspath -from os.path import join + from .manager import PluginManager class ConfigurationManager(PluginManager): - """ - Designed to be a singleton object to maintain all initialized - configuration plugins/modules in memory. - """ + """Designed to be a singleton object to maintain all initialized + configuration plugins/modules in memory.""" # Description (used for logging) - name = 'Configuration Plugin' + name = "Configuration Plugin" # Filename Prefix to filter on - fname_prefix = 'Config' + fname_prefix = "Config" # Memory Space - _id = 'config' + _id = "config" # Our Module Python path name - module_name_prefix = f'apprise.{_id}' + module_name_prefix = f"apprise.{_id}" # The module path to scan module_path = join(abspath(dirname(__file__)), _id) # For filtering our result set module_filter_re = re.compile( - r'^(?P' + fname_prefix + r'(?!Base)[A-Za-z0-9]+)$') + r"^(?P" + fname_prefix + r"(?!Base)[A-Za-z0-9]+)$" + ) diff --git a/libs/apprise/manager_plugins.py b/libs/apprise/manager_plugins.py index 0eb5465e76..00ac595118 100644 --- a/libs/apprise/manager_plugins.py +++ b/libs/apprise/manager_plugins.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. 
-# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -26,35 +25,32 @@ # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. +from os.path import abspath, dirname, join import re -from os.path import dirname -from os.path import abspath -from os.path import join + from .manager import PluginManager class NotificationManager(PluginManager): - """ - Designed to be a singleton object to maintain all initialized notifications - in memory. - """ + """Designed to be a singleton object to maintain all initialized + notifications in memory.""" # Description (used for logging) - name = 'Notification Plugin' + name = "Notification Plugin" # Filename Prefix to filter on - fname_prefix = 'Notify' + fname_prefix = "Notify" # Memory Space - _id = 'plugins' + _id = "plugins" # Our Module Python path name - module_name_prefix = f'apprise.{_id}' + module_name_prefix = f"apprise.{_id}" # The module path to scan module_path = join(abspath(dirname(__file__)), _id) # For filtering our result set module_filter_re = re.compile( - r'^(?P' + fname_prefix + - r'(?!Base|ImageSize|Type)[A-Za-z0-9]+)$') + r"^(?P" + fname_prefix + r"(?!Base|ImageSize|Type)[A-Za-z0-9]+)$" + ) diff --git a/libs/apprise/persistent_store.py b/libs/apprise/persistent_store.py index 8299d6f459..926647af87 100644 --- a/libs/apprise/persistent_store.py +++ b/libs/apprise/persistent_store.py @@ -1,82 +1,81 @@ -# -*- coding: utf-8 -*- +# BSD 2-Clause License # -# Copyright (C) 2025 Chris Caron -# All rights reserved. +# Apprise - Push Notification Library. +# Copyright (c) 2026, Chris Caron # -# This code is licensed under the MIT License. 
+# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: # -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files(the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions : +# 1. Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. # -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. +# 2. Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. # -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -# THE SOFTWARE. -import os -import re -import gzip -import zlib +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + import base64 -import glob -import tempfile -import json import binascii -from . import exception +import builtins +import contextlib +from datetime import datetime, timedelta, timezone +import glob +import gzip +import hashlib from itertools import chain -from datetime import datetime, timezone, timedelta +import json +import os +import re +import tempfile import time -import hashlib -from .common import PersistentStoreMode, PERSISTENT_STORE_MODES -from .utils.disk import path_decode +from typing import Any, Optional, Union +import zlib + +from . 
import exception +from .common import PersistentStoreMode from .logger import logger +from .utils.disk import path_decode # Used for writing/reading time stored in cache file EPOCH = datetime(1970, 1, 1, tzinfo=timezone.utc) # isoformat is spelled out for compatibility with Python v3.6 -AWARE_DATE_ISO_FORMAT = '%Y-%m-%dT%H:%M:%S.%f%z' -NAIVE_DATE_ISO_FORMAT = '%Y-%m-%dT%H:%M:%S.%f' +AWARE_DATE_ISO_FORMAT = "%Y-%m-%dT%H:%M:%S.%f%z" +NAIVE_DATE_ISO_FORMAT = "%Y-%m-%dT%H:%M:%S.%f" def _ntf_tidy(ntf): - """ - Reusable NamedTemporaryFile cleanup - """ + """Reusable NamedTemporaryFile Cleanup.""" if ntf: # Cleanup - try: + with contextlib.suppress(OSError): ntf.close() - except OSError: - # Already closed - pass - try: os.unlink(ntf.name) - logger.trace( - 'Persistent temporary file removed: %s', ntf.name) + logger.trace("Persistent temporary file removed: %s", ntf.name) except (FileNotFoundError, AttributeError): # AttributeError: something weird was passed in, no action required # FileNotFound: no worries; we were removing it anyway pass - except (OSError, IOError) as e: + except OSError as e: logger.error( - 'Persistent temporary file removal failed: %s', - ntf.name) - logger.debug( - 'Persistent Storage Exception: %s', str(e)) + "Persistent temporary file removal failed: %s", ntf.name + ) + logger.debug("Persistent Storage Exception: %s", str(e)) class CacheObject: @@ -84,10 +83,13 @@ class CacheObject: hash_engine = hashlib.sha256 hash_length = 6 - def __init__(self, value=None, expires=False, persistent=True): - """ - Tracks our objects and associates a time limit with them - """ + def __init__( + self, + value: Any = None, + expires: Union[bool, float, int, datetime, None] = False, + persistent: bool = True, + ) -> None: + """Tracks our objects and associates a time limit with them.""" self.__value = value self.__class_name = value.__class__.__name__ @@ -97,18 +99,21 @@ def __init__(self, value=None, expires=False, persistent=True): self.set_expiry(expires) # 
Whether or not we persist this object to disk or not - self.__persistent = True if persistent else False - - def set(self, value, expires=None, persistent=None): - """ - Sets fields on demand, if set to none, then they are left as is + self.__persistent = bool(persistent) - The intent of set is that it allows you to set a new a value - and optionally alter meta information against it. + def set( + self, + value: Any, + expires: Union[bool, float, int, datetime, None] = None, + persistent: Optional[bool] = None, + ) -> None: + """Sets fields on demand, if set to none, then they are left as is. - If expires or persistent isn't specified then their previous values - are used. + The intent of set is that it allows you to set a new a value and + optionally alter meta information against it. + If expires or persistent isn't specified then their previous values are + used. """ self.__value = value @@ -117,12 +122,11 @@ def set(self, value, expires=None, persistent=None): self.set_expiry(expires) if persistent is not None: - self.__persistent = True if persistent else False + self.__persistent = bool(persistent) - def set_expiry(self, expires=None): - """ - Sets a new expiry - """ + def set_expiry(self, expires: + Union[datetime, bool, float, int, None] = None) -> None: + """Sets a new expiry.""" if isinstance(expires, datetime): self.__expires = expires.astimezone(timezone.utc) @@ -136,72 +140,82 @@ def set_expiry(self, expires=None): self.__expires = datetime.now(tz=timezone.utc) elif isinstance(expires, (float, int)): - self.__expires = \ - datetime.now(tz=timezone.utc) + timedelta(seconds=expires) + self.__expires = datetime.now(tz=timezone.utc) + timedelta( + seconds=expires + ) else: # Unsupported raise AttributeError( - f"An invalid expiry time ({expires} was specified") - - def hash(self): - """ - Our checksum to track the validity of our data - """ - try: - return self.hash_engine( - str(self).encode('utf-8'), usedforsecurity=False).hexdigest() + f"An invalid 
expiry time ({expires} was specified" + ) - except TypeError: - # Python <= v3.8 - usedforsecurity flag does not work - return self.hash_engine(str(self).encode('utf-8')).hexdigest() + def hash(self) -> str: + """Our checksum to track the validity of our data.""" + return self.hash_engine( + str(self).encode("utf-8"), usedforsecurity=False + ).hexdigest() - def json(self): - """ - Returns our preparable json object - """ + def json(self) -> Optional[dict[str, Any]]: + """Returns our preparable json object.""" return { - 'v': self.__value, - 'x': (self.__expires - EPOCH).total_seconds() - if self.__expires else None, - 'c': self.__class_name if not isinstance(self.__value, datetime) - else ( - 'aware_datetime' if self.__value.tzinfo else 'naive_datetime'), - '!': self.hash()[:self.hash_length], + "v": self.__value, + "x": ( + (self.__expires - EPOCH).total_seconds() + if self.__expires + else None + ), + "c": ( + self.__class_name + if not isinstance(self.__value, datetime) + else ( + "aware_datetime" + if self.__value.tzinfo + else "naive_datetime" + ) + ), + "!": self.hash()[: self.hash_length], } @staticmethod - def instantiate(content, persistent=True, verify=True): - """ - Loads back data read in and returns a CacheObject or None if it could - not be loaded. You can pass in the contents of CacheObject.json() and - you'll receive a copy assuming the hash checks okay + def instantiate( + content: dict[str, Any], + persistent: bool = True, + verify: bool = True, + ) -> Optional["CacheObject"]: + """Loads back data read in and returns a CacheObject or None if it + could not be loaded. 
+ You can pass in the contents of CacheObject.json() and you'll receive a + copy assuming the hash checks okay """ try: - value = content['v'] - expires = content['x'] + value = content["v"] + expires = content["x"] if expires is not None: expires = datetime.fromtimestamp(expires, timezone.utc) # Acquire some useful integrity objects - class_name = content.get('c', '') + class_name = content.get("c", "") if not isinstance(class_name, str): - raise TypeError('Class name not expected string') + raise TypeError("Class name not expected string") - hashsum = content.get('!', '') + hashsum = content.get("!", "") if not isinstance(hashsum, str): - raise TypeError('SHA1SUM not expected string') + raise TypeError("SHA1SUM not expected string") except (TypeError, KeyError) as e: - logger.trace(f'CacheObject could not be parsed from {content}') - logger.trace('CacheObject exception: %s', str(e)) + logger.trace(f"CacheObject could not be parsed from {content}") + logger.trace("CacheObject exception: %s", str(e)) return None - if class_name in ('aware_datetime', 'naive_datetime', 'datetime'): + if class_name in ("aware_datetime", "naive_datetime", "datetime"): # If datetime is detected, it will fall under the naive category - iso_format = AWARE_DATE_ISO_FORMAT \ - if class_name[0] == 'a' else NAIVE_DATE_ISO_FORMAT + iso_format = ( + AWARE_DATE_ISO_FORMAT + if class_name[0] == "a" + else NAIVE_DATE_ISO_FORMAT + ) try: # Python v3.6 Support value = datetime.strptime(value, iso_format) @@ -210,64 +224,63 @@ def instantiate(content, persistent=True, verify=True): # TypeError is thrown if content is not string # ValueError is thrown if the string is not a valid format logger.trace( - f'CacheObject (dt) corrupted loading from {content}') + f"CacheObject (dt) corrupted loading from {content}" + ) return None - elif class_name == 'bytes': + elif class_name == "bytes": try: # Convert our object back to a bytes value = base64.b64decode(value) except binascii.Error: logger.trace( - 
f'CacheObject (bin) corrupted loading from {content}') + f"CacheObject (bin) corrupted loading from {content}" + ) return None # Initialize our object co = CacheObject(value, expires, persistent=persistent) - if verify and co.hash()[:co.hash_length] != hashsum: + if verify and co.hash()[: co.hash_length] != hashsum: # Our object was tampered with - logger.debug(f'Tampering detected with cache entry {co}') + logger.debug(f"Tampering detected with cache entry {co}") del co return None return co @property - def value(self): - """ - Returns our value - """ + def value(self) -> Any: + """Returns our value.""" return self.__value @property - def persistent(self): - """ - Returns our persistent value - """ + def persistent(self) -> bool: + """Returns our persistent value.""" return self.__persistent @property - def expires(self): - """ - Returns the datetime the object will expire - """ + def expires(self) -> Optional[datetime]: + """Returns the datetime the object will expire.""" return self.__expires @property - def expires_sec(self): - """ - Returns the number of seconds from now the object will expire - """ - - return None if self.__expires is None else max( - 0.0, (self.__expires - datetime.now(tz=timezone.utc)) - .total_seconds()) + def expires_sec(self) -> Optional[float]: + """Returns the number of seconds from now the object will expire.""" + + return ( + None + if self.__expires is None + else max( + 0.0, + ( + self.__expires - datetime.now(tz=timezone.utc) + ).total_seconds(), + ) + ) - def __bool__(self): - """ - Returns True it the object hasn't expired, and False if it has - """ + def __bool__(self) -> bool: + """Returns True if the object hasn't expired, and False if it has.""" if self.__expires is None: # No Expiry return True
+ """Handles equality == flag.""" if isinstance(other, CacheObject): return str(self) == str(other) return self.__value == other - def __str__(self): - """ - string output of our data - """ - persistent = '+' if self.persistent else '-' - return f'{self.__class_name}:{persistent}:{self.__value} expires: ' +\ - ('never' if self.__expires is None - else self.__expires.strftime(NAIVE_DATE_ISO_FORMAT)) + def __str__(self) -> str: + """String output of our data.""" + persistent = "+" if self.persistent else "-" + return f"{self.__class_name}:{persistent}:{self.__value} expires: " + ( + "never" + if self.__expires is None + else self.__expires.strftime(NAIVE_DATE_ISO_FORMAT) + ) class CacheJSONEncoder(json.JSONEncoder): - """ - A JSON Encoder for handling each of our cache objects - """ + """A JSON Encoder for handling each of our cache objects.""" def default(self, entry): if isinstance(entry, datetime): return entry.strftime( - AWARE_DATE_ISO_FORMAT if entry.tzinfo is not None - else NAIVE_DATE_ISO_FORMAT) + AWARE_DATE_ISO_FORMAT + if entry.tzinfo is not None + else NAIVE_DATE_ISO_FORMAT + ) elif isinstance(entry, CacheObject): return entry.json() elif isinstance(entry, bytes): - return base64.b64encode(entry).decode('utf-8') + return base64.b64encode(entry).decode("utf-8") return super().default(entry) class PersistentStore: - """ - An object to make working with persistent storage easier + """An object to make working with persistent storage easier. 
read() and write() are used for direct file i/o @@ -331,41 +341,46 @@ class PersistentStore: default_file_expiry = 2678400 # File encoding to use - encoding = 'utf-8' + encoding = "utf-8" # Default data set - base_key = 'default' + base_key = "default" # Directory to store cache - __cache_key = 'cache' + __cache_key = "cache" # Our Temporary working directory - temp_dir = 'tmp' + temp_dir = "tmp" # The directory our persistent store content gets placed in - data_dir = 'var' + data_dir = "var" # Our Persistent Store File Extension - __extension = '.psdata' + __extension = ".psdata" # Identify our backup file extension - __backup_extension = '._psbak' + __backup_extension = "._psbak" # Used to verify the key specified is valid # - must start with an alpha_numeric # - following optional characters can include period, underscore and # equal - __valid_key = re.compile(r'[a-z0-9][a-z0-9._-]*', re.I) + __valid_key = re.compile(r"[a-z0-9][a-z0-9._-]*", re.I) # Reference only __not_found_ref = (None, None) - def __init__(self, path=None, namespace='default', mode=None): - """ - Provide the namespace to work within. namespaces can only contain - alpha-numeric characters with the exception of '-' (dash), '_' - (underscore), and '.' (period). The namespace must be be relative - to the current URL being controlled. + def __init__( + self, + path: Optional[str] = None, + namespace: str = "default", + mode: Optional[Union[str, PersistentStoreMode]] = None, + ) -> None: + """Provide the namespace to work within. + + namespaces can only contain alpha-numeric characters with the exception + of '-' (dash), '_' (underscore), and '.' (period). The namespace must + be be relative to the current URL being controlled. 
""" # Initalize our mode so __del__() calls don't go bad on the # error checking below @@ -377,17 +392,19 @@ def __init__(self, path=None, namespace='default', mode=None): # Files to renew on calls to flush self.__renew = set() - if not isinstance(namespace, str) \ - or not self.__valid_key.match(namespace): + if not isinstance(namespace, str) or not self.__valid_key.match( + namespace + ): raise AttributeError( f"Persistent Storage namespace ({namespace}) provided is" - " invalid") + " invalid" + ) if isinstance(path, str): # A storage path has been defined if mode is None: # Store Default if no mode was provided along side of it - mode = PERSISTENT_STORE_MODES[0] + mode = PersistentStoreMode.AUTO # Store our information self.__base_path = os.path.join(path_decode(path), namespace) @@ -400,13 +417,6 @@ def __init__(self, path=None, namespace='default', mode=None): self.__temp_path = None self.__data_path = None - if mode not in PERSISTENT_STORE_MODES: - raise AttributeError( - f"Persistent Storage mode ({mode}) provided is invalid") - - # Store our mode - self.__mode = mode - # Tracks when we have content to flush self.__dirty = False @@ -417,17 +427,34 @@ def __init__(self, path=None, namespace='default', mode=None): # Internal Cache self._cache = None + try: + # Store our mode + self.__mode = ( + mode if isinstance(mode, PersistentStoreMode) + else PersistentStoreMode(mode.lower()) + ) + + except (AttributeError, ValueError): + err = ( + f"An invalid persistent storage mode ({mode}) was specified.", + ) + logger.warning(err) + raise AttributeError(err) from None + # Prepare our environment self.__prepare() - def read(self, key=None, compress=True, expires=False): - """ - Returns the content of the persistent store object + def read( + self, + key: Optional[str] = None, + compress: bool = True, + expires: Union[bool, float, int] = False, + ) -> Optional[bytes]: + """Returns the content of the persistent store object. 
if refresh is set to True, then the file's modify time is updated - preventing it from getting caught in prune calls. It's a means - of allowing it to persist and not get cleaned up in later prune - calls. + preventing it from getting caught in prune calls. It's a means of + allowing it to persist and not get cleaned up in later prune calls. Content is always returned as a byte object """ @@ -435,8 +462,11 @@ def read(self, key=None, compress=True, expires=False): with self.open(key, mode="rb", compress=compress) as fd: results = fd.read(self.max_file_size) if expires is False: - self.__renew.add(os.path.join( - self.__data_path, f"{key}{self.__extension}")) + self.__renew.add( + os.path.join( + self.__data_path, f"{key}{self.__extension}" + ) + ) return results @@ -446,18 +476,22 @@ def read(self, key=None, compress=True, expires=False): # - Logging of error already occurred inside self.open() pass - except (OSError, zlib.error, EOFError, UnicodeDecodeError, - IOError) as e: + except (OSError, zlib.error, EOFError, UnicodeDecodeError) as e: # We can't access the file or it does not exist - logger.warning('Could not read with persistent key: %s', key) - logger.debug('Persistent Storage Exception: %s', str(e)) + logger.warning("Could not read with persistent key: %s", key) + logger.debug("Persistent Storage Exception: %s", str(e)) # return none return None - def write(self, data, key=None, compress=True, _recovery=False): - """ - Writes the content to the persistent store if it doesn't exceed our + def write( + self, + data: Union[bytes, str, Any], + key: Optional[str] = None, + compress: bool = True, + _recovery: bool = False, + ) -> bool: + """Writes the content to the persistent store if it doesn't exceed our filesize limit. 
Content is always written as a byte object @@ -470,32 +504,38 @@ def write(self, data, key=None, compress=True, _recovery=False): elif not isinstance(key, str) or not self.__valid_key.match(key): raise AttributeError( - f"Persistent Storage key ({key} provided is invalid") + f"Persistent Storage key ({key} provided is invalid" + ) if not isinstance(data, (bytes, str)): # One last check, we will accept read() objets with the expectation # it will return a binary dataset - if not (hasattr(data, 'read') and callable(getattr(data, 'read'))): + if not (hasattr(data, "read") and callable(data.read)): raise AttributeError( - "Invalid data type {} provided to Persistent Storage" - .format(type(data))) + f"Invalid data type {type(data)} provided to Persistent" + " Storage" + ) try: # Read in our data data = data.read() if not isinstance(data, (bytes, str)): raise AttributeError( - "Invalid data type {} provided to Persistent Storage" - .format(type(data))) + f"Invalid data type {type(data)} provided to" + " Persistent Storage" + ) except Exception as e: logger.warning( - 'Could read() from potential iostream with persistent ' - 'key: %s', key) - logger.debug('Persistent Storage Exception: %s', str(e)) + "Could read() from potential iostream with persistent " + "key: %s", + key, + ) + logger.debug("Persistent Storage Exception: %s", str(e)) raise exception.AppriseDiskIOError( - "Invalid data type {} provided to Persistent Storage" - .format(type(data))) + f"Invalid data type {type(data)} provided to Persistent" + " Storage" + ) from None if self.__mode == PersistentStoreMode.MEMORY: # Nothing further can be done @@ -513,14 +553,14 @@ def write(self, data, key=None, compress=True, _recovery=False): prev_size = os.stat(io_file).st_size except FileNotFoundError: - # No worries, no size to accomodate + # No worries, no size to accommodate prev_size = 0 - except (OSError, IOError) as e: + except OSError as e: # Permission error of some kind or disk problem... 
# There is nothing we can do at this point - logger.warning('Could not write with persistent key: %s', key) - logger.debug('Persistent Storage Exception: %s', str(e)) + logger.warning("Could not write with persistent key: %s", key) + logger.debug("Persistent Storage Exception: %s", str(e)) return False # Create a temporary file to write our content into @@ -531,17 +571,17 @@ def write(self, data, key=None, compress=True, _recovery=False): if isinstance(data, str): data = data.encode(self.encoding) - ntf = tempfile.NamedTemporaryFile( - mode="wb", dir=self.__temp_path, - delete=False) + ntf = tempfile.NamedTemporaryFile( # noqa: SIM115 + mode="wb", dir=self.__temp_path, delete=False + ) # Close our file ntf.close() # Pointer to our open call - _open = open if not compress else gzip.open + open_ = open if not compress else gzip.open - with _open(ntf.name, mode='wb') as fd: + with open_(ntf.name, mode="wb") as fd: # Write our content fd.write(data) @@ -550,15 +590,18 @@ def write(self, data, key=None, compress=True, _recovery=False): # Log our progress logger.trace( - 'Wrote %d bytes of data to persistent key: %s', - new_file_size, key) + "Wrote %d bytes of data to persistent key: %s", + new_file_size, + key, + ) except FileNotFoundError: # This happens if the directory path is gone preventing the file # from being created... if not _recovery: return self.write( - data=data, key=key, compress=compress, _recovery=True) + data=data, key=key, compress=compress, _recovery=True + ) # We've already made our best effort to recover if we are here in # our code base... 
we're going to have to exit @@ -569,24 +612,26 @@ def write(self, data, key=None, compress=True, _recovery=False): # Early Exit return False - except (OSError, UnicodeEncodeError, IOError, zlib.error) as e: + except (OSError, UnicodeEncodeError, zlib.error) as e: # We can't access the file or it does not exist - logger.warning('Could not write to persistent key: %s', key) - logger.debug('Persistent Storage Exception: %s', str(e)) + logger.warning("Could not write to persistent key: %s", key) + logger.debug("Persistent Storage Exception: %s", str(e)) # Tidy our Named Temporary File _ntf_tidy(ntf) return False - if self.max_file_size > 0 and ( - new_file_size + self.size() - prev_size) > self.max_file_size: + if ( + self.max_file_size > 0 + and (new_file_size + self.size() - prev_size) > self.max_file_size + ): # The content to store is to large logger.warning( - 'Persistent content exceeds allowable maximum file length ' - '({}KB); provide {}KB'.format( - int(self.max_file_size / 1024), - int(new_file_size / 1024))) + "Persistent content exceeds allowable maximum file length" + f" ({int(self.max_file_size / 1024)}KB); provide" + f" {int(new_file_size / 1024)}KB" + ) return False # Return our final move @@ -605,16 +650,16 @@ def write(self, data, key=None, compress=True, _recovery=False): return True def __move(self, src, dst): - """ - Moves the new file in place and handles the old if it exists already + """Moves the new file in place and handles the old if it exists already If the transaction fails in any way, the old file is swapped back. Function returns True if successful and False if not. 
""" # A temporary backup of the file we want to move in place - dst_backup = dst[:-len(self.__backup_extension)] + \ - self.__backup_extension + dst_backup = ( + dst[: -len(self.__backup_extension)] + self.__backup_extension + ) # # Backup the old file (if it exists) allowing us to have a restore @@ -624,37 +669,42 @@ def __move(self, src, dst): # make sure the file isn't already present; if it is; remove it os.unlink(dst_backup) logger.trace( - 'Removed previous persistent backup file: %s', dst_backup) + "Removed previous persistent backup file: %s", dst_backup + ) except FileNotFoundError: # no worries; we were removing it anyway pass - except (OSError, IOError) as e: + except OSError as e: # Permission error of some kind or disk problem... # There is nothing we can do at this point logger.warning( - 'Could not previous persistent data backup: %s', dst_backup) - logger.debug('Persistent Storage Exception: %s', str(e)) + "Could not previous persistent data backup: %s", dst_backup + ) + logger.debug("Persistent Storage Exception: %s", str(e)) return False try: # Back our file up so we have a fallback os.rename(dst, dst_backup) logger.trace( - 'Persistent storage backup file created: %s', dst_backup) + "Persistent storage backup file created: %s", dst_backup + ) except FileNotFoundError: # Not a problem; this is a brand new file we're writing # There is nothing to backup pass - except (OSError, IOError) as e: + except OSError as e: # This isn't good... 
we couldn't put our new file in place logger.warning( - 'Could not install persistent content %s -> %s', - dst, os.path.basename(dst_backup)) - logger.debug('Persistent Storage Exception: %s', str(e)) + "Could not install persistent content %s -> %s", + dst, + os.path.basename(dst_backup), + ) + logger.debug("Persistent Storage Exception: %s", str(e)) return False # @@ -662,41 +712,52 @@ def __move(self, src, dst): # try: os.rename(src, dst) - logger.trace('Persistent file installed: %s', dst) + logger.trace("Persistent file installed: %s", dst) - except (OSError, IOError) as e: + except OSError as e: # This isn't good... we couldn't put our new file in place # Begin fall-back process before leaving the funtion logger.warning( - 'Could not install persistent content %s -> %s', - src, os.path.basename(dst)) - logger.debug('Persistent Storage Exception: %s', str(e)) + "Could not install persistent content %s -> %s", + src, + os.path.basename(dst), + ) + logger.debug("Persistent Storage Exception: %s", str(e)) try: # Restore our old backup (if it exists) os.rename(dst_backup, dst) - logger.trace( - 'Restoring original persistent content: %s', dst) + logger.trace("Restoring original persistent content: %s", dst) except FileNotFoundError: # Not a problem pass - except (OSError, IOError) as e: + except OSError as e: # Permission error of some kind or disk problem... 
# There is nothing we can do at this point logger.warning( - 'Failed to restore original persistent file: %s', dst) - logger.debug('Persistent Storage Exception: %s', str(e)) + "Failed to restore original persistent file: %s", dst + ) + logger.debug("Persistent Storage Exception: %s", str(e)) return False return True - def open(self, key=None, mode='r', buffering=-1, encoding=None, - errors=None, newline=None, closefd=True, opener=None, - compress=False, compresslevel=9): - """ - Returns an iterator to our our file within our namespace identified + def open( + self, + key: Optional[str] = None, + mode: str = "r", + buffering: int = -1, + encoding: Optional[str] = None, + errors: Optional[str] = None, + newline: Optional[str] = None, + closefd: bool = True, + opener: Optional[Any] = None, + compress: bool = False, + compresslevel: int = 9, + ) -> Any: + """Returns an iterator to our our file within our namespace identified by the key provided. If no key is provided, then the default is used @@ -707,7 +768,8 @@ def open(self, key=None, mode='r', buffering=-1, encoding=None, elif not isinstance(key, str) or not self.__valid_key.match(key): raise AttributeError( - f"Persistent Storage key ({key} provided is invalid") + f"Persistent Storage key ({key} provided is invalid" + ) if self.__mode == PersistentStoreMode.MEMORY: # Nothing further can be done @@ -715,47 +777,70 @@ def open(self, key=None, mode='r', buffering=-1, encoding=None, io_file = os.path.join(self.__data_path, f"{key}{self.__extension}") try: - return open( - io_file, mode=mode, buffering=buffering, encoding=encoding, - errors=errors, newline=newline, closefd=closefd, - opener=opener) \ - if not compress else gzip.open( - io_file, compresslevel=compresslevel, encoding=encoding, - errors=errors, newline=newline) + return ( + open( + io_file, + mode=mode, + buffering=buffering, + encoding=encoding, + errors=errors, + newline=newline, + closefd=closefd, + opener=opener, + ) + if not compress + else 
gzip.open( + io_file, + compresslevel=compresslevel, + encoding=encoding, + errors=errors, + newline=newline, + ) + ) except FileNotFoundError: # pass along (but wrap with Apprise exception) raise exception.AppriseFileNotFound( - f"No such file or directory: '{io_file}'") + f"No such file or directory: '{io_file}'" + ) from None - except (OSError, IOError, zlib.error) as e: + except (OSError, zlib.error) as e: # We can't access the file or it does not exist - logger.warning('Could not read with persistent key: %s', key) - logger.debug('Persistent Storage Exception: %s', str(e)) - raise exception.AppriseDiskIOError(str(e)) - - def get(self, key, default=None, lazy=True): - """ - Fetches from cache - """ + logger.warning("Could not read with persistent key: %s", key) + logger.debug("Persistent Storage Exception: %s", str(e)) + raise exception.AppriseDiskIOError(str(e)) from None + + def get( + self, + key: str, + default: Any = None, + lazy: bool = True, + ) -> Any: + """Fetches from cache.""" if self._cache is None and not self.__load_cache(): return default - if key in self._cache and \ - not self.__mode == PersistentStoreMode.MEMORY and \ - not self.__dirty: + if ( + key in self._cache + and self.__mode != PersistentStoreMode.MEMORY + and not self.__dirty + ): # ensure we renew our content self.__renew.add(self.cache_file) - return self._cache[key].value \ - if key in self._cache and self._cache[key] else default + return self._cache[key].value if self._cache.get(key) else default - def set(self, key, value, expires=None, persistent=True, lazy=True): - """ - Cache reference - """ + def set( + self, + key: str, + value: Any, + expires: Union[float, int, datetime, bool, None] = None, + persistent: bool = True, + lazy: bool = True, + ) -> bool: + """Cache reference.""" if self._cache is None and not self.__load_cache(): return False @@ -782,9 +867,8 @@ def set(self, key, value, expires=None, persistent=True, lazy=True): return True - def clear(self, *args): - """ - 
Remove one or more cache entry by it's key + def clear(self, *args: str) -> Optional[bool]: + """Remove one or more cache entry by it's key. e.g: clear('key') clear('key1', 'key2', key-12') @@ -820,10 +904,8 @@ def clear(self, *args): # Flush changes to disk return self.flush() - def prune(self): - """ - Eliminates expired cache entries - """ + def prune(self) -> bool: + """Eliminates expired cache entries.""" if self._cache is None and not self.__load_cache(): return False @@ -847,8 +929,7 @@ def prune(self): return change def __load_cache(self, _recovery=False): - """ - Loads our cache + """Loads our cache. _recovery is reserved for internal usage and should not be changed """ @@ -864,7 +945,7 @@ def __load_cache(self, _recovery=False): # Prepare our cache file cache_file = self.cache_file try: - with gzip.open(cache_file, 'rb') as f: + with gzip.open(cache_file, "rb") as f: # Read our ontent from disk self._cache = {} for k, v in json.loads(f.read().decode(self.encoding)).items(): @@ -877,32 +958,40 @@ def __load_cache(self, _recovery=False): # Track changes from our loadset self.__dirty = True - except (UnicodeDecodeError, json.decoder.JSONDecodeError, zlib.error, - TypeError, AttributeError, EOFError): + except ( + UnicodeDecodeError, + json.decoder.JSONDecodeError, + zlib.error, + TypeError, + AttributeError, + EOFError, + ): # Let users known there was a problem logger.warning( - 'Corrupted access persistent cache content: %s', - cache_file) + "Corrupted access persistent cache content: %s", cache_file + ) if not _recovery: try: os.unlink(cache_file) logger.trace( - 'Removed previous persistent cache content: %s', - cache_file) + "Removed previous persistent cache content: %s", + cache_file, + ) except FileNotFoundError: # no worries; we were removing it anyway pass - except (OSError, IOError) as e: + except OSError as e: # Permission error of some kind or disk problem... 
# There is nothing we can do at this point logger.warning( - 'Could not remove persistent cache content: %s', - cache_file) - logger.debug('Persistent Storage Exception: %s', str(e)) + "Could not remove persistent cache content: %s", + cache_file, + ) + logger.debug("Persistent Storage Exception: %s", str(e)) return False return self.__load_cache(_recovery=True) @@ -912,33 +1001,33 @@ def __load_cache(self, _recovery=False): # No problem; no cache to load self._cache = {} - except (OSError, IOError) as e: + except OSError as e: # Permission error of some kind or disk problem... # There is nothing we can do at this point logger.warning( - 'Could not load persistent cache for namespace %s', - os.path.basename(self.__base_path)) - logger.debug('Persistent Storage Exception: %s', str(e)) + "Could not load persistent cache for namespace %s", + os.path.basename(self.__base_path), + ) + logger.debug("Persistent Storage Exception: %s", str(e)) return False # Ensure our dirty flag is set to False return True def __prepare(self, flush=True): - """ - Prepares a working environment - """ + """Prepares a working environment.""" if self.__mode != PersistentStoreMode.MEMORY: # Ensure our path exists try: os.makedirs(self.__base_path, mode=0o770, exist_ok=True) - except (OSError, IOError) as e: + except OSError as e: # Permission error logger.debug( - 'Could not create persistent store directory %s', - self.__base_path) - logger.debug('Persistent Storage Exception: %s', str(e)) + "Could not create persistent store directory %s", + self.__base_path, + ) + logger.debug("Persistent Storage Exception: %s", str(e)) # Mode changed back to MEMORY self.__mode = PersistentStoreMode.MEMORY @@ -947,12 +1036,13 @@ def __prepare(self, flush=True): try: os.makedirs(self.__temp_path, mode=0o770, exist_ok=True) - except (OSError, IOError) as e: + except OSError as e: # Permission error logger.debug( - 'Could not create persistent store directory %s', - self.__temp_path) - logger.debug('Persistent 
Storage Exception: %s', str(e)) + "Could not create persistent store directory %s", + self.__temp_path, + ) + logger.debug("Persistent Storage Exception: %s", str(e)) # Mode changed back to MEMORY self.__mode = PersistentStoreMode.MEMORY @@ -960,36 +1050,41 @@ def __prepare(self, flush=True): try: os.makedirs(self.__data_path, mode=0o770, exist_ok=True) - except (OSError, IOError) as e: + except OSError as e: # Permission error logger.debug( - 'Could not create persistent store directory %s', - self.__data_path) - logger.debug('Persistent Storage Exception: %s', str(e)) + "Could not create persistent store directory %s", + self.__data_path, + ) + logger.debug("Persistent Storage Exception: %s", str(e)) # Mode changed back to MEMORY self.__mode = PersistentStoreMode.MEMORY if self.__mode is PersistentStoreMode.MEMORY: logger.warning( - 'The persistent storage could not be fully initialized; ' - 'operating in MEMORY mode') + "The persistent storage could not be fully initialized; " + "operating in MEMORY mode" + ) else: if self._cache: # Recovery taking place self.__dirty = True logger.warning( - 'The persistent storage environment was disrupted') + "The persistent storage environment was disrupted" + ) if self.__mode is PersistentStoreMode.FLUSH and flush: # Flush changes to disk return self.flush(_recovery=True) - def flush(self, force=False, _recovery=False): - """ - Save's our cache to disk - """ + def flush( + self, + force: bool = False, + _recovery: bool = False, + ) -> bool: + """Save's our cache to disk.""" if self._cache is None or self.__mode == PersistentStoreMode.MEMORY: # nothing to do @@ -1003,20 +1098,20 @@ def flush(self, force=False, _recovery=False): try: # (access_time, modify_time) os.utime(path, (ftime, ftime)) - logger.trace('file timestamp updated: %s', path) + logger.trace("file timestamp updated: %s", path) except FileNotFoundError: # No worries... 
move along pass - except (OSError, IOError) as e: + except OSError as e: # We can't access the file or it does not exist - logger.debug('Could not update file timestamp: %s', path) - logger.debug('Persistent Storage Exception: %s', str(e)) + logger.debug("Could not update file timestamp: %s", path) + logger.debug("Persistent Storage Exception: %s", str(e)) if not force and self.__dirty is False: # Nothing further to do - logger.trace('Persistent cache is consistent with memory map') + logger.trace("Persistent cache is consistent with memory map") return True if _recovery: @@ -1033,44 +1128,47 @@ def flush(self, force=False, _recovery=False): # # We're deleting the cache file s there are no entries left in it # - backup_file = cache_file[:-len(self.__backup_extension)] + \ - self.__backup_extension + backup_file = ( + cache_file[: -len(self.__backup_extension)] + + self.__backup_extension + ) try: os.unlink(backup_file) logger.trace( - 'Removed previous persistent cache backup: %s', - backup_file) + "Removed previous persistent cache backup: %s", backup_file + ) except FileNotFoundError: # no worries; we were removing it anyway pass - except (OSError, IOError) as e: + except OSError as e: # Permission error of some kind or disk problem... # There is nothing we can do at this point logger.warning( - 'Could not remove persistent cache backup: %s', - backup_file) - logger.debug('Persistent Storage Exception: %s', str(e)) + "Could not remove persistent cache backup: %s", backup_file + ) + logger.debug("Persistent Storage Exception: %s", str(e)) return False try: os.rename(cache_file, backup_file) logger.trace( - 'Persistent cache backup file created: %s', - backup_file) + "Persistent cache backup file created: %s", backup_file + ) except FileNotFoundError: # Not a problem; do not create a log entry pass - except (OSError, IOError) as e: + except OSError as e: # This isn't good... 
we couldn't put our new file in place logger.warning( - 'Could not remove stale persistent cache file: %s', - cache_file) - logger.debug('Persistent Storage Exception: %s', str(e)) + "Could not remove stale persistent cache file: %s", + cache_file, + ) + logger.debug("Persistent Storage Exception: %s", str(e)) return False return True @@ -1082,9 +1180,12 @@ def flush(self, force=False, _recovery=False): ntf = None try: - ntf = tempfile.NamedTemporaryFile( - mode="w+", encoding=self.encoding, dir=self.__temp_path, - delete=False) + ntf = tempfile.NamedTemporaryFile( # noqa: SIM115 + mode="w+", + encoding=self.encoding, + dir=self.__temp_path, + delete=False, + ) ntf.close() @@ -1105,9 +1206,10 @@ def flush(self, force=False, _recovery=False): except OSError as e: logger.error( - 'Persistent temporary directory inaccessible: %s', - self.__temp_path) - logger.debug('Persistent Storage Exception: %s', str(e)) + "Persistent temporary directory inaccessible: %s", + self.__temp_path, + ) + logger.debug("Persistent Storage Exception: %s", str(e)) # Tidy our Named Temporary File _ntf_tidy(ntf) @@ -1117,20 +1219,28 @@ def flush(self, force=False, _recovery=False): try: # write our content currently saved to disk to our temporary file - with gzip.open(ntf.name, 'wb') as f: + with gzip.open(ntf.name, "wb") as f: # Write our content to disk - f.write(json.dumps( - {k: v for k, v in self._cache.items() - if v and v.persistent}, - separators=(',', ':'), - cls=CacheJSONEncoder).encode(self.encoding)) + f.write( + json.dumps( + { + k: v + for k, v in self._cache.items() + if v and v.persistent + }, + separators=(",", ":"), + cls=CacheJSONEncoder, + ).encode(self.encoding) + ) except TypeError as e: # JSON object contains content that can not be encoded to disk logger.error( - 'Persistent temporary file can not be written to ' - 'due to bad input data: %s', ntf.name) - logger.debug('Persistent Storage Exception: %s', str(e)) + "Persistent temporary file can not be written to " + 
"due to bad input data: %s", + ntf.name, + ) + logger.debug("Persistent Storage Exception: %s", str(e)) # Tidy our Named Temporary File _ntf_tidy(ntf) @@ -1140,9 +1250,9 @@ def flush(self, force=False, _recovery=False): except (OSError, EOFError, zlib.error) as e: logger.error( - 'Persistent temporary file inaccessible: %s', - ntf.name) - logger.debug('Persistent Storage Exception: %s', str(e)) + "Persistent temporary file inaccessible: %s", ntf.name + ) + logger.debug("Persistent Storage Exception: %s", str(e)) # Tidy our Named Temporary File _ntf_tidy(ntf) @@ -1162,10 +1272,12 @@ def flush(self, force=False, _recovery=False): return True - def files(self, exclude=True, lazy=True): - """ - Returns the total files - """ + def files( + self, + exclude: bool = True, + lazy: bool = True, + ) -> list[str]: + """Returns the total files.""" if lazy and exclude in self.__cache_files: # Take an early exit with our cached results @@ -1183,54 +1295,73 @@ def files(self, exclude=True, lazy=True): # A list of criteria that should be excluded from the size count self.__exclude_list = ( # Exclude backup cache file from count - re.compile(re.escape(os.path.join( - self.__base_path, - f'{self.__cache_key}{self.__backup_extension}'))), - + re.compile( + re.escape( + os.path.join( + self.__base_path, + f"{self.__cache_key}{self.__backup_extension}", + ) + ) + ), # Exclude temporary files - re.compile(re.escape(self.__temp_path) + r'[/\\].+'), - + re.compile(re.escape(self.__temp_path) + r"[/\\].+"), # Exclude custom backup persistent files re.compile( - re.escape(self.__data_path) + r'[/\\].+' + re.escape( - self.__backup_extension)), + re.escape(self.__data_path) + + r"[/\\].+" + + re.escape(self.__backup_extension) + ), ) try: if exclude: - self.__cache_files[exclude] = \ - [path for path in filter(os.path.isfile, glob.glob( - os.path.join(self.__base_path, '**', '*'), - recursive=True)) - if next((False for p in self.__exclude_list - if p.match(path)), True)] + 
self.__cache_files[exclude] = [ + path + for path in filter( + os.path.isfile, + glob.glob( + os.path.join(self.__base_path, "**", "*"), + recursive=True, + ), + ) + if next( + (False for p in self.__exclude_list if p.match(path)), + True, + ) + ] else: # No exclusion list applied - self.__cache_files[exclude] = \ - [path for path in filter(os.path.isfile, glob.glob( - os.path.join(self.__base_path, '**', '*'), - recursive=True))] + self.__cache_files[exclude] = list( + filter( + os.path.isfile, + glob.glob( + os.path.join(self.__base_path, "**", "*"), + recursive=True, + ), + ) + ) - except (OSError, IOError): + except OSError: # We can't access the directory or it does not exist self.__cache_files[exclude] = [] return self.__cache_files[exclude] @staticmethod - def disk_scan(path, namespace=None, closest=True): - """ - Scansk a path provided and returns namespaces detected - """ + def disk_scan( + path: str, + namespace: Optional[Union[str, list[str]]] = None, + closest: bool = True, + ) -> list[str]: + """Scansk a path provided and returns namespaces detected.""" - logger.trace('Persistent path can of: %s', path) + logger.trace("Persistent path can of: %s", path) def is_namespace(x): - """ - Validate what was detected is a valid namespace - """ - return os.path.isdir(os.path.join(path, x)) \ - and PersistentStore.__valid_key.match(x) + """Validate what was detected is a valid namespace.""" + return os.path.isdir( + os.path.join(path, x) + ) and PersistentStore.__valid_key.match(x) # Handle our namespace searching if namespace: @@ -1240,38 +1371,51 @@ def is_namespace(x): elif not isinstance(namespace, (tuple, set, list)): raise AttributeError( "namespace must be None, a string, or a tuple/set/list " - "of strings") + "of strings" + ) try: # Acquire all of the files in question - namespaces = \ - [ns for ns in filter(is_namespace, os.listdir(path)) - if not namespace or next( - (True for n in namespace if ns.startswith(n)), False)] \ - if closest else \ - [ns for 
ns in filter(is_namespace, os.listdir(path)) - if not namespace or ns in namespace] + namespaces = ( + [ + ns + for ns in filter(is_namespace, os.listdir(path)) + if not namespace + or next( + (True for n in namespace if ns.startswith(n)), False + ) + ] + if closest + else [ + ns + for ns in filter(is_namespace, os.listdir(path)) + if not namespace or ns in namespace + ] + ) except FileNotFoundError: # no worries; Nothing to do - logger.debug('Disk Prune path not found; nothing to clean.') + logger.debug("Disk Prune path not found; nothing to clean.") return [] - except (OSError, IOError) as e: + except OSError as e: # Permission error of some kind or disk problem... # There is nothing we can do at this point - logger.error( - 'Disk Scan detetcted inaccessible path: %s', path) - logger.debug( - 'Persistent Storage Exception: %s', str(e)) + logger.error("Disk Scan detetcted inaccessible path: %s", path) + logger.debug("Persistent Storage Exception: %s", str(e)) return [] return namespaces @staticmethod - def disk_prune(path, namespace=None, expires=None, action=False): - """ - Prune persistent disk storage entries that are old and/or unreferenced + def disk_prune( + path: str, + namespace: Optional[Union[str, list[str]]] = None, + expires: Optional[Union[int, float]] = None, + action: bool = False, + ) -> dict[str, list[dict[str, Union[str, bool]]]]: + """Prune persistent disk storage entries that are old and/or + unreferenced. 
you must specify a path to perform the prune within @@ -1280,23 +1424,24 @@ def disk_prune(path, namespace=None, expires=None, action=False): if action is not set to False, directories to be removed are returned only - """ # Prepare our File Expiry - expires = datetime.now() - timedelta(seconds=expires) \ - if isinstance(expires, (float, int)) and expires >= 0 \ + expires = ( + datetime.now() - timedelta(seconds=expires) + if isinstance(expires, (float, int)) and expires >= 0 else PersistentStore.default_file_expiry + ) # Get our namespaces namespaces = PersistentStore.disk_scan(path, namespace) # Track matches - _map = {} + map_ = {} for namespace in namespaces: # Prepare our map - _map[namespace] = [] + map_[namespace] = [] # Reference Directories base_dir = os.path.join(path, namespace) @@ -1306,26 +1451,52 @@ def disk_prune(path, namespace=None, expires=None, action=False): # Careful to only focus on files created by this Persistent Store # object files = [ - os.path.join(base_dir, f'{PersistentStore.__cache_key}' - f'{PersistentStore.__extension}'), - os.path.join(base_dir, f'{PersistentStore.__cache_key}' - f'{PersistentStore.__backup_extension}'), + os.path.join( + base_dir, + f"{PersistentStore.__cache_key}" + f"{PersistentStore.__extension}", + ), + os.path.join( + base_dir, + f"{PersistentStore.__cache_key}" + f"{PersistentStore.__backup_extension}", + ), ] # Update our files (applying what was defined above too) valid_data_re = re.compile( - r'.*(' + re.escape(PersistentStore.__extension) + - r'|' + re.escape(PersistentStore.__backup_extension) + r')$') + r".*(" + + re.escape(PersistentStore.__extension) + + r"|" + + re.escape(PersistentStore.__backup_extension) + + r")$" + ) - files = [path for path in filter( - os.path.isfile, chain(glob.glob( - os.path.join(data_dir, '*'), recursive=False), files)) - if valid_data_re.match(path)] + files = [ + path + for path in filter( + os.path.isfile, + chain( + glob.glob( + os.path.join(data_dir, "*"), 
recursive=False + ), + files, + ), + ) + if valid_data_re.match(path) + ] # Now all temporary files - files.extend([path for path in filter( - os.path.isfile, glob.glob( - os.path.join(temp_dir, '*'), recursive=False))]) + files.extend( + list( + filter( + os.path.isfile, + glob.glob( + os.path.join(temp_dir, "*"), recursive=False + ), + ) + ) + ) # Track if we should do a directory sweep later on dir_sweep = True @@ -1339,14 +1510,17 @@ def disk_prune(path, namespace=None, expires=None, action=False): # no worries; we were removing it anyway continue - except (OSError, IOError) as e: + except OSError as e: # Permission error of some kind or disk problem... # There is nothing we can do at this point logger.error( - 'Disk Prune (ns=%s, clean=%s) detetcted inaccessible ' - 'file: %s', namespace, 'yes' if action else 'no', file) - logger.debug( - 'Persistent Storage Exception: %s', str(e)) + "Disk Prune (ns=%s, clean=%s) detetcted inaccessible " + "file: %s", + namespace, + "yes" if action else "no", + file, + ) + logger.debug("Persistent Storage Exception: %s", str(e)) # No longer worth doing a directory sweep dir_sweep = False @@ -1359,19 +1533,22 @@ def disk_prune(path, namespace=None, expires=None, action=False): # Handle Removing # record = { - 'path': file, - 'removed': False, + "path": file, + "removed": False, } if action: try: os.unlink(file) # Update our record - record['removed'] = True + record["removed"] = True logger.info( - 'Disk Prune (ns=%s, clean=%s) removed persistent ' - 'file: %s', namespace, - 'yes' if action else 'no', file) + "Disk Prune (ns=%s, clean=%s) removed persistent " + "file: %s", + namespace, + "yes" if action else "no", + file, + ) except FileNotFoundError: # no longer worth doing a directory sweep @@ -1380,22 +1557,26 @@ def disk_prune(path, namespace=None, expires=None, action=False): # otherwise, no worries; we were removing the file # anyway - except (OSError, IOError) as e: + except OSError as e: # Permission error of some kind 
or disk problem... # There is nothing we can do at this point logger.error( - 'Disk Prune (ns=%s, clean=%s) failed to remove ' - 'persistent file: %s', namespace, - 'yes' if action else 'no', file) + "Disk Prune (ns=%s, clean=%s) failed to remove " + "persistent file: %s", + namespace, + "yes" if action else "no", + file, + ) logger.debug( - 'Persistent Storage Exception: %s', str(e)) + "Persistent Storage Exception: %s", str(e) + ) # No longer worth doing a directory sweep dir_sweep = False # Store our record - _map[namespace].append(record) + map_[namespace].append(record) # Memory tidy del files @@ -1408,18 +1589,23 @@ def disk_prune(path, namespace=None, expires=None, action=False): try: os.rmdir(dirpath) logger.info( - 'Disk Prune (ns=%s, clean=%s) removed ' - 'persistent dir: %s', namespace, - 'yes' if action else 'no', dirpath) + "Disk Prune (ns=%s, clean=%s) removed " + "persistent dir: %s", + namespace, + "yes" if action else "no", + dirpath, + ) except OSError: # do nothing; pass - return _map + return map_ - def size(self, exclude=True, lazy=True): - """ - Returns the total size of the persistent storage in bytes - """ + def size( + self, + exclude: bool = True, + lazy: bool = True, + ) -> int: + """Returns the total size of the persistent storage in bytes.""" if lazy and self.__cache_size is not None: # Take an early exit @@ -1432,29 +1618,24 @@ def size(self, exclude=True, lazy=True): # Get a list of files (file paths) in the given directory try: - self.__cache_size = sum( - [os.stat(path).st_size for path in - self.files(exclude=exclude, lazy=lazy)]) + self.__cache_size = sum(os.stat(path).st_size + for path in self.files(exclude=exclude, lazy=lazy)) - except (OSError, IOError): + except OSError: # We can't access the directory or it does not exist self.__cache_size = 0 return self.__cache_size - def __del__(self): - """ - Deconstruction of our object - """ + def __del__(self) -> None: + """Deconstruction of our object.""" if self.__mode == 
PersistentStoreMode.AUTO: # Flush changes to disk self.flush() - def __delitem__(self, key): - """ - Remove a cache entry by it's key - """ + def __delitem__(self, key: str) -> None: + """Remove a cache entry by it's key.""" if self._cache is None and not self.__load_cache(): raise KeyError("Could not initialize cache") @@ -1476,21 +1657,19 @@ def __delitem__(self, key): return - def __contains__(self, key): - """ - Verify if our storage contains the key specified or not. - In additiont to this, if the content is expired, it is considered - to be not contained in the storage. + def __contains__(self, key: str) -> bool: + """Verify if our storage contains the key specified or not. + + In additiont to this, if the content is expired, it is considered to be + not contained in the storage. """ if self._cache is None and not self.__load_cache(): return False return key in self._cache and self._cache[key] - def __setitem__(self, key, value): - """ - Sets a cache value without disrupting existing settings in place - """ + def __setitem__(self, key: str, value: Any) -> None: + """Sets a cache value without disrupting existing settings in place.""" if self._cache is None and not self.__load_cache(): raise KeyError("Could not initialize cache") @@ -1512,10 +1691,8 @@ def __setitem__(self, key, value): return - def __getitem__(self, key): - """ - Returns the indexed value - """ + def __getitem__(self, key: str) -> Any: + """Returns the indexed value.""" if self._cache is None and not self.__load_cache(): raise KeyError("Could not initialize cache") @@ -1526,40 +1703,46 @@ def __getitem__(self, key): return result - def keys(self): - """ - Returns our keys - """ + def keys(self) -> builtins.set[str]: + """Returns our keys.""" if self._cache is None and not self.__load_cache(): # There are no keys to return return {}.keys() return self._cache.keys() - def delete(self, *args, all=None, temp=None, cache=None, validate=True): - """ - Manages our file space and tidys it up + def 
delete( + self, + *args: str, + all: Optional[bool] = None, + temp: Optional[bool] = None, + cache: Optional[bool] = None, + validate: bool = True, + ) -> bool: + """Manages our file space and tidys it up. - delete('key', 'key2') - delete(all=True) - delete(temp=True, cache=True) + delete('key', 'key2') delete(all=True) delete(temp=True, cache=True) """ # Our failure flag has_error = False valid_key_re = re.compile( - r'^(?P.+)(' + - re.escape(self.__backup_extension) + - r'|' + re.escape(self.__extension) + r')$', re.I) + r"^(?P.+)(" + + re.escape(self.__backup_extension) + + r"|" + + re.escape(self.__extension) + + r")$", + re.I, + ) # Default asignments if all is None: - all = True if not (len(args) or temp or cache) else False + all = bool(not (len(args) or temp or cache)) if temp is None: - temp = True if all else False + temp = bool(all) if cache is None: - cache = True if all else False + cache = bool(all) if cache and self._cache: # Reset our object @@ -1580,14 +1763,21 @@ def delete(self, *args, all=None, temp=None, cache=None, validate=True): if base == self.__base_path and cache: # We're handling a cache file (hopefully) result = valid_key_re.match(fname) - key = None if not result else ( - result['key'] if self.__valid_key.match(result['key']) - else None) + key = ( + None + if not result + else ( + result["key"] + if self.__valid_key.match(result["key"]) + else None + ) + ) if validate and key != self.__cache_key: # We're not dealing with a cache key logger.debug( - 'Persistent File cleanup ignoring file: %s', path) + "Persistent File cleanup ignoring file: %s", path + ) continue # @@ -1597,20 +1787,28 @@ def delete(self, *args, all=None, temp=None, cache=None, validate=True): elif base == self.__data_path and (args or all): # We're handling a file found in our custom data path result = valid_key_re.match(fname) - key = None if not result else ( - result['key'] if self.__valid_key.match(result['key']) - else None) + key = ( + None + if not result + 
else ( + result["key"] + if self.__valid_key.match(result["key"]) + else None + ) + ) if validate and key is None: # we're set to validate and a non-valid file was found logger.debug( - 'Persistent File cleanup ignoring file: %s', path) + "Persistent File cleanup ignoring file: %s", path + ) continue elif not all and (key is None or key not in args): # no match found logger.debug( - 'Persistent File cleanup ignoring file: %s', path) + "Persistent File cleanup ignoring file: %s", path + ) continue # @@ -1626,24 +1824,23 @@ def delete(self, *args, all=None, temp=None, cache=None, validate=True): else: # No match; move on - logger.debug('Persistent File cleanup ignoring file: %s', path) + logger.debug("Persistent File cleanup ignoring file: %s", path) continue try: os.unlink(path) - logger.info('Removed persistent file: %s', ppath) + logger.info("Removed persistent file: %s", ppath) except FileNotFoundError: # no worries; we were removing it anyway pass - except (OSError, IOError) as e: + except OSError as e: # Permission error of some kind or disk problem... 
# There is nothing we can do at this point has_error = True - logger.error( - 'Failed to remove persistent file: %s', ppath) - logger.debug('Persistent Storage Exception: %s', str(e)) + logger.error("Failed to remove persistent file: %s", ppath) + logger.debug("Persistent Storage Exception: %s", str(e)) # Reset our reference variables self.__cache_size = None @@ -1652,25 +1849,19 @@ def delete(self, *args, all=None, temp=None, cache=None, validate=True): return not has_error @property - def cache_file(self): - """ - Returns the full path to the namespace directory - """ + def cache_file(self) -> str: + """Returns the full path to the namespace directory.""" return os.path.join( self.__base_path, - f'{self.__cache_key}{self.__extension}', + f"{self.__cache_key}{self.__extension}", ) @property - def path(self): - """ - Returns the full path to the namespace directory - """ + def path(self) -> Optional[str]: + """Returns the full path to the namespace directory.""" return self.__base_path @property - def mode(self): - """ - Returns the full path to the namespace directory - """ + def mode(self) -> PersistentStoreMode: + """Returns the Persistent Storage mode.""" return self.__mode diff --git a/libs/apprise/plugins/__init__.py b/libs/apprise/plugins/__init__.py index 87c840e201..b486e9577c 100644 --- a/libs/apprise/plugins/__init__.py +++ b/libs/apprise/plugins/__init__.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. -# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -26,137 +25,137 @@ # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
-import os import copy +import os -# Used for testing -from .base import NotifyBase - -from ..common import NotifyImageSize -from ..common import NOTIFY_IMAGE_SIZES -from ..common import NotifyType -from ..common import NOTIFY_TYPES -from ..utils.cwe312 import cwe312_url -from ..utils.parse import parse_list, GET_SCHEMA_RE +from ..common import ( + NOTIFY_IMAGE_SIZES, + NOTIFY_TYPES, + NotifyImageSize, + NotifyType, +) +from ..locale import LazyTranslation, gettext_lazy as _ from ..logger import logger -from ..locale import gettext_lazy as _ -from ..locale import LazyTranslation from ..manager_plugins import NotificationManager +from ..utils.cwe312 import cwe312_url +from ..utils.parse import GET_SCHEMA_RE, parse_list +# Used for testing +from .base import NotifyBase # Grant access to our Notification Manager Singleton N_MGR = NotificationManager() __all__ = [ + "NOTIFY_IMAGE_SIZES", + "NOTIFY_TYPES", + "NotifyBase", # Reference - 'NotifyImageSize', 'NOTIFY_IMAGE_SIZES', 'NotifyType', 'NOTIFY_TYPES', - 'NotifyBase', - + "NotifyImageSize", + "NotifyType", # Tokenizer - 'url_to_dict', + "url_to_dict", ] def _sanitize_token(tokens, default_delimiter): - """ - This is called by the details() function and santizes the output by + """This is called by the details() function and santizes the output by populating expected and consistent arguments if they weren't otherwise - specified. 
- - """ + specified.""" # Used for tracking groups group_map = {} # Iterate over our tokens - for key in tokens.keys(): + for key in tokens: - for element in tokens[key].keys(): + for element in tokens[key]: # Perform translations (if detected to do so) if isinstance(tokens[key][element], LazyTranslation): tokens[key][element] = str(tokens[key][element]) - if 'alias_of' in tokens[key]: + if "alias_of" in tokens[key]: # Do not touch this field continue - elif 'name' not in tokens[key]: + elif "name" not in tokens[key]: # Default to key - tokens[key]['name'] = key + tokens[key]["name"] = key - if 'map_to' not in tokens[key]: + if "map_to" not in tokens[key]: # Default type to key - tokens[key]['map_to'] = key + tokens[key]["map_to"] = key # Track our map_to objects - if tokens[key]['map_to'] not in group_map: - group_map[tokens[key]['map_to']] = set() - group_map[tokens[key]['map_to']].add(key) + if tokens[key]["map_to"] not in group_map: + group_map[tokens[key]["map_to"]] = set() + group_map[tokens[key]["map_to"]].add(key) - if 'type' not in tokens[key]: + if "type" not in tokens[key]: # Default type to string - tokens[key]['type'] = 'string' + tokens[key]["type"] = "string" - elif tokens[key]['type'].startswith('list'): - if 'delim' not in tokens[key]: + elif tokens[key]["type"].startswith("list"): + if "delim" not in tokens[key]: # Default list delimiter (if not otherwise specified) - tokens[key]['delim'] = default_delimiter + tokens[key]["delim"] = default_delimiter - if key in group_map[tokens[key]['map_to']]: # pragma: no branch + if key in group_map[tokens[key]["map_to"]]: # pragma: no branch # Remove ourselves from the list - group_map[tokens[key]['map_to']].remove(key) + group_map[tokens[key]["map_to"]].remove(key) # Pointing to the set directly so we can dynamically update # ourselves - tokens[key]['group'] = group_map[tokens[key]['map_to']] + tokens[key]["group"] = group_map[tokens[key]["map_to"]] - elif tokens[key]['type'].startswith('choice') \ - and 
'default' not in tokens[key] \ - and 'values' in tokens[key] \ - and len(tokens[key]['values']) == 1: + elif ( + tokens[key]["type"].startswith("choice") + and "default" not in tokens[key] + and "values" in tokens[key] + and len(tokens[key]["values"]) == 1 + ): # If there is only one choice; then make it the default # - support dictionaries too - tokens[key]['default'] = tokens[key]['values'][0] \ - if not isinstance(tokens[key]['values'], dict) \ - else next(iter(tokens[key]['values'])) + tokens[key]["default"] = ( + tokens[key]["values"][0] + if not isinstance(tokens[key]["values"], dict) + else next(iter(tokens[key]["values"])) + ) - if 'values' in tokens[key] and isinstance(tokens[key]['values'], dict): + if "values" in tokens[key] and isinstance(tokens[key]["values"], dict): # Convert values into a list if it was defined as a dictionary - tokens[key]['values'] = [k for k in tokens[key]['values'].keys()] + tokens[key]["values"] = list(tokens[key]["values"].keys()) - if 'regex' in tokens[key]: + if "regex" in tokens[key]: # Verify that we are a tuple; convert strings to tuples - if isinstance(tokens[key]['regex'], str): + if isinstance(tokens[key]["regex"], str): # Default tuple setup - tokens[key]['regex'] = \ - (tokens[key]['regex'], None) + tokens[key]["regex"] = (tokens[key]["regex"], None) - elif not isinstance(tokens[key]['regex'], (list, tuple)): + elif not isinstance(tokens[key]["regex"], (list, tuple)): # Invalid regex - del tokens[key]['regex'] + del tokens[key]["regex"] - if 'required' not in tokens[key]: + if "required" not in tokens[key]: # Default required is False - tokens[key]['required'] = False + tokens[key]["required"] = False - if 'private' not in tokens[key]: + if "private" not in tokens[key]: # Private flag defaults to False if not set - tokens[key]['private'] = False + tokens[key]["private"] = False return def details(plugin): - """ - Provides templates that can be used by developers to build URLs + """Provides templates that can be used 
by developers to build URLs dynamically. - If a list of templates is provided, then they will be used over - the default value. + If a list of templates is provided, then they will be used over the default + value. - If a list of tokens are provided, then they will over-ride any - additional settings built from this script and/or will be appended - to them afterwards. + If a list of tokens are provided, then they will over-ride any additional + settings built from this script and/or will be appended to them afterwards. """ # Our unique list of parsing will be based on the provided templates @@ -210,7 +209,7 @@ def details(plugin): # # # Use regex if you want to share the regular expression # # required to validate the field. The regex will never - # # accomodate the prefix (if one is specified). That is + # # accommodate the prefix (if one is specified). That is # # up to the user building the URLs to include the prefix # # on the URL when constructing it. # # The format is ('regex', 'reg options') @@ -222,7 +221,7 @@ def details(plugin): # # # By default the key of this object is to be interpreted # # as the argument to the notification in question. However - # # To accomodate cases where there are multiple types that + # # To accommodate cases where there are multiple types that # # all map to the same entry, one can find a map_to value. 
# 'map_to': 'function_arg', # @@ -277,71 +276,69 @@ def details(plugin): template_kwargs = copy.deepcopy(plugin.template_kwargs) # We automatically create a schema entry - template_tokens['schema'] = { - 'name': _('Schema'), - 'type': 'choice:string', - 'required': True, - 'values': parse_list(plugin.secure_protocol, plugin.protocol) + template_tokens["schema"] = { + "name": _("Schema"), + "type": "choice:string", + "required": True, + "values": parse_list(plugin.secure_protocol, plugin.protocol), } # Sanitize our tokens - _sanitize_token(template_tokens, default_delimiter=('/', )) + _sanitize_token(template_tokens, default_delimiter=("/",)) # Delimiter(s) are space and/or comma - _sanitize_token(template_args, default_delimiter=(',', ' ')) - _sanitize_token(template_kwargs, default_delimiter=(',', ' ')) + _sanitize_token(template_args, default_delimiter=(",", " ")) + _sanitize_token(template_kwargs, default_delimiter=(",", " ")) # Argument/Option Handling for key in list(template_args.keys()): - if 'alias_of' in template_args[key]: + if "alias_of" in template_args[key]: # Check if the mapped reference is a list; if it is, then # we need to store a different delimiter - alias_of = template_tokens.get(template_args[key]['alias_of'], {}) - if alias_of.get('type', '').startswith('list') \ - and 'delim' not in template_args[key]: + alias_of = template_tokens.get(template_args[key]["alias_of"], {}) + if ( + alias_of.get("type", "").startswith("list") + and "delim" not in template_args[key] + ): # Set a default delimiter of a comma and/or space if one # hasn't already been specified - template_args[key]['delim'] = (',', ' ') + template_args[key]["delim"] = (",", " ") # _lookup_default looks up what the default value - if '_lookup_default' in template_args[key]: - template_args[key]['default'] = getattr( - plugin, template_args[key]['_lookup_default']) + if "_lookup_default" in template_args[key]: + template_args[key]["default"] = getattr( + plugin, 
template_args[key]["_lookup_default"] + ) # Tidy as we don't want to pass this along in response - del template_args[key]['_lookup_default'] + del template_args[key]["_lookup_default"] # _exists_if causes the argument to only exist IF after checking # the return of an internal variable requiring a check - if '_exists_if' in template_args[key]: - if not getattr(plugin, - template_args[key]['_exists_if']): + if "_exists_if" in template_args[key]: + if not getattr(plugin, template_args[key]["_exists_if"]): # Remove entire object del template_args[key] else: # We only nee to remove this key - del template_args[key]['_exists_if'] + del template_args[key]["_exists_if"] return { - 'templates': templates, - 'tokens': template_tokens, - 'args': template_args, - 'kwargs': template_kwargs, + "templates": templates, + "tokens": template_tokens, + "args": template_args, + "kwargs": template_kwargs, } def requirements(plugin): - """ - Provides a list of packages and its requirement details - - """ + """Provides a list of packages and its requirement details.""" requirements = { # Use the description to provide a human interpretable description of # what is required to make the plugin work. This is only nessisary # if there are package dependencies - 'details': '', - + "details": "", # Define any required packages needed for the plugin to run. This is # an array of strings that simply look like lines in the # `requirements.txt` file... @@ -354,83 +351,82 @@ def requirements(plugin): # 'cryptography < 3.4`, # ] # - 'packages_required': [], - + "packages_required": [], # Recommended packages identify packages that are not required to make # your plugin work, but would improve it's use or grant it access to # full functionality (that might otherwise be limited). - # Similar to `packages_required`, you would identify each entry in # the array as you would in a `requirements.txt` file. 
# # - Do not re-provide entries already in the `packages_required` - 'packages_recommended': [], + "packages_recommended": [], } # Populate our template differently if we don't find anything above - if not (hasattr(plugin, 'requirements') - and isinstance(plugin.requirements, dict)): + if not ( + hasattr(plugin, "requirements") + and isinstance(plugin.requirements, dict) + ): # We're done early return requirements # Get our required packages - _req_packages = plugin.requirements.get('packages_required') - if isinstance(_req_packages, str): + req_packages = plugin.requirements.get("packages_required") + if isinstance(req_packages, str): # Convert to list - _req_packages = [_req_packages] + req_packages = [req_packages] - elif not isinstance(_req_packages, (set, list, tuple)): + elif not isinstance(req_packages, (set, list, tuple)): # Allow one to set the required packages to None (as an example) - _req_packages = [] + req_packages = [] - requirements['packages_required'] = [str(p) for p in _req_packages] + requirements["packages_required"] = [str(p) for p in req_packages] # Get our recommended packages - _opt_packages = plugin.requirements.get('packages_recommended') - if isinstance(_opt_packages, str): + opt_packages = plugin.requirements.get("packages_recommended") + if isinstance(opt_packages, str): # Convert to list - _opt_packages = [_opt_packages] + opt_packages = [opt_packages] - elif not isinstance(_opt_packages, (set, list, tuple)): + elif not isinstance(opt_packages, (set, list, tuple)): # Allow one to set the recommended packages to None (as an example) - _opt_packages = [] + opt_packages = [] - requirements['packages_recommended'] = [str(p) for p in _opt_packages] + requirements["packages_recommended"] = [str(p) for p in opt_packages] # Get our package details - _req_details = plugin.requirements.get('details') - if not _req_details: - if not (_req_packages or _opt_packages): - _req_details = _('No dependencies.') + req_details = 
plugin.requirements.get("details") + if not req_details: + if not (req_packages or opt_packages): + req_details = _("No dependencies.") - elif _req_packages: - _req_details = _('Packages are required to function.') + elif req_packages: + req_details = _("Packages are required to function.") else: # opt_packages - _req_details = \ - _('Packages are recommended to improve functionality.') + req_details = _( + "Packages are recommended to improve functionality." + ) else: # Store our details if defined - requirements['details'] = _req_details + requirements["details"] = req_details # Return our compiled package requirements return requirements def url_to_dict(url, secure_logging=True): - """ - Takes an apprise URL and returns the tokens associated with it - if they can be acquired based on the plugins available. + """Takes an apprise URL and returns the tokens associated with it if they + can be acquired based on the plugins available. - None is returned if the URL could not be parsed, otherwise the - tokens are returned. + None is returned if the URL could not be parsed, otherwise the tokens are + returned. - These tokens can be loaded into apprise through it's add() - function. + These tokens can be loaded into apprise through its add() function. """ # swap hash (#) tag values with their html version - _url = url.replace('/#', '/%23') + url_ = url.replace("/#", "/%23") # CWE-312 (Secure Logging) Handling loggable_url = url if not secure_logging else cwe312_url(url) @@ -438,14 +434,14 @@ def url_to_dict(url, secure_logging=True): # Attempt to acquire the schema at the very least to allow our plugins to # determine if they can make a better interpretation of a URL geared for # them.
- schema = GET_SCHEMA_RE.match(_url) + schema = GET_SCHEMA_RE.match(url_) if schema is None: # Not a valid URL; take an early exit - logger.error('Unsupported URL: {}'.format(loggable_url)) + logger.error(f"Unsupported URL: {loggable_url}") return None # Ensure our schema is always in lower case - schema = schema.group('schema').lower() + schema = schema.group("schema").lower() if schema not in N_MGR: # Give the user the benefit of the doubt that the user may be using # one of the URLs provided to them by their notification service. @@ -453,31 +449,40 @@ def url_to_dict(url, secure_logging=True): # native_url() parse function results = None for plugin in N_MGR.plugins(): - results = plugin.parse_native_url(_url) + results = plugin.parse_native_url(url_) if results: break if not results: - logger.error('Unparseable URL {}'.format(loggable_url)) + logger.error(f"Unparseable URL {loggable_url}") return None - logger.trace('URL {} unpacked as:{}{}'.format( - url, os.linesep, os.linesep.join( - ['{}="{}"'.format(k, v) for k, v in results.items()]))) + logger.trace( + "URL {} unpacked as:{}{}".format( + url, + os.linesep, + os.linesep.join([f'{k}="{v}"' for k, v in results.items()]), + ) + ) else: # Parse our url details of the server object as dictionary # containing all of the information parsed from our URL - results = N_MGR[schema].parse_url(_url) + results = N_MGR[schema].parse_url(url_) if not results: - logger.error('Unparseable {} URL {}'.format( - N_MGR[schema].service_name, loggable_url)) + logger.error( + f"Unparseable {N_MGR[schema].service_name} URL {loggable_url}" + ) return None - logger.trace('{} URL {} unpacked as:{}{}'.format( - N_MGR[schema].service_name, url, - os.linesep, os.linesep.join( - ['{}="{}"'.format(k, v) for k, v in results.items()]))) + logger.trace( + "{} URL {} unpacked as:{}{}".format( + N_MGR[schema].service_name, + url, + os.linesep, + os.linesep.join([f'{k}="{v}"' for k, v in results.items()]), + ) + ) # Return our results return 
results diff --git a/libs/apprise/plugins/africas_talking.py b/libs/apprise/plugins/africas_talking.py index 5975876484..4a4efc637a 100644 --- a/libs/apprise/plugins/africas_talking.py +++ b/libs/apprise/plugins/africas_talking.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. -# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -33,25 +32,28 @@ # API Details: https://developers.africastalking.com/docs/sms/sending/bulk import requests -from .base import NotifyBase from ..common import NotifyType -from ..utils.parse import ( - is_phone_no, parse_bool, parse_phone_no, validate_regex) from ..locale import gettext_lazy as _ +from ..utils.parse import ( + is_phone_no, + parse_bool, + parse_phone_no, + validate_regex, +) +from .base import NotifyBase class AfricasTalkingSMSMode: - """ - Africas Talking SMS Mode - """ + """Africas Talking SMS Mode.""" + # BulkSMS Mode - BULKSMS = 'bulksms' + BULKSMS = "bulksms" # Premium Mode - PREMIUM = 'premium' + PREMIUM = "premium" # Sandbox Mode - SANDBOX = 'sandbox' + SANDBOX = "sandbox" # Define the types in a list for validation purposes @@ -64,48 +66,49 @@ class AfricasTalkingSMSMode: # Extend HTTP Error Messages AFRICAS_TALKING_HTTP_ERROR_MAP = { - 100: 'Processed', - 101: 'Sent', - 102: 'Queued', - 401: 'Risk Hold', - 402: 'Invalid Sender ID', - 403: 'Invalid Phone Number', - 404: 'Unsupported Number Type', - 405: 'Insufficient Balance', - 406: 'User In Blacklist', - 407: 'Could Not Route', - 409: 'Do Not Disturb Rejection', - 500: 'Internal Server Error', - 501: 'Gateway Error', - 502: 'Rejected By Gateway', + 100: "Processed", + 101: "Sent", + 102: "Queued", + 401: "Risk Hold", + 402: "Invalid Sender ID", + 403: "Invalid Phone Number", + 404: "Unsupported Number Type", + 405: "Insufficient Balance", + 
406: "User In Blacklist", + 407: "Could Not Route", + 409: "Do Not Disturb Rejection", + 500: "Internal Server Error", + 501: "Gateway Error", + 502: "Rejected By Gateway", } class NotifyAfricasTalking(NotifyBase): - """ - A wrapper for Africas Talking Notifications - """ + """A wrapper for Africas Talking Notifications.""" # The default descriptive name associated with the Notification - service_name = 'Africas Talking' + service_name = "Africas Talking" # The services URL - service_url = 'https://africastalking.com/' + service_url = "https://africastalking.com/" # The default secure protocol - secure_protocol = 'atalk' + secure_protocol = "atalk" # A URL that takes you to the setup/help of the specific protocol - setup_url = 'https://github.com/caronc/apprise/wiki/Notify_africas_talking' + setup_url = "https://appriseit.com/services/africas_talking/" # Africas Talking API Request URLs notify_url = { - AfricasTalkingSMSMode.BULKSMS: - 'https://api.africastalking.com/version1/messaging', - AfricasTalkingSMSMode.PREMIUM: - 'https://content.africastalking.com/version1/messaging', - AfricasTalkingSMSMode.SANDBOX: - 'https://api.sandbox.africastalking.com/version1/messaging', + AfricasTalkingSMSMode.BULKSMS: ( + "https://api.africastalking.com/version1/messaging" + ), + AfricasTalkingSMSMode.PREMIUM: ( + "https://content.africastalking.com/version1/messaging" + ), + AfricasTalkingSMSMode.SANDBOX: ( + "https://api.sandbox.africastalking.com/version1/messaging" + ), } # The maximum allowable characters allowed in the title per message @@ -119,146 +122,172 @@ class NotifyAfricasTalking(NotifyBase): default_batch_size = 50 # Define object templates - templates = ( - '{schema}://{appuser}@{apikey}/{targets}', - ) + templates = ("{schema}://{appuser}@{apikey}/{targets}",) # Define our template tokens - template_tokens = dict(NotifyBase.template_tokens, **{ - 'appuser': { - 'name': _('App User Name'), - 'type': 'string', - 'regex': (r'^[A-Z0-9_-]+$', 'i'), - 'required': True, 
- }, - 'apikey': { - 'name': _('API Key'), - 'type': 'string', - 'required': True, - 'private': True, - 'regex': (r'^[A-Z0-9_-]+$', 'i'), - }, - 'target_phone': { - 'name': _('Target Phone'), - 'type': 'string', - 'map_to': 'targets', - }, - 'targets': { - 'name': _('Targets'), - 'type': 'list:string', + template_tokens = dict( + NotifyBase.template_tokens, + **{ + "appuser": { + "name": _("App User Name"), + "type": "string", + "regex": (r"^[A-Z0-9_-]+$", "i"), + "required": True, + }, + "apikey": { + "name": _("API Key"), + "type": "string", + "required": True, + "private": True, + "regex": (r"^[A-Z0-9_-]+$", "i"), + }, + "target_phone": { + "name": _("Target Phone"), + "type": "string", + "map_to": "targets", + }, + "targets": { + "name": _("Targets"), + "type": "list:string", + }, }, - }) + ) # Define our template arguments - template_args = dict(NotifyBase.template_args, **{ - 'to': { - 'alias_of': 'targets', - }, - 'apikey': { - 'alias_of': 'apikey', - }, - 'from': { - # Your registered short code or alphanumeric - 'name': _('From'), - 'type': 'string', - 'default': 'AFRICASTKNG', - 'map_to': 'sender', - }, - 'batch': { - 'name': _('Batch Mode'), - 'type': 'bool', - 'default': False, - }, - 'mode': { - 'name': _('SMS Mode'), - 'type': 'choice:string', - 'values': AFRICAS_TALKING_SMS_MODES, - 'default': AFRICAS_TALKING_SMS_MODES[0], + template_args = dict( + NotifyBase.template_args, + **{ + "to": { + "alias_of": "targets", + }, + "apikey": { + "alias_of": "apikey", + }, + "from": { + # Your registered short code or alphanumeric + "name": _("From"), + "type": "string", + "default": "AFRICASTKNG", + "map_to": "sender", + }, + "batch": { + "name": _("Batch Mode"), + "type": "bool", + "default": False, + }, + "mode": { + "name": _("SMS Mode"), + "type": "choice:string", + "values": AFRICAS_TALKING_SMS_MODES, + "default": AFRICAS_TALKING_SMS_MODES[0], + }, }, - }) + ) - def __init__(self, appuser, apikey, targets=None, sender=None, batch=None, - mode=None, 
**kwargs): - """ - Initialize Africas Talking Object - """ + def __init__( + self, + appuser, + apikey, + targets=None, + sender=None, + batch=None, + mode=None, + **kwargs, + ): + """Initialize Africas Talking Object.""" super().__init__(**kwargs) self.appuser = validate_regex( - appuser, *self.template_tokens['appuser']['regex']) + appuser, *self.template_tokens["appuser"]["regex"] + ) if not self.appuser: - msg = 'The Africas Talking appuser specified ({}) is invalid.'\ - .format(appuser) + msg = ( + f"The Africas Talking appuser specified ({appuser}) is" + " invalid." + ) self.logger.warning(msg) raise TypeError(msg) self.apikey = validate_regex( - apikey, *self.template_tokens['apikey']['regex']) + apikey, *self.template_tokens["apikey"]["regex"] + ) if not self.apikey: - msg = 'The Africas Talking apikey specified ({}) is invalid.'\ - .format(apikey) + msg = ( + f"The Africas Talking apikey specified ({apikey}) is invalid." + ) self.logger.warning(msg) raise TypeError(msg) # Prepare Sender - self.sender = self.template_args['from']['default'] \ - if sender is None else sender + self.sender = ( + self.template_args["from"]["default"] if sender is None else sender + ) # Prepare Batch Mode Flag - self.batch = self.template_args['batch']['default'] \ - if batch is None else batch + self.batch = ( + self.template_args["batch"]["default"] if batch is None else batch + ) - self.mode = self.template_args['mode']['default'] \ - if not isinstance(mode, str) else mode.lower() + self.mode = ( + self.template_args["mode"]["default"] + if not isinstance(mode, str) + else mode.lower() + ) if isinstance(mode, str) and mode: self.mode = next( - (a for a in AFRICAS_TALKING_SMS_MODES if a.startswith( - mode.lower())), None) + ( + a + for a in AFRICAS_TALKING_SMS_MODES + if a.startswith(mode.lower()) + ), + None, + ) if self.mode not in AFRICAS_TALKING_SMS_MODES: - msg = 'The Africas Talking mode specified ({}) is invalid.'\ - .format(mode) + msg = ( + f"The Africas Talking mode 
specified ({mode}) is invalid." + ) self.logger.warning(msg) raise TypeError(msg) else: - self.mode = self.template_args['mode']['default'] + self.mode = self.template_args["mode"]["default"] # Parse our targets - self.targets = list() + self.targets = [] for target in parse_phone_no(targets): # Validate targets and drop bad ones: result = is_phone_no(target) if not result: self.logger.warning( - 'Dropped invalid phone # ' - '({}) specified.'.format(target), + f"Dropped invalid phone # ({target}) specified.", ) continue # store valid phone number # Carry forward '+' if defined, otherwise do not... self.targets.append( - ('+' + result['full']) - if target.lstrip()[0] == '+' else result['full']) + ("+" + result["full"]) + if target.lstrip()[0] == "+" + else result["full"] + ) - def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): - """ - Perform Africas Talking Notification - """ + def send(self, body, title="", notify_type=NotifyType.INFO, **kwargs): + """Perform Africas Talking Notification.""" if not self.targets: # There is no one to email; we're done self.logger.warning( - 'There are no Africas Talking recipients to notify') + "There are no Africas Talking recipients to notify" + ) return False headers = { - 'User-Agent': self.app_id, - 'Content-Type': 'application/x-www-form-urlencoded', - 'Accept': 'application/json', - 'apiKey': self.apikey, + "User-Agent": self.app_id, + "Content-Type": "application/x-www-form-urlencoded", + "Accept": "application/json", + "apiKey": self.apikey, } # error tracking (used for function return) @@ -271,24 +300,27 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): for index in range(0, len(self.targets), batch_size): # Prepare our payload payload = { - 'username': self.appuser, - 'to': ','.join(self.targets[index:index + batch_size]), - 'from': self.sender, - 'message': body, + "username": self.appuser, + "to": ",".join(self.targets[index : index + batch_size]), + "from": self.sender, + 
"message": body, } # Acquire our URL notify_url = self.notify_url[self.mode] self.logger.debug( - 'Africas Talking POST URL: %s (cert_verify=%r)' % ( - notify_url, self.verify_certificate)) - self.logger.debug('Africas Talking Payload: %s' % str(payload)) + "Africas Talking POST URL:" + f" {notify_url} (cert_verify={self.verify_certificate!r})" + ) + self.logger.debug(f"Africas Talking Payload: {payload!s}") # Printable target detail - p_target = self.targets[index] if batch_size == 1 \ - else '{} target(s)'.format( - len(self.targets[index:index + batch_size])) + p_target = ( + self.targets[index] + if batch_size == 1 + else f"{len(self.targets[index:index + batch_size])} target(s)" + ) # Always call throttle before any remote server i/o is made self.throttle() @@ -316,20 +348,24 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): if r.status_code not in (100, 101, 102, requests.codes.ok): # We had a problem - status_str = \ + status_str = ( NotifyAfricasTalking.http_response_code_lookup( - r.status_code, AFRICAS_TALKING_HTTP_ERROR_MAP) + r.status_code, AFRICAS_TALKING_HTTP_ERROR_MAP + ) + ) self.logger.warning( - 'Failed to send Africas Talking notification to {}: ' - '{}{}error={}.'.format( + "Failed to send Africas Talking notification to {}: " + "{}{}error={}.".format( p_target, status_str, - ', ' if status_str else '', - r.status_code)) + ", " if status_str else "", + r.status_code, + ) + ) self.logger.debug( - 'Response Details:\r\n{}'.format(r.content)) + "Response Details:\r\n%r", (r.content or b"")[:2000]) # Mark our failure has_error = True @@ -337,14 +373,15 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): else: self.logger.info( - 'Sent Africas Talking notification to {}.' - .format(p_target)) + f"Sent Africas Talking notification to {p_target}." 
+ ) except requests.RequestException as e: self.logger.warning( - 'A Connection error occurred sending Africas Talking ' - 'notification to {}.'.format(p_target)) - self.logger.debug('Socket Exception: %s' % str(e)) + "A Connection error occurred sending Africas Talking " + f"notification to {p_target}." + ) + self.logger.debug(f"Socket Exception: {e!s}") # Mark our failure has_error = True @@ -354,115 +391,116 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): @property def url_identifier(self): - """ - Returns all of the identifiers that make this URL unique from - another simliar one. Targets or end points should never be identified - here. + """Returns all of the identifiers that make this URL unique from + another similar one. + + Targets or end points should never be identified here. """ return (self.secure_protocol, self.appuser, self.apikey) def url(self, privacy=False, *args, **kwargs):
- """ + """Returns the URL built dynamically based on specified arguments.""" # Define any URL parameters params = { - 'batch': 'yes' if self.batch else 'no', + "batch": "yes" if self.batch else "no", } - if self.sender != self.template_args['from']['default']: + if self.sender != self.template_args["from"]["default"]: # Set our sender if it was set - params['from'] = self.sender + params["from"] = self.sender - if self.mode != self.template_args['mode']['default']: + if self.mode != self.template_args["mode"]["default"]: # Set our mode - params['mode'] = self.mode + params["mode"] = self.mode # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) - return '{schema}://{appuser}@{apikey}/{targets}?{params}'.format( + return "{schema}://{appuser}@{apikey}/{targets}?{params}".format( schema=self.secure_protocol, - appuser=NotifyAfricasTalking.quote(self.appuser, safe=''), - apikey=self.pprint(self.apikey, privacy, safe=''), - targets='/'.join( - [NotifyAfricasTalking.quote(x, safe='+') - for x in self.targets]), + appuser=NotifyAfricasTalking.quote(self.appuser, safe=""), + apikey=self.pprint(self.apikey, privacy, safe=""), + targets="/".join( + [NotifyAfricasTalking.quote(x, safe="+") for x in self.targets] + ), params=NotifyAfricasTalking.urlencode(params), ) def __len__(self): - """ - Returns the number of targets associated with this notification - """ + """Returns the number of targets associated with this notification.""" # # Factor batch into calculation # batch_size = 1 if not self.batch else self.default_batch_size targets = len(self.targets) if batch_size > 1: - targets = int(targets / batch_size) + \ - (1 if targets % batch_size else 0) + targets = int(targets / batch_size) + ( + 1 if targets % batch_size else 0 + ) return targets if targets > 0 else 1 @staticmethod def parse_url(url): - """ - Parses the URL and returns enough arguments that can allow - us to re-instantiate this object. 
- - """ + """Parses the URL and returns enough arguments that can allow us to re- + instantiate this object.""" results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # The Application User ID - results['appuser'] = NotifyAfricasTalking.unquote(results['user']) + results["appuser"] = NotifyAfricasTalking.unquote(results["user"]) # Prepare our targets - results['targets'] = [] + results["targets"] = [] # Our Application APIKey - if 'apikey' in results['qsd'] and len(results['qsd']['apikey']): + if "apikey" in results["qsd"] and len(results["qsd"]["apikey"]): # Store our apikey if specified as keyword - results['apikey'] = \ - NotifyAfricasTalking.unquote(results['qsd']['apikey']) + results["apikey"] = NotifyAfricasTalking.unquote( + results["qsd"]["apikey"] + ) # This means our host is actually a phone number (target) - results['targets'].append( - NotifyAfricasTalking.unquote(results['host'])) + results["targets"].append( + NotifyAfricasTalking.unquote(results["host"]) + ) else: # First item is our apikey - results['apikey'] = NotifyAfricasTalking.unquote(results['host']) + results["apikey"] = NotifyAfricasTalking.unquote(results["host"]) # Store our remaining targets found on path - results['targets'].extend( - NotifyAfricasTalking.split_path(results['fullpath'])) + results["targets"].extend( + NotifyAfricasTalking.split_path(results["fullpath"]) + ) # The 'from' makes it easier to use yaml configuration - if 'from' in results['qsd'] and len(results['qsd']['from']): - results['sender'] = \ - NotifyAfricasTalking.unquote(results['qsd']['from']) + if "from" in results["qsd"] and len(results["qsd"]["from"]): + results["sender"] = NotifyAfricasTalking.unquote( + results["qsd"]["from"] + ) # Support the 'to' variable so that we can support targets this way too # The 'to' makes it easier to use yaml configuration - if 'to' in results['qsd'] and len(results['qsd']['to']): - 
results['targets'] += \ - NotifyAfricasTalking.parse_phone_no(results['qsd']['to']) + if "to" in results["qsd"] and len(results["qsd"]["to"]): + results["targets"] += NotifyAfricasTalking.parse_phone_no( + results["qsd"]["to"] + ) # Get our Mode - if 'mode' in results['qsd'] and len(results['qsd']['mode']): - results['mode'] = \ - NotifyAfricasTalking.unquote(results['qsd']['mode']) + if "mode" in results["qsd"] and len(results["qsd"]["mode"]): + results["mode"] = NotifyAfricasTalking.unquote( + results["qsd"]["mode"] + ) # Get Batch Mode Flag - results['batch'] = \ - parse_bool(results['qsd'].get( - 'batch', - NotifyAfricasTalking.template_args['batch']['default'])) + results["batch"] = parse_bool( + results["qsd"].get( + "batch", NotifyAfricasTalking.template_args["batch"]["default"] + ) + ) return results diff --git a/libs/apprise/plugins/apprise_api.py b/libs/apprise/plugins/apprise_api.py index a261620366..a0bacd7041 100644 --- a/libs/apprise/plugins/apprise_api.py +++ b/libs/apprise/plugins/apprise_api.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. -# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -26,24 +25,26 @@ # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. +from json import dumps +import logging import re + import requests -from json import dumps from .. 
 import exception -from .base import NotifyBase -from ..url import PrivacyMode from ..common import NotifyType -from ..utils.parse import parse_list, validate_regex from ..locale import gettext_lazy as _ +from ..url import PrivacyMode +from ..utils.parse import parse_list, validate_regex +from ..utils.sanitize import sanitize_payload +from .base import NotifyBase class AppriseAPIMethod: - """ - Defines the method to post data tot he remote server - """ - JSON = 'json' - FORM = 'form' + """Defines the method to post data to the remote server.""" + + JSON = "json" + FORM = "form" APPRISE_API_METHODS = ( @@ -53,24 +54,22 @@ class AppriseAPIMethod: class NotifyAppriseAPI(NotifyBase): - """ - A wrapper for Apprise (Persistent) API Notifications - """ + """A wrapper for Apprise (Persistent) API Notifications.""" # The default descriptive name associated with the Notification - service_name = 'Apprise API' + service_name = "Apprise API" # The services URL - service_url = 'https://github.com/caronc/apprise-api' + service_url = "https://github.com/caronc/apprise-api" # The default protocol - protocol = 'apprise' + protocol = "apprise" # The default secure protocol - secure_protocol = 'apprises' + secure_protocol = "apprises" # A URL that takes you to the setup/help of the specific protocol - setup_url = 'https://github.com/caronc/apprise/wiki/Notify_apprise_api' + setup_url = "https://appriseit.com/services/apprise_api/" # Support attachments attachment_support = True @@ -85,96 +84,104 @@ class NotifyAppriseAPI(NotifyBase): # Define object templates templates = ( - '{schema}://{host}/{token}', - '{schema}://{host}:{port}/{token}', - '{schema}://{user}@{host}/{token}', - '{schema}://{user}@{host}:{port}/{token}', - '{schema}://{user}:{password}@{host}/{token}', - '{schema}://{user}:{password}@{host}:{port}/{token}', + "{schema}://{host}/{token}", + "{schema}://{host}:{port}/{token}", + "{schema}://{user}@{host}/{token}", + "{schema}://{user}@{host}:{port}/{token}", 
"{schema}://{user}:{password}@{host}/{token}", + "{schema}://{user}:{password}@{host}:{port}/{token}", ) # Define our tokens; these are the minimum tokens required required to # be passed into this function (as arguments). The syntax appends any # previously defined in the base package and builds onto them - template_tokens = dict(NotifyBase.template_tokens, **{ - 'host': { - 'name': _('Hostname'), - 'type': 'string', - 'required': True, + template_tokens = dict( + NotifyBase.template_tokens, + **{ + "host": { + "name": _("Hostname"), + "type": "string", + "required": True, + }, + "port": { + "name": _("Port"), + "type": "int", + "min": 1, + "max": 65535, + }, + "user": { + "name": _("Username"), + "type": "string", + }, + "password": { + "name": _("Password"), + "type": "string", + "private": True, + }, + "token": { + "name": _("Token"), + "type": "string", + "required": True, + "private": True, + "regex": (r"^[A-Z0-9_-]{1,128}$", "i"), + }, }, - 'port': { - 'name': _('Port'), - 'type': 'int', - 'min': 1, - 'max': 65535, - }, - 'user': { - 'name': _('Username'), - 'type': 'string', - }, - 'password': { - 'name': _('Password'), - 'type': 'string', - 'private': True, - }, - 'token': { - 'name': _('Token'), - 'type': 'string', - 'required': True, - 'private': True, - 'regex': (r'^[A-Z0-9_-]{1,128}$', 'i'), - }, - }) + ) # Define our template arguments - template_args = dict(NotifyBase.template_args, **{ - 'tags': { - 'name': _('Tags'), - 'type': 'string', + template_args = dict( + NotifyBase.template_args, + **{ + "tags": { + "name": _("Tags"), + "type": "string", + }, + "method": { + "name": _("Query Method"), + "type": "choice:string", + "values": APPRISE_API_METHODS, + "default": APPRISE_API_METHODS[0], + }, + "to": { + "alias_of": "token", + }, }, - 'method': { - 'name': _('Query Method'), - 'type': 'choice:string', - 'values': APPRISE_API_METHODS, - 'default': APPRISE_API_METHODS[0], - }, - 'to': { - 'alias_of': 'token', - }, - }) + ) # Define any kwargs we're 
using template_kwargs = { - 'headers': { - 'name': _('HTTP Header'), - 'prefix': '+', + "headers": { + "name": _("HTTP Header"), + "prefix": "+", }, } - def __init__(self, token=None, tags=None, method=None, headers=None, - **kwargs): - """ - Initialize Apprise API Object + def __init__( + self, token=None, tags=None, method=None, headers=None, **kwargs + ): + """Initialize Apprise API Object. headers can be a dictionary of key/value pairs that you want to additionally include as part of the server headers to post with - """ super().__init__(**kwargs) self.token = validate_regex( - token, *self.template_tokens['token']['regex']) + token, *self.template_tokens["token"]["regex"] + ) if not self.token: - msg = 'The Apprise API token specified ({}) is invalid.'\ - .format(token) + msg = f"The Apprise API token specified ({token}) is invalid." self.logger.warning(msg) raise TypeError(msg) - self.method = self.template_args['method']['default'] \ - if not isinstance(method, str) else method.lower() + self.method = ( + self.template_args["method"]["default"] + if not isinstance(method, str) + else method.lower() + ) if self.method not in APPRISE_API_METHODS: - msg = 'The method specified ({}) is invalid.'.format(method) + msg = f"The method specified ({method}) is invalid." self.logger.warning(msg) raise TypeError(msg) @@ -189,64 +196,75 @@ def __init__(self, token=None, tags=None, method=None, headers=None, return def url(self, privacy=False, *args, **kwargs): - """ - Returns the URL built dynamically based on specified arguments. 
- """ + """Returns the URL built dynamically based on specified arguments.""" # Define any URL parameters params = { - 'method': self.method, + "method": self.method, } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) # Append our headers into our parameters - params.update({'+{}'.format(k): v for k, v in self.headers.items()}) + params.update({f"+{k}": v for k, v in self.headers.items()}) if self.__tags: - params['tags'] = ','.join([x for x in self.__tags]) + params["tags"] = ",".join(list(self.__tags)) # Determine Authentication - auth = '' + auth = "" if self.user and self.password: - auth = '{user}:{password}@'.format( - user=NotifyAppriseAPI.quote(self.user, safe=''), + auth = "{user}:{password}@".format( + user=NotifyAppriseAPI.quote(self.user, safe=""), password=self.pprint( - self.password, privacy, mode=PrivacyMode.Secret, safe=''), + self.password, privacy, mode=PrivacyMode.Secret, safe="" + ), ) elif self.user: - auth = '{user}@'.format( - user=NotifyAppriseAPI.quote(self.user, safe=''), + auth = "{user}@".format( + user=NotifyAppriseAPI.quote(self.user, safe=""), ) default_port = 443 if self.secure else 80 - fullpath = self.fullpath.strip('/') - return '{schema}://{auth}{hostname}{port}{fullpath}{token}' \ - '/?{params}'.format( - schema=self.secure_protocol - if self.secure else self.protocol, - auth=auth, - # never encode hostname since we're expecting it to be a - # valid one - hostname=self.host, - port='' if self.port is None or self.port == default_port - else ':{}'.format(self.port), - fullpath='/{}/'.format(NotifyAppriseAPI.quote( - fullpath, safe='/')) if fullpath else '/', - token=self.pprint(self.token, privacy, safe=''), - params=NotifyAppriseAPI.urlencode(params)) - - def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, - **kwargs): - """ - Perform Apprise API Notification - """ + fullpath = self.fullpath.strip("/") + return ( + 
"{schema}://{auth}{hostname}{port}{fullpath}{token}" + "/?{params}".format( + schema=self.secure_protocol if self.secure else self.protocol, + auth=auth, + # never encode hostname since we're expecting it to be a + # valid one + hostname=self.host, + port=( + "" + if self.port is None or self.port == default_port + else f":{self.port}" + ), + fullpath=( + "/{}/".format(NotifyAppriseAPI.quote(fullpath, safe="/")) + if fullpath + else "/" + ), + token=self.pprint(self.token, privacy, safe=""), + params=NotifyAppriseAPI.urlencode(params), + ) + ) + + def send( + self, + body, + title="", + notify_type=NotifyType.INFO, + attach=None, + **kwargs, + ): + """Perform Apprise API Notification.""" # Prepare HTTP Headers headers = { - 'User-Agent': self.app_id, + "User-Agent": self.app_id, } # Apply any/all header over-rides defined @@ -260,94 +278,109 @@ def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, if not attachment: # We could not access the attachment self.logger.error( - 'Could not access Apprise API attachment {}.'.format( - attachment.url(privacy=True))) + "Could not access Apprise API attachment" + f" {attachment.url(privacy=True)}." 
+ ) return False try: # Our Attachment filename - filename = attachment.name \ - if attachment.name else f'file{no:03}.dat' + filename = ( + attachment.name + if attachment.name + else f"file{no:03}.dat" + ) if self.method == AppriseAPIMethod.JSON: # Output must be in a DataURL format (that's what # PushSafer calls it): attachments.append({ "filename": filename, - 'base64': attachment.base64(), - 'mimetype': attachment.mimetype, + "base64": attachment.base64(), + "mimetype": attachment.mimetype, }) else: # AppriseAPIMethod.FORM files.append(( - 'file{:02d}'.format(no), + f"file{no:02d}", ( filename, - open(attachment.path, 'rb'), + # file handle is safely closed in `finally`; + # inline open is intentional + open(attachment.path, "rb"), # noqa: SIM115 attachment.mimetype, - ) + ), )) except (TypeError, OSError, exception.AppriseException): # We could not access the attachment self.logger.error( - 'Could not access AppriseAPI attachment {}.'.format( - attachment.url(privacy=True))) + "Could not access AppriseAPI attachment" + f" {attachment.url(privacy=True)}." 
+ ) return False self.logger.debug( - 'Appending AppriseAPI attachment {}'.format( - attachment.url(privacy=True))) + "Appending AppriseAPI attachment" + f" {attachment.url(privacy=True)}" + ) # prepare Apprise API Object payload = { # Apprise API Payload - 'title': title, - 'body': body, - 'type': notify_type, - 'format': self.notify_format, + "title": title, + "body": body, + "type": notify_type.value, + "format": self.notify_format.value, } if self.method == AppriseAPIMethod.JSON: - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" if attachments: - payload['attachments'] = attachments + payload["attachments"] = attachments payload = dumps(payload) if self.__tags: - payload['tag'] = self.__tags + payload["tag"] = self.__tags auth = None if self.user: auth = (self.user, self.password) # Set our schema - schema = 'https' if self.secure else 'http' + schema = "https" if self.secure else "http" - url = '%s://%s' % (schema, self.host) + url = f"{schema}://{self.host}" if isinstance(self.port, int): - url += ':%d' % self.port + url += f":{self.port}" - fullpath = self.fullpath.strip('/') - url += '{}'.format('/' + fullpath) if fullpath else '' - url += '/notify/{}'.format(self.token) + fullpath = self.fullpath.strip("/") + url += "{}".format("/" + fullpath) if fullpath else "" + url += f"/notify/{self.token}" # Some entries can not be over-ridden headers.update({ # Our response to be in JSON format always - 'Accept': 'application/json', + "Accept": "application/json", # Pass our Source UUID4 Identifier - 'X-Apprise-ID': self.asset._uid, + "X-Apprise-ID": self.asset._uid, # Pass our current recursion count to our upstream server - 'X-Apprise-Recursion-Count': str(self.asset._recursion + 1), + "X-Apprise-Recursion-Count": str(self.asset._recursion + 1), }) - self.logger.debug('Apprise API POST URL: %s (cert_verify=%r)' % ( - url, self.verify_certificate, - )) - self.logger.debug('Apprise API Payload: %s' % str(payload)) + # Some 
Debug Logging + if self.logger.isEnabledFor(logging.DEBUG): + # Due to attachments; output can be quite heavy and io intensive + # To accommodate this, we only show our debug payload information + # if required. + self.logger.debug( + "Apprise API POST URL:" + f" {url} (cert_verify={self.verify_certificate!r})" + ) + self.logger.debug( + "Apprise API Payload: %s", sanitize_payload(payload)) # Always call throttle before any remote server i/o is made self.throttle() @@ -364,39 +397,44 @@ def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, ) if r.status_code != requests.codes.ok: # We had a problem - status_str = \ - NotifyAppriseAPI.http_response_code_lookup(r.status_code) + status_str = NotifyAppriseAPI.http_response_code_lookup( + r.status_code + ) self.logger.warning( - 'Failed to send Apprise API notification: ' - '{}{}error={}.'.format( - status_str, - ', ' if status_str else '', - r.status_code)) + "Failed to send Apprise API notification: " + "{}{}error={}.".format( + status_str, ", " if status_str else "", r.status_code + ) + ) - self.logger.debug('Response Details:\r\n{}'.format(r.content)) + self.logger.debug( + "Response Details:\r\n%r", (r.content or b"")[:2000]) # Return; we're done return False else: self.logger.info( - 'Sent Apprise API notification; method=%s.', self.method) + "Sent Apprise API notification; method=%s.", self.method + ) except requests.RequestException as e: self.logger.warning( - 'A Connection error occurred sending Apprise API ' - 'notification to %s.' % self.host) - self.logger.debug('Socket Exception: %s' % str(e)) + "A Connection error occurred sending Apprise API " + f"notification to {self.host}." 
+ ) + self.logger.debug(f"Socket Exception: {e!s}") # Return; we're done return False - except (OSError, IOError) as e: + except OSError as e: self.logger.warning( - 'An I/O error occurred while reading one of the ' - 'attached files.') - self.logger.debug('I/O Exception: %s' % str(e)) + "An I/O error occurred while reading one of the " + "attached files." + ) + self.logger.debug(f"I/O Exception: {e!s}") return False finally: @@ -414,34 +452,48 @@ def parse_native_url(url): """ result = re.match( - r'^http(?Ps?)://(?P[A-Z0-9._-]+)' - r'(:(?P[0-9]+))?' - r'(?P/[^?]+?)?/notify/(?P[A-Z0-9_-]{1,32})/?' - r'(?P\?.+)?$', url, re.I) + r"^http(?Ps?)://(?P[A-Z0-9._-]+)" + r"(:(?P[0-9]+))?" + r"(?P/[^?]+?)?/notify/(?P[A-Z0-9_-]{1,32})/?" + r"(?P\?.+)?$", + url, + re.I, + ) if result: return NotifyAppriseAPI.parse_url( - '{schema}://{hostname}{port}{path}/{token}/{params}'.format( - schema=NotifyAppriseAPI.secure_protocol - if result.group('secure') else NotifyAppriseAPI.protocol, - hostname=result.group('hostname'), - port='' if not result.group('port') - else ':{}'.format(result.group('port')), - path='' if not result.group('path') - else result.group('path'), - token=result.group('token'), - params='' if not result.group('params') - else '?{}'.format(result.group('params')))) + "{schema}://{hostname}{port}{path}/{token}/{params}".format( + schema=( + NotifyAppriseAPI.secure_protocol + if result.group("secure") + else NotifyAppriseAPI.protocol + ), + hostname=result.group("hostname"), + port=( + "" + if not result.group("port") + else ":{}".format(result.group("port")) + ), + path=( + "" + if not result.group("path") + else result.group("path") + ), + token=result.group("token"), + params=( + "" + if not result.group("params") + else "?{}".format(result.group("params")) + ), + ) + ) return None @staticmethod def parse_url(url): - """ - Parses the URL and returns enough arguments that can allow - us to re-instantiate this object. 
- - """ + """Parses the URL and returns enough arguments that can allow us to re- + instantiate this object.""" results = NotifyBase.parse_url(url) if not results: # We're done early as we couldn't load the results @@ -449,40 +501,44 @@ def parse_url(url): # Add our headers that the user can potentially over-ride if they wish # to to our returned result set and tidy entries by unquoting them - results['headers'] = \ - {NotifyAppriseAPI.unquote(x): NotifyAppriseAPI.unquote(y) - for x, y in results['qsd+'].items()} + results["headers"] = { + NotifyAppriseAPI.unquote(x): NotifyAppriseAPI.unquote(y) + for x, y in results["qsd+"].items() + } # Support the passing of tags in the URL - if 'tags' in results['qsd'] and len(results['qsd']['tags']): - results['tags'] = \ - NotifyAppriseAPI.parse_list(results['qsd']['tags']) + if "tags" in results["qsd"] and len(results["qsd"]["tags"]): + results["tags"] = NotifyAppriseAPI.parse_list( + results["qsd"]["tags"] + ) # Support the 'to' & 'token' variable so that we can support rooms # this way too. 
- if 'token' in results['qsd'] and len(results['qsd']['token']): - results['token'] = \ - NotifyAppriseAPI.unquote(results['qsd']['token']) + if "token" in results["qsd"] and len(results["qsd"]["token"]): + results["token"] = NotifyAppriseAPI.unquote( + results["qsd"]["token"] + ) - elif 'to' in results['qsd'] and len(results['qsd']['to']): - results['token'] = NotifyAppriseAPI.unquote(results['qsd']['to']) + elif "to" in results["qsd"] and len(results["qsd"]["to"]): + results["token"] = NotifyAppriseAPI.unquote(results["qsd"]["to"]) else: # Start with a list of path entries to work with - entries = NotifyAppriseAPI.split_path(results['fullpath']) + entries = NotifyAppriseAPI.split_path(results["fullpath"]) if entries: # use our last entry found - results['token'] = entries[-1] + results["token"] = entries[-1] # pop our last entry off entries = entries[:-1] # re-assemble our full path - results['fullpath'] = '/'.join(entries) + results["fullpath"] = "/".join(entries) # Set method if specified - if 'method' in results['qsd'] and len(results['qsd']['method']): - results['method'] = \ - NotifyAppriseAPI.unquote(results['qsd']['method']) + if "method" in results["qsd"] and len(results["qsd"]["method"]): + results["method"] = NotifyAppriseAPI.unquote( + results["qsd"]["method"] + ) return results diff --git a/libs/apprise/plugins/aprs.py b/libs/apprise/plugins/aprs.py index bf50af9bba..33d40fbe43 100644 --- a/libs/apprise/plugins/aprs.py +++ b/libs/apprise/plugins/aprs.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. 
-# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -67,16 +66,18 @@ # http://www.aprs.org/doc/APRS101.PDF # +import contextlib +from itertools import chain +import re import socket import sys -from itertools import chain -from .base import NotifyBase + +from .. import __version__ +from ..common import NotifyType from ..locale import gettext_lazy as _ from ..url import PrivacyMode -from ..common import NotifyType from ..utils.parse import is_call_sign, parse_call_sign -from .. import __version__ -import re +from .base import NotifyBase # Fixed APRS-IS server locales # Default is 'EURO' @@ -104,14 +105,11 @@ } # Our compiled mapping of bad characters -APRS_COMPILED_MAP = re.compile( - r'(' + '|'.join(APRS_BAD_CHARMAP.keys()) + r')') +APRS_COMPILED_MAP = re.compile(r"(" + "|".join(APRS_BAD_CHARMAP.keys()) + r")") class NotifyAprs(NotifyBase): - """ - A wrapper for APRS Notifications via APRS-IS - """ + """A wrapper for APRS Notifications via APRS-IS.""" # The default descriptive name associated with the Notification service_name = "Aprs" @@ -123,7 +121,7 @@ class NotifyAprs(NotifyBase): secure_protocol = "aprs" # A URL that takes you to the setup/help of the specific protocol - setup_url = "https://github.com/caronc/apprise/wiki/Notify_aprs" + setup_url = "https://appriseit.com/services/aprs/" # APRS default port, supported by all core servers # Details: https://www.aprs-is.net/Connecting.aspx @@ -156,7 +154,7 @@ class NotifyAprs(NotifyBase): request_rate_per_sec = 0.8 # Encoding of retrieved content - aprs_encoding = 'latin-1' + aprs_encoding = "latin-1" # Define object templates templates = ("{schema}://{user}:{password}@{targets}",) @@ -190,7 +188,7 @@ class NotifyAprs(NotifyBase): "type": "list:string", "required": True, }, - } + }, ) # Define our template arguments @@ -215,21 +213,18 @@ class 
NotifyAprs(NotifyBase): "values": APRS_LOCALES, "default": "EURO", }, - } + }, ) def __init__(self, targets=None, locale=None, delay=None, **kwargs): - """ - Initialize APRS Object - """ + """Initialize APRS Object.""" super().__init__(**kwargs) # Our (future) socket sobject self.sock = None # Parse our targets - self.targets = list() - + self.targets = [] """ Check if the user has provided credentials """ @@ -262,21 +257,19 @@ def __init__(self, targets=None, locale=None, delay=None, **kwargs): """ self.user = self.user.upper() self.device_id = self.device_id.upper() - """ Check if the user has provided a locale for the APRS-IS-server and validate it, if necessary """ - if locale: - if locale.upper() not in APRS_LOCALES: - msg = ( - "Unsupported APRS-IS server locale. " - "Received: {}. Valid: {}".format( - locale, ", ".join(str(x) for x in APRS_LOCALES.keys()) - ) + if locale and locale.upper() not in APRS_LOCALES: + msg = ( + "Unsupported APRS-IS server locale. " + "Received: {}. Valid: {}".format( + locale, ", ".join(str(x) for x in APRS_LOCALES) ) - self.logger.warning(msg) - raise TypeError(msg) + ) + self.logger.warning(msg) + raise TypeError(msg) # Update our delay if delay is None: @@ -285,28 +278,29 @@ def __init__(self, targets=None, locale=None, delay=None, **kwargs): else: try: self.delay = float(delay) - if self.delay < NotifyAprs.template_args["delay"]["min"]: - raise ValueError() - - elif self.delay >= NotifyAprs.template_args["delay"]["max"]: + if ( + self.delay < NotifyAprs.template_args["delay"]["min"] + or self.delay >= NotifyAprs.template_args["delay"]["max"] + ): raise ValueError() except (TypeError, ValueError): - msg = "Unsupported APRS-IS delay ({}) specified. ".format( - delay) + msg = f"Unsupported APRS-IS delay ({delay}) specified. 
" self.logger.warning(msg) - raise TypeError(msg) + raise TypeError(msg) from None # Bump up our request_rate self.request_rate_per_sec += self.delay # Set the transmitter group - self.locale = \ - NotifyAprs.template_args["locale"]["default"] \ - if not locale else locale.upper() + self.locale = ( + NotifyAprs.template_args["locale"]["default"] + if not locale + else locale.upper() + ) # Used for URL generation afterwards only - self.invalid_targets = list() + self.invalid_targets = [] for target in parse_call_sign(targets): # Validate targets and drop bad ones @@ -315,9 +309,7 @@ def __init__(self, targets=None, locale=None, delay=None, **kwargs): result = is_call_sign(target) if not result: self.logger.warning( - "Dropping invalid Amateur radio call sign ({}).".format( - target - ), + f"Dropping invalid Amateur radio call sign ({target}).", ) self.invalid_targets.append(target.upper()) continue @@ -328,28 +320,17 @@ def __init__(self, targets=None, locale=None, delay=None, **kwargs): return def socket_close(self): - """ - Closes the socket connection whereas present - """ + """Closes the socket connection whereas present.""" if self.sock: - try: + with contextlib.suppress(Exception): self.sock.close() - - except Exception: - # No worries if socket exception thrown on close() - pass - self.sock = None def socket_open(self): - """ - Establishes the connection to the APRS-IS - socket server - """ + """Establishes the connection to the APRS-IS socket server.""" self.logger.debug( - "Creating socket connection with APRS-IS {}:{}".format( - APRS_LOCALES[self.locale], self.notify_port - ) + "Creating socket connection with APRS-IS" + f" {APRS_LOCALES[self.locale]}:{self.notify_port}" ) try: @@ -359,23 +340,23 @@ def socket_open(self): ) except ConnectionError as e: - self.logger.debug("Socket Exception socket_open: %s", str(e)) + self.logger.debug("Socket Exception socket_open: %s", e) self.sock = None return False except socket.gaierror as e: - 
self.logger.debug("Socket Exception socket_open: %s", str(e)) + self.logger.debug("Socket Exception socket_open: %s", e) self.sock = None return False except socket.timeout as e: self.logger.debug( - "Socket Timeout Exception socket_open: %s", str(e)) + "Socket Timeout Exception socket_open: %s", e) self.sock = None return False except Exception as e: - self.logger.debug("General Exception socket_open: %s", str(e)) + self.logger.debug("General Exception socket_open: %s", e) self.sock = None return False @@ -387,7 +368,7 @@ def socket_open(self): # Get the physical host/port of the server host, port = self.sock.getpeername() # and create debug info - self.logger.debug("Connected to {}:{}".format(host, port)) + self.logger.debug(f"Connected to {host}:{port}") except ValueError: # Seens as if we are running on an operating @@ -399,9 +380,8 @@ def socket_open(self): return True def aprsis_login(self): - """ - Generate the APRS-IS login string, send it to the server - and parse the response + """Generate the APRS-IS login string, send it to the server and parse + the response. 
Returns True/False wrt whether the login was successful """ @@ -413,8 +393,9 @@ def aprsis_login(self): return False # APRS-IS login string, see https://www.aprs-is.net/Connecting.aspx - login_str = "user {0} pass {1} vers apprise {2}\r\n".format( - self.user, self.password, __version__ + login_str = ( + f"user {self.user} pass {self.password} vers apprise" + f" {__version__}\r\n" ) # Send the data & abort in case of error @@ -457,22 +438,19 @@ def aprsis_login(self): # ValueError is returned if there were not enough elements to # populate the response self.logger.warning( - "socket_login: " "received invalid response from APRS-IS" + "socket_login: received invalid response from APRS-IS" ) self.socket_close() return False if callsign != self.user: - self.logger.warning( - "socket_login: " "call signs differ: %s" % callsign - ) + self.logger.warning(f"socket_login: call signs differ: {callsign}") self.socket_close() return False if status.startswith("unverified"): self.logger.warning( - "socket_login: " - "invalid APRS-IS password for given call sign" + "socket_login: invalid APRS-IS password for given call sign" ) self.socket_close() return False @@ -481,9 +459,7 @@ def aprsis_login(self): return True def socket_send(self, tx_data): - """ - Generic "Send data to a socket" - """ + """Generic "Send data to a socket".""" self.logger.debug("socket_send: init") # Check if we are connected @@ -507,21 +483,17 @@ def socket_send(self, tx_data): self.sock.sendall(payload) except socket.gaierror as e: - self.logger.warning("Socket Exception socket_send: %s" % str(e)) + self.logger.warning(f"Socket Exception socket_send: {e!s}") self.sock = None return False except socket.timeout as e: - self.logger.warning( - "Socket Timeout Exception " "socket_send: %s" % str(e) - ) + self.logger.warning(f"Socket Timeout Exception socket_send: {e!s}") self.sock = None return False except Exception as e: - self.logger.warning( - "General Exception " "socket_send: %s" % str(e) - ) + 
self.logger.warning(f"General Exception socket_send: {e!s}") self.sock = None return False @@ -533,18 +505,14 @@ def socket_send(self, tx_data): return True def socket_reset(self): - """ - Resets the socket's buffer - """ + """Resets the socket's buffer.""" self.logger.debug("socket_reset: init") _ = self.socket_receive(0) self.logger.debug("socket_reset: successful") return True def socket_receive(self, rx_len): - """ - Generic "Receive data from a socket" - """ + """Generic "Receive data from a socket".""" self.logger.debug("socket_receive: init") # Check if we are connected @@ -564,43 +532,38 @@ def socket_receive(self, rx_len): rx_buf = self.sock.recv(rx_len) except socket.gaierror as e: - self.logger.warning( - "Socket Exception socket_receive: %s" % str(e) - ) + self.logger.warning(f"Socket Exception socket_receive: {e!s}") self.sock = None return False except socket.timeout as e: self.logger.warning( - "Socket Timeout Exception " "socket_receive: %s" % str(e) + f"Socket Timeout Exception socket_receive: {e!s}" ) self.sock = None return False except Exception as e: - self.logger.warning( - "General Exception " "socket_receive: %s" % str(e) - ) + self.logger.warning(f"General Exception socket_receive: {e!s}") self.sock = None return False rx_buf = ( rx_buf.decode(self.aprs_encoding) - if sys.version_info[0] >= 3 else rx_buf + if sys.version_info[0] >= 3 + else rx_buf ) # There will be no data in case we reset the socket if rx_len > 0: - self.logger.debug("Received content: {}".format(rx_buf)) + self.logger.debug(f"Received content: {rx_buf}") self.logger.debug("socket_receive: successful") return rx_buf.rstrip() def send(self, body, title="", notify_type=NotifyType.INFO, **kwargs): - """ - Perform APRS Notification - """ + """Perform APRS Notification.""" if not self.targets: # There is no one to notify; we're done @@ -643,11 +606,10 @@ def send(self, body, title="", notify_type=NotifyType.INFO, **kwargs): # First remove all characters from the # payload that 
would break APRS # see https://www.aprs.org/doc/APRS101.PDF pg. 71 - payload = re.sub("[{}|~]+", "", payload) + payload = re.sub(r"[{}|~]+", "", payload) - payload = ( # pragma: no branch - APRS_COMPILED_MAP.sub( - lambda x: APRS_BAD_CHARMAP[x.group()], payload) + payload = APRS_COMPILED_MAP.sub( # pragma: no branch + lambda x: APRS_BAD_CHARMAP[x.group()], payload ) # Finally, constrain output string to 67 characters as @@ -658,21 +620,21 @@ def send(self, body, title="", notify_type=NotifyType.INFO, **kwargs): # let's amend our payload respectively payload = payload.rstrip("\r\n") + "\r\n" - self.logger.debug("Payload setup complete: {}".format(payload)) + self.logger.debug(f"Payload setup complete: {payload}") # send the message to our target call sign(s) for index in range(0, len(targets)): # prepare the output string # Format: # Device ID/TOCALL - our call sign - target call sign - body - buffer = "{}>{}::{:9}:{}".format( - self.user, self.device_id, targets[index], payload + buffer = ( + f"{self.user}>{self.device_id}::{targets[index]:9}:{payload}" ) # and send the content to the socket # Note that there will be no response from APRS and # that all exceptions are handled within the 'send' method - self.logger.debug("Sending APRS message: {}".format(buffer)) + self.logger.debug(f"Sending APRS message: {buffer}") # send the content if not self.socket_send(buffer): @@ -687,24 +649,23 @@ def send(self, body, title="", notify_type=NotifyType.INFO, **kwargs): self.logger.debug("Closing socket.") self.socket_close() self.logger.info( - "Sent %d/%d APRS-IS notification(s)", index + 1, len(targets)) + "Sent %d/%d APRS-IS notification(s)", index + 1, len(targets) + ) return not has_error def url(self, privacy=False, *args, **kwargs): - """ - Returns the URL built dynamically based on specified arguments. 
- """ + """Returns the URL built dynamically based on specified arguments.""" # Define any URL parameters params = {} if self.locale != NotifyAprs.template_args["locale"]["default"]: # Store our locale if not default - params['locale'] = self.locale + params["locale"] = self.locale if self.delay != NotifyAprs.template_args["delay"]["default"]: # Store our locale if not default - params['delay'] = "{:.2f}".format(self.delay) + params["delay"] = f"{self.delay:.2f}" # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) @@ -720,43 +681,40 @@ def url(self, privacy=False, *args, **kwargs): return "{schema}://{auth}{targets}?{params}".format( schema=self.secure_protocol, auth=auth, - targets="/".join(chain( - [self.pprint(x, privacy, safe="") for x in self.targets], - [self.pprint(x, privacy, safe="") - for x in self.invalid_targets], - )), + targets="/".join( + chain( + [self.pprint(x, privacy, safe="") for x in self.targets], + [ + self.pprint(x, privacy, safe="") + for x in self.invalid_targets + ], + ) + ), params=NotifyAprs.urlencode(params), ) @property def url_identifier(self): - """ - Returns all of the identifiers that make this URL unique from - another simliar one. Targets or end points should never be identified - here. + """Returns all of the identifiers that make this URL unique from + another simliar one. + + Targets or end points should never be identified here. 
""" return (self.user, self.password, self.locale) def __len__(self): - """ - Returns the number of targets associated with this notification - """ + """Returns the number of targets associated with this notification.""" targets = len(self.targets) return targets if targets > 0 else 1 def __del__(self): - """ - Ensure we close any lingering connections - """ + """Ensure we close any lingering connections.""" self.socket_close() @staticmethod def parse_url(url): - """ - Parses the URL and returns enough arguments that can allow - us to re-instantiate this object. - - """ + """Parses the URL and returns enough arguments that can allow us to re- + instantiate this object.""" results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results @@ -769,8 +727,8 @@ def parse_url(url): results["targets"].extend(NotifyAprs.split_path(results["fullpath"])) # Get Delay (if set) - if 'delay' in results['qsd'] and len(results['qsd']['delay']): - results['delay'] = NotifyAprs.unquote(results['qsd']['delay']) + if "delay" in results["qsd"] and len(results["qsd"]["delay"]): + results["delay"] = NotifyAprs.unquote(results["qsd"]["delay"]) # Support the 'to' variable so that we can support rooms this way too # The 'to' makes it easier to use yaml configuration diff --git a/libs/apprise/plugins/bark.py b/libs/apprise/plugins/bark.py index 0122598d43..6dad9f3dbf 100644 --- a/libs/apprise/plugins/bark.py +++ b/libs/apprise/plugins/bark.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. 
-# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -29,16 +28,15 @@ # # API: https://github.com/Finb/bark-server/blob/master/docs/API_V2.md#python # -import requests import json -from .base import NotifyBase -from ..url import PrivacyMode -from ..common import NotifyImageSize -from ..common import NotifyType -from ..utils.parse import parse_list, parse_bool -from ..locale import gettext_lazy as _ +import requests +from ..common import NotifyFormat, NotifyImageSize, NotifyType +from ..locale import gettext_lazy as _ +from ..url import PrivacyMode +from ..utils.parse import parse_bool, parse_list +from .base import NotifyBase # Sounds generated off of: https://github.com/Finb/Bark/tree/master/Sounds BARK_SOUNDS = ( @@ -79,16 +77,15 @@ # Supported Level Entries class NotifyBarkLevel: - """ - Defines the Bark Level options - """ - ACTIVE = 'active' + """Defines the Bark Level options.""" + + ACTIVE = "active" - TIME_SENSITIVE = 'timeSensitive' + TIME_SENSITIVE = "timeSensitive" - PASSIVE = 'passive' + PASSIVE = "passive" - CRITICAL = 'critical' + CRITICAL = "critical" BARK_LEVELS = ( @@ -100,24 +97,22 @@ class NotifyBarkLevel: class NotifyBark(NotifyBase): - """ - A wrapper for Notify Bark Notifications - """ + """A wrapper for Notify Bark Notifications.""" # The default descriptive name associated with the Notification - service_name = 'Bark' + service_name = "Bark" # The services URL - service_url = 'https://github.com/Finb/Bark' + service_url = "https://github.com/Finb/Bark" # The default protocol - protocol = 'bark' + protocol = "bark" # The default secure protocol - secure_protocol = 'barks' + secure_protocol = "barks" # A URL that takes you to the setup/help of the specific protocol - setup_url = 'https://github.com/caronc/apprise/wiki/Notify_bark' + setup_url = 
"https://appriseit.com/services/bark/" # Allows the user to specify the NotifyImageSize object; this is supported # through the webhook @@ -125,111 +120,138 @@ class NotifyBark(NotifyBase): # Define object templates templates = ( - '{schema}://{host}/{targets}', - '{schema}://{host}:{port}/{targets}', - '{schema}://{user}:{password}@{host}/{targets}', - '{schema}://{user}:{password}@{host}:{port}/{targets}', + "{schema}://{host}/{targets}", + "{schema}://{host}:{port}/{targets}", + "{schema}://{user}:{password}@{host}/{targets}", + "{schema}://{user}:{password}@{host}:{port}/{targets}", ) # Define our template arguments - template_tokens = dict(NotifyBase.template_tokens, **{ - 'host': { - 'name': _('Hostname'), - 'type': 'string', - 'required': True, + template_tokens = dict( + NotifyBase.template_tokens, + **{ + "host": { + "name": _("Hostname"), + "type": "string", + "required": True, + }, + "port": { + "name": _("Port"), + "type": "int", + "min": 1, + "max": 65535, + }, + "user": { + "name": _("Username"), + "type": "string", + }, + "password": { + "name": _("Password"), + "type": "string", + "private": True, + }, + "target_device": { + "name": _("Target Device"), + "type": "string", + "map_to": "targets", + }, + "targets": { + "name": _("Targets"), + "type": "list:string", + "required": True, + }, }, - 'port': { - 'name': _('Port'), - 'type': 'int', - 'min': 1, - 'max': 65535, - }, - 'user': { - 'name': _('Username'), - 'type': 'string', - }, - 'password': { - 'name': _('Password'), - 'type': 'string', - 'private': True, - }, - 'target_device': { - 'name': _('Target Device'), - 'type': 'string', - 'map_to': 'targets', - }, - 'targets': { - 'name': _('Targets'), - 'type': 'list:string', - 'required': True, - }, - }) + ) # Define our template arguments - template_args = dict(NotifyBase.template_args, **{ - 'to': { - 'alias_of': 'targets', - }, - 'sound': { - 'name': _('Sound'), - 'type': 'choice:string', - 'values': BARK_SOUNDS, - }, - 'level': { - 'name': 
_('Level'), - 'type': 'choice:string', - 'values': BARK_LEVELS, + template_args = dict( + NotifyBase.template_args, + **{ + "to": { + "alias_of": "targets", + }, + "sound": { + "name": _("Sound"), + "type": "choice:string", + "values": BARK_SOUNDS, + }, + "level": { + "name": _("Level"), + "type": "choice:string", + "values": BARK_LEVELS, + }, + "volume": { + "name": _("Volume"), + "type": "int", + "min": 0, + "max": 10, + }, + "click": { + "name": _("Click"), + "type": "string", + }, + "badge": { + "name": _("Badge"), + "type": "int", + "min": 0, + }, + "category": { + "name": _("Category"), + "type": "string", + }, + "group": { + "name": _("Group"), + "type": "string", + }, + "image": { + "name": _("Include Image"), + "type": "bool", + "default": True, + "map_to": "include_image", + }, + "icon": { + "name": _("Icon URL"), + "type": "string", + }, + "call": { + "name": _("Call"), + "type": "bool", + "default": False, + }, }, - 'volume': { - 'name': _('Volume'), - 'type': 'int', - 'min': 0, - 'max': 10, - }, - 'click': { - 'name': _('Click'), - 'type': 'string', - }, - 'badge': { - 'name': _('Badge'), - 'type': 'int', - 'min': 0, - }, - 'category': { - 'name': _('Category'), - 'type': 'string', - }, - 'group': { - 'name': _('Group'), - 'type': 'string', - }, - 'image': { - 'name': _('Include Image'), - 'type': 'bool', - 'default': True, - 'map_to': 'include_image', - }, - }) + ) - def __init__(self, targets=None, include_image=True, sound=None, - category=None, group=None, level=None, click=None, - badge=None, volume=None, **kwargs): - """ - Initialize Notify Bark Object - """ + def __init__( + self, + targets=None, + include_image=True, + sound=None, + category=None, + group=None, + level=None, + click=None, + badge=None, + volume=None, + icon=None, + call=None, + **kwargs, + ): + """Initialize Notify Bark Object.""" super().__init__(**kwargs) # Prepare our URL - self.notify_url = '%s://%s%s/push' % ( - 'https' if self.secure else 'http', + self.notify_url = 
"{}://{}{}/push".format( + "https" if self.secure else "http", self.host, - ':{}'.format(self.port) - if (self.port and isinstance(self.port, int)) else '', + ( + f":{self.port}" + if (self.port and isinstance(self.port, int)) + else "" + ), ) # Assign our category - self.category = \ - category if isinstance(category, str) else None + self.category = category if isinstance(category, str) else None # Assign our group self.group = group if isinstance(group, str) else None @@ -259,14 +281,21 @@ def __init__(self, targets=None, include_image=True, sound=None, except ValueError: self.badge = None self.logger.warning( - 'The specified Bark badge ({}) is not valid ', badge) + "The specified Bark badge ({}) is not valid ", badge + ) # Sound (easy-lookup) - self.sound = None if not sound else next( - (f for f in BARK_SOUNDS if f.startswith(sound.lower())), None) + self.sound = ( + None + if not sound + else next( + (f for f in BARK_SOUNDS if f.startswith(sound.lower())), None + ) + ) if sound and not self.sound: self.logger.warning( - 'The specified Bark sound ({}) was not found ', sound) + "The specified Bark sound ({}) was not found ", sound + ) # Volume self.volume = None @@ -278,40 +307,51 @@ def __init__(self, targets=None, include_image=True, sound=None, except (TypeError, ValueError): self.logger.warning( - 'The specified Bark volume ({}) is not valid. ' - 'Must be between 0 and 10', volume) + "The specified Bark volume ({}) is not valid. 
" + "Must be between 0 and 10", + volume, + ) + + # Call + self.call = parse_bool(call) + + # Icon URL + self.icon = icon if isinstance(icon, str) else None # Level - self.level = None if not level else next( - (f for f in BARK_LEVELS if f[0] == level[0]), None) + self.level = ( + None + if not level + else next((f for f in BARK_LEVELS if f[0] == level[0]), None) + ) if level and not self.level: self.logger.warning( - 'The specified Bark level ({}) is not valid ', level) + "The specified Bark level ({}) is not valid ", level + ) return - def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): - """ - Perform Bark Notification - """ + def send(self, body, title="", notify_type=NotifyType.INFO, **kwargs): + """Perform Bark Notification.""" # error tracking (used for function return) has_error = False if not self.targets: # We have nothing to notify; we're done - self.logger.warning('There are no Bark devices to notify') + self.logger.warning("There are no Bark devices to notify") return False # Prepare our headers headers = { - 'User-Agent': self.app_id, - 'Content-Type': 'application/json; charset=utf-8', + "User-Agent": self.app_id, + "Content-Type": "application/json; charset=utf-8", } # Prepare our payload (sample below) # { # "body": "Test Bark Server", + # "markdown": "# Markdown Content", # "device_key": "nysrshcqielvoxsa", # "title": "bleem", # "category": "category", @@ -319,40 +359,54 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): # "badge": 1, # "icon": "https://day.app/assets/images/avatar.jpg", # "group": "test", + # "level": "active", + # "volume": 5, + # "call": 1, # "url": "https://mritd.com" # } payload = { - 'title': title if title else self.app_desc, - 'body': body, + "title": title if title else self.app_desc, } + if self.notify_format == NotifyFormat.MARKDOWN: + payload["markdown"] = body + else: + payload["body"] = body + # Acquire our image url if configured to do so - image_url = None if not 
self.include_image else \ - self.image_url(notify_type) + image_url = ( + None if not self.include_image else self.image_url(notify_type) + ) - if image_url: - payload['icon'] = image_url + # Use custom icon if provided, otherwise use default image + if self.icon: + payload["icon"] = self.icon + elif image_url: + payload["icon"] = image_url if self.sound: - payload['sound'] = self.sound + payload["sound"] = self.sound if self.click: - payload['url'] = self.click + payload["url"] = self.click if self.badge: - payload['badge'] = self.badge + payload["badge"] = self.badge if self.level: - payload['level'] = self.level + payload["level"] = self.level if self.category: - payload['category'] = self.category + payload["category"] = self.category if self.group: - payload['group'] = self.group + payload["group"] = self.group if self.volume: - payload['volume'] = self.volume + payload["volume"] = self.volume + + if self.call: + payload["call"] = 1 auth = None if self.user: @@ -365,11 +419,12 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): # Retrieve our device key target = targets.pop() - payload['device_key'] = target - self.logger.debug('Bark POST URL: %s (cert_verify=%r)' % ( - self.notify_url, self.verify_certificate, - )) - self.logger.debug('Bark Payload: %s' % str(payload)) + payload["device_key"] = target + self.logger.debug( + "Bark POST URL:" + f" {self.notify_url} (cert_verify={self.verify_certificate!r})" + ) + self.logger.debug(f"Bark Payload: {payload!s}") # Always call throttle before any remote server i/o is made self.throttle() @@ -384,34 +439,36 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): ) if r.status_code != requests.codes.ok: # We had a problem - status_str = \ - NotifyBark.http_response_code_lookup( - r.status_code) + status_str = NotifyBark.http_response_code_lookup( + r.status_code + ) self.logger.warning( - 'Failed to send Bark notification to {}: ' - '{}{}error={}.'.format( + "Failed to send 
Bark notification to {}: " + "{}{}error={}.".format( target, status_str, - ', ' if status_str else '', - r.status_code)) + ", " if status_str else "", + r.status_code, + ) + ) self.logger.debug( - 'Response Details:\r\n{}'.format(r.content)) + "Response Details:\r\n%r", (r.content or b"")[:2000]) # Mark our failure has_error = True continue else: - self.logger.info( - 'Sent Bark notification to {}.'.format(target)) + self.logger.info(f"Sent Bark notification to {target}.") except requests.RequestException as e: self.logger.warning( - 'A Connection error occurred sending Bark ' - 'notification to {}.'.format(target)) - self.logger.debug('Socket Exception: %s' % str(e)) + "A Connection error occurred sending Bark " + f"notification to {target}." + ) + self.logger.debug(f"Socket Exception: {e!s}") # Mark our failure has_error = True @@ -421,90 +478,94 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): @property def url_identifier(self): - """ - Returns all of the identifiers that make this URL unique from - another simliar one. Targets or end points should never be identified - here. + """Returns all of the identifiers that make this URL unique from + another simliar one. + + Targets or end points should never be identified here. """ return ( self.secure_protocol if self.secure else self.protocol, - self.user, self.password, self.host, self.port, + self.user, + self.password, + self.host, + self.port, ) def url(self, privacy=False, *args, **kwargs): - """ - Returns the URL built dynamically based on specified arguments. 
- """ + """Returns the URL built dynamically based on specified arguments.""" # Define any URL parameters params = { - 'image': 'yes' if self.include_image else 'no', + "image": "yes" if self.include_image else "no", } if self.sound: - params['sound'] = self.sound + params["sound"] = self.sound if self.click: - params['click'] = self.click + params["click"] = self.click if self.badge: - params['badge'] = str(self.badge) + params["badge"] = str(self.badge) if self.level: - params['level'] = self.level + params["level"] = self.level if self.volume: - params['volume'] = str(self.volume) + params["volume"] = str(self.volume) if self.category: - params['category'] = self.category + params["category"] = self.category if self.group: - params['group'] = self.group + params["group"] = self.group + + if self.icon: + params["icon"] = self.icon + + if self.call: + params["call"] = "yes" # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) # Determine Authentication - auth = '' + auth = "" if self.user and self.password: - auth = '{user}:{password}@'.format( - user=NotifyBark.quote(self.user, safe=''), + auth = "{user}:{password}@".format( + user=NotifyBark.quote(self.user, safe=""), password=self.pprint( - self.password, privacy, mode=PrivacyMode.Secret, safe=''), + self.password, privacy, mode=PrivacyMode.Secret, safe="" + ), ) elif self.user: - auth = '{user}@'.format( - user=NotifyBark.quote(self.user, safe=''), + auth = "{user}@".format( + user=NotifyBark.quote(self.user, safe=""), ) default_port = 443 if self.secure else 80 - - return '{schema}://{auth}{hostname}{port}/{targets}?{params}'.format( + return "{schema}://{auth}{hostname}{port}/{targets}?{params}".format( schema=self.secure_protocol if self.secure else self.protocol, auth=auth, # never encode hostname since we're expecting it to be a valid one hostname=self.host, - port='' if self.port is None or self.port == default_port - else ':{}'.format(self.port), - 
targets='/'.join( - [NotifyBark.quote('{}'.format(x)) for x in self.targets]), + port=( + "" + if self.port is None or self.port == default_port + else f":{self.port}" + ), + targets="/".join([NotifyBark.quote(f"{x}") for x in self.targets]), params=NotifyBark.urlencode(params), ) def __len__(self): - """ - Returns the number of targets associated with this notification - """ + """Returns the number of targets associated with this notification.""" return len(self.targets) @staticmethod def parse_url(url): - """ - Parses the URL and returns enough arguments that can allow - us to re-instantiate this object. - - """ + """Parses the URL and returns enough arguments that can allow us to re- + instantiate this object.""" results = NotifyBase.parse_url(url) if not results: @@ -512,50 +573,68 @@ def parse_url(url): return results # Apply our targets - results['targets'] = NotifyBark.split_path(results['fullpath']) + results["targets"] = NotifyBark.split_path(results["fullpath"]) # Category - if 'category' in results['qsd'] and results['qsd']['category']: - results['category'] = NotifyBark.unquote( - results['qsd']['category'].strip()) + if "category" in results["qsd"] and results["qsd"]["category"]: + results["category"] = NotifyBark.unquote( + results["qsd"]["category"].strip() + ) # Group - if 'group' in results['qsd'] and results['qsd']['group']: - results['group'] = NotifyBark.unquote( - results['qsd']['group'].strip()) + if "group" in results["qsd"] and results["qsd"]["group"]: + results["group"] = NotifyBark.unquote( + results["qsd"]["group"].strip() + ) # Badge - if 'badge' in results['qsd'] and results['qsd']['badge']: - results['badge'] = NotifyBark.unquote( - results['qsd']['badge'].strip()) + if "badge" in results["qsd"] and results["qsd"]["badge"]: + results["badge"] = NotifyBark.unquote( + results["qsd"]["badge"].strip() + ) # Volume - if 'volume' in results['qsd'] and results['qsd']['volume']: - results['volume'] = NotifyBark.unquote( - 
results['qsd']['volume'].strip()) + if "volume" in results["qsd"] and results["qsd"]["volume"]: + results["volume"] = NotifyBark.unquote( + results["qsd"]["volume"].strip() + ) # Level - if 'level' in results['qsd'] and results['qsd']['level']: - results['level'] = NotifyBark.unquote( - results['qsd']['level'].strip()) + if "level" in results["qsd"] and results["qsd"]["level"]: + results["level"] = NotifyBark.unquote( + results["qsd"]["level"].strip() + ) # Click (URL) - if 'click' in results['qsd'] and results['qsd']['click']: - results['click'] = NotifyBark.unquote( - results['qsd']['click'].strip()) + if "click" in results["qsd"] and results["qsd"]["click"]: + results["click"] = NotifyBark.unquote( + results["qsd"]["click"].strip() + ) # Sound - if 'sound' in results['qsd'] and results['qsd']['sound']: - results['sound'] = NotifyBark.unquote( - results['qsd']['sound'].strip()) + if "sound" in results["qsd"] and results["qsd"]["sound"]: + results["sound"] = NotifyBark.unquote( + results["qsd"]["sound"].strip() + ) # The 'to' makes it easier to use yaml configuration - if 'to' in results['qsd'] and len(results['qsd']['to']): - results['targets'] += \ - NotifyBark.parse_list(results['qsd']['to']) + if "to" in results["qsd"] and len(results["qsd"]["to"]): + results["targets"] += NotifyBark.parse_list(results["qsd"]["to"]) # use image= for consistency with the other plugins - results['include_image'] = \ - parse_bool(results['qsd'].get('image', True)) + results["include_image"] = parse_bool( + results["qsd"].get("image", True) + ) + + # Icon URL + if "icon" in results["qsd"] and results["qsd"]["icon"]: + results["icon"] = NotifyBark.unquote( + results["qsd"]["icon"].strip() + ) + + # Call + results["call"] = parse_bool( + results["qsd"].get("call", False) + ) return results diff --git a/libs/apprise/plugins/base.py b/libs/apprise/plugins/base.py index a50cdf0713..444728bd08 100644 --- a/libs/apprise/plugins/base.py +++ b/libs/apprise/plugins/base.py @@ -1,8 +1,7 @@ 
-# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. -# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -27,27 +26,41 @@ # POSSIBILITY OF SUCH DAMAGE. import asyncio -import re +from collections.abc import Generator +from datetime import tzinfo from functools import partial +import re +from typing import Any, ClassVar, Optional, TypedDict, Union +from zoneinfo import ZoneInfo +from ..apprise_attachment import AppriseAttachment +from ..common import ( + NOTIFY_FORMATS, + OVERFLOW_MODES, + NotifyFormat, + NotifyImageSize, + NotifyType, + OverflowMode, + PersistentStoreMode, +) +from ..locale import Translatable, gettext_lazy as _ +from ..persistent_store import PersistentStore from ..url import URLBase -from ..common import NotifyType +from ..utils.format import smart_split from ..utils.parse import parse_bool -from ..common import NOTIFY_TYPES -from ..common import NotifyFormat -from ..common import NOTIFY_FORMATS -from ..common import OverflowMode -from ..common import OVERFLOW_MODES -from ..common import PersistentStoreMode -from ..locale import gettext_lazy as _ -from ..persistent_store import PersistentStore -from ..apprise_attachment import AppriseAttachment +from ..utils.time import zoneinfo + + +class RequirementsSpec(TypedDict, total=False): + """Defines our plugin requirements.""" + + packages_required: Optional[Union[str, list[str]]] + packages_recommended: Optional[Union[str, list[str]]] + details: Optional[Translatable] class NotifyBase(URLBase): - """ - This is the base class for all notification services - """ + """This is the base class for all notification services.""" # An internal flag used to test the state of the plugin. If set to # False, then the plugin is not used. 
Plugins can disable themselves @@ -63,16 +76,16 @@ class NotifyBase(URLBase): # native: Is a native plugin written/stored in `apprise/plugins/Notify*` # custom: Is a custom plugin written/stored in a users plugin directory # that they loaded at execution time. - category = 'native' + category = "native" # Some plugins may require additional packages above what is provided # already by Apprise. # # Use this section to relay this information to the users of the script to # help guide them with what they need to know if they plan on using your - # plugin. The below configuration should otherwise accomodate all normal + # plugin. The below configuration should otherwise accommodate all normal # situations and will not requrie any updating: - requirements = { + requirements: ClassVar[RequirementsSpec] = { # Use the description to provide a human interpretable description of # what is required to make the plugin work. This is only nessisary # if there are package dependencies. Setting this to default will @@ -83,8 +96,7 @@ class NotifyBase(URLBase): # from apprise.AppriseLocale import gettext_lazy as _ # # 'details': _('My detailed requirements') - 'details': None, - + "details": None, # Define any required packages needed for the plugin to run. This is # an array of strings that simply look like lines residing in a # `requirements.txt` file... @@ -93,17 +105,15 @@ class NotifyBase(URLBase): # 'packages_required': [ # 'cryptography < 3.4`, # ] - 'packages_required': [], - + "packages_required": [], # Recommended packages identify packages that are not required to make # your plugin work, but would improve it's use or grant it access to # full functionality (that might otherwise be limited). - # Similar to `packages_required`, you would identify each entry in # the array as you would in a `requirements.txt` file. 
# # - Do not re-provide entries already in the `packages_required` - 'packages_recommended': [], + "packages_recommended": [], } # The services URL @@ -135,6 +145,10 @@ class NotifyBase(URLBase): # Persistent storage default settings persistent_storage = True + # Timezone Default; by setting it to None, the timezone detected + # on the server is used + timezone = None + # Default Notify Format notify_format = NotifyFormat.TEXT @@ -165,58 +179,71 @@ class NotifyBase(URLBase): # titles, by default apprise tries to give a plesant view and convert the # title so that it can be placed into the body. The default is to just # use a tag. The below causes the title to get generated: - default_html_tag_id = 'b' + default_html_tag_id = "b" # Here is where we define all of the arguments we accept on the url # such as: schema://whatever/?overflow=upstream&format=text # These act the same way as tokens except they are optional and/or # have default values set if mandatory. This rule must be followed - template_args = dict(URLBase.template_args, **{ - 'overflow': { - 'name': _('Overflow Mode'), - 'type': 'choice:string', - 'values': OVERFLOW_MODES, - # Provide a default - 'default': overflow_mode, - # look up default using the following parent class value at - # runtime. The variable name identified here (in this case - # overflow_mode) is checked and it's result is placed over-top of - # the 'default'. This is done because once a parent class inherits - # this one, the overflow_mode already set as a default 'could' be - # potentially over-ridden and changed to a different value. - '_lookup_default': 'overflow_mode', + template_args = dict( + URLBase.template_args, + **{ + "overflow": { + "name": _("Overflow Mode"), + "type": "choice:string", + "values": OVERFLOW_MODES, + # Provide a default + "default": overflow_mode, + # look up default using the following parent class value at + # runtime. 
The variable name identified here (in this case + # overflow_mode) is checked and it's result is placed over-top + # of the 'default'. This is done because once a parent class + # inherits this one, the overflow_mode already set as a default + # 'could' be potentially over-ridden and changed to a different + # value. + "_lookup_default": "overflow_mode", + }, + "format": { + "name": _("Notify Format"), + "type": "choice:string", + "values": NOTIFY_FORMATS, + # Provide a default + "default": notify_format, + # look up default using the following parent class value at + # runtime. + "_lookup_default": "notify_format", + }, + "emojis": { + "name": _("Interpret Emojis"), + # SSL Certificate Authority Verification + "type": "bool", + # Provide a default + "default": interpret_emojis, + # look up default using the following parent class value at + # runtime. + "_lookup_default": "interpret_emojis", + }, + "store": { + "name": _("Persistent Storage"), + # Use Persistent Storage + "type": "bool", + # Provide a default + "default": persistent_storage, + # look up default using the following parent class value at + # runtime. + "_lookup_default": "persistent_storage", + }, + "tz": { + "name": _("Timezone"), + "type": "string", + # Provide a default + "default": timezone, + # look up default using the following parent class value at + # runtime. + "_lookup_default": "timezone", + }, }, - 'format': { - 'name': _('Notify Format'), - 'type': 'choice:string', - 'values': NOTIFY_FORMATS, - # Provide a default - 'default': notify_format, - # look up default using the following parent class value at - # runtime. - '_lookup_default': 'notify_format', - }, - 'emojis': { - 'name': _('Interpret Emojis'), - # SSL Certificate Authority Verification - 'type': 'bool', - # Provide a default - 'default': interpret_emojis, - # look up default using the following parent class value at - # runtime. 
- '_lookup_default': 'interpret_emojis', - }, - 'store': { - 'name': _('Persistent Storage'), - # Use Persistent Storage - 'type': 'bool', - # Provide a default - 'default': persistent_storage, - # look up default using the following parent class value at - # runtime. - '_lookup_default': 'persistent_storage', - }, - }) + ) # # Overflow Defaults / Configuration applicable to SPLIT mode only @@ -266,12 +293,16 @@ class NotifyBase(URLBase): # restrictions and that of body_maxlen overflow_amalgamate_title = False - def __init__(self, **kwargs): - """ - Initialize some general configuration that will keep things consistent - when working with the notifiers that will inherit this class. + # Identifies the timezone to use; if this is not over-ridden, then the + # timezone defined in the AppriseAsset() object is used instead. The + # Below is expected to be in a ZoneInfo type already. You can have this + # automatically initialized by specifying ?tz= on the Apprise URLs + __tzinfo = None - """ + def __init__(self, **kwargs): + """Initialize some general configuration that will keep things + consistent when working with the notifiers that will inherit this + class.""" super().__init__(**kwargs) @@ -292,123 +323,138 @@ def __init__(self, **kwargs): # Take a default self.interpret_emojis = self.asset.interpret_emojis - if 'emojis' in kwargs: + if "emojis" in kwargs: # possibly over-ride default - self.interpret_emojis = True if self.interpret_emojis \ - in (None, True) and \ - parse_bool( - kwargs.get('emojis', False), - default=NotifyBase.template_args['emojis']['default']) \ - else False - - if 'format' in kwargs: - # Store the specified format if specified - notify_format = kwargs.get('format', '') - if notify_format.lower() not in NOTIFY_FORMATS: - msg = 'Invalid notification format {}'.format(notify_format) - self.logger.error(msg) - raise TypeError(msg) - - # Provide override - self.notify_format = notify_format - - if 'overflow' in kwargs: - # Store the specified 
format if specified - overflow = kwargs.get('overflow', '') - if overflow.lower() not in OVERFLOW_MODES: - msg = 'Invalid overflow method {}'.format(overflow) - self.logger.error(msg) - raise TypeError(msg) - - # Provide override - self.overflow_mode = overflow + self.interpret_emojis = bool( + self.interpret_emojis in (None, True) + and parse_bool( + kwargs.get("emojis", False), + default=NotifyBase.template_args["emojis"]["default"], + ) + ) + + if "format" in kwargs: + value = kwargs["format"] + try: + self.notify_format = ( + value if isinstance(value, NotifyFormat) + else NotifyFormat(value.lower()) + ) + + except (AttributeError, ValueError): + err = ( + f"An invalid notification format ({value}) was " + "specified.") + self.logger.warning(err) + raise TypeError(err) from None + + if "tz" in kwargs: + value = kwargs["tz"] + self.__tzinfo = zoneinfo(value) + if not self.__tzinfo: + err = ( + f"An invalid notification timezone ({value}) was " + "specified.") + self.logger.warning(err) + raise TypeError(err) from None + + if "overflow" in kwargs: + value = kwargs["overflow"] + try: + self.overflow_mode = ( + value if isinstance(value, OverflowMode) + else OverflowMode(value.lower()) + ) + + except (AttributeError, ValueError): + err = f"An invalid overflow method ({value}) was specified." 
+ self.logger.warning(err) + raise TypeError(err) from None # Prepare our Persistent Storage switch self.persistent_storage = parse_bool( - kwargs.get('store', NotifyBase.persistent_storage)) + kwargs.get("store", NotifyBase.persistent_storage) + ) if not self.persistent_storage: # Enforce the disabling of cache (ortherwise defaults are use) self.url_identifier = False self.__cached_url_identifier = None - def image_url(self, notify_type, logo=False, extension=None, - image_size=None): - """ - Returns Image URL if possible - """ - - if not self.image_size: - return None - - if notify_type not in NOTIFY_TYPES: + def image_url( + self, + notify_type: NotifyType, + image_size: Optional[NotifyImageSize] = None, + logo: bool = False, + extension: Optional[str] = None, + ) -> Optional[str]: + """Returns Image URL if possible.""" + + image_size = self.image_size if image_size is None else image_size + if not image_size: return None return self.asset.image_url( notify_type=notify_type, - image_size=self.image_size if image_size is None else image_size, + image_size=image_size, logo=logo, extension=extension, ) - def image_path(self, notify_type, extension=None): - """ - Returns the path of the image if it can - """ + def image_path( + self, + notify_type: NotifyType, + extension: Optional[str] = None, + ) -> Optional[str]: + """Returns the path of the image if it can.""" if not self.image_size: return None - if notify_type not in NOTIFY_TYPES: - return None - return self.asset.image_path( notify_type=notify_type, image_size=self.image_size, extension=extension, ) - def image_raw(self, notify_type, extension=None): - """ - Returns the raw image if it can - """ + def image_raw( + self, + notify_type: NotifyType, + extension: Optional[str] = None, + ) -> Optional[bytes]: + """Returns the raw image if it can.""" if not self.image_size: return None - if notify_type not in NOTIFY_TYPES: - return None - return self.asset.image_raw( notify_type=notify_type, 
image_size=self.image_size, extension=extension, ) - def color(self, notify_type, color_type=None): - """ - Returns the html color (hex code) associated with the notify_type - """ - if notify_type not in NOTIFY_TYPES: - return None + def color( + self, + notify_type: NotifyType, + color_type: Optional[type] = None, + ) -> Union[str, int, tuple[int, int, int]]: + """Returns the html color (hex code) associated with the + notify_type.""" return self.asset.color( notify_type=notify_type, color_type=color_type, ) - def ascii(self, notify_type): - """ - Returns the ascii characters associated with the notify_type - """ - if notify_type not in NOTIFY_TYPES: - return None + def ascii( + self, + notify_type: NotifyType, + ) -> str: + """Returns the ascii characters associated with the notify_type.""" return self.asset.ascii( notify_type=notify_type, ) - def notify(self, *args, **kwargs): - """ - Performs notification - """ + def notify(self, *args: Any, **kwargs: Any) -> bool: + """Performs notification.""" try: # Build a list of dictionaries that can be used to call send(). send_calls = list(self._build_send_calls(*args, **kwargs)) @@ -423,10 +469,8 @@ def notify(self, *args, **kwargs): the_calls = [self.send(**kwargs2) for kwargs2 in send_calls] return all(the_calls) - async def async_notify(self, *args, **kwargs): - """ - Performs notification for asynchronous callers - """ + async def async_notify(self, *args: Any, **kwargs: Any) -> bool: + """Performs notification for asynchronous callers.""" try: # Build a list of dictionaries that can be used to call send(). 
send_calls = list(self._build_send_calls(*args, **kwargs)) @@ -450,13 +494,18 @@ async def do_send(**kwargs2): the_cors = (do_send(**kwargs2) for kwargs2 in send_calls) return all(await asyncio.gather(*the_cors)) - def _build_send_calls(self, body=None, title=None, - notify_type=NotifyType.INFO, overflow=None, - attach=None, body_format=None, **kwargs): - """ - Get a list of dictionaries that can be used to call send() or - (in the future) async_send(). - """ + def _build_send_calls( + self, + body: Optional[str] = None, + title: Optional[str] = None, + notify_type: NotifyType = NotifyType.INFO, + overflow: Optional[Union[str, OverflowMode]] = None, + attach: Optional[Union[list[str], AppriseAttachment]] = None, + body_format: Optional[NotifyFormat] = None, + **kwargs: Any, + ) -> Generator[dict[str, Any], None, None]: + """Get a list of dictionaries that can be used to call send() or (in + the future) async_send().""" if not self.enabled: # Deny notifications issued to services that are disabled @@ -474,7 +523,7 @@ def _build_send_calls(self, body=None, title=None, raise # Handle situations where the body is None - body = '' if not body else body + body = body if body else "" elif not (body or attach): # If there is not an attachment at the very least, a body must be @@ -490,13 +539,15 @@ def _build_send_calls(self, body=None, title=None, # Knowing this, if the plugin itself doesn't support sending # attachments, there is nothing further to do here, just move # along. - msg = f"{self.service_name} does not support attachments; " \ + msg = ( + f"{self.service_name} does not support attachments; " " service skipped" + ) self.logger.warning(msg) raise TypeError(msg) # Handle situations where the title is None - title = '' if not title else title + title = title if title else "" # Truncate flag set with attachments ensures that only 1 # attachment passes through. 
In the event there could be many @@ -506,26 +557,35 @@ def _build_send_calls(self, body=None, title=None, overflow = self.overflow_mode if overflow is None else overflow if attach and len(attach) > 1 and overflow == OverflowMode.TRUNCATE: # Save first attachment - _attach = AppriseAttachment(attach[0], asset=self.asset) + attach_ = AppriseAttachment(attach[0], asset=self.asset) else: # reference same attachment - _attach = attach + attach_ = attach # Apply our overflow (if defined) for chunk in self._apply_overflow( - body=body, title=title, overflow=overflow, - body_format=body_format): + body=body, title=title, overflow=overflow, body_format=body_format + ): # Send notification - yield dict( - body=chunk['body'], title=chunk['title'], - notify_type=notify_type, attach=_attach, - body_format=body_format - ) - - def _apply_overflow(self, body, title=None, overflow=None, - body_format=None): + yield { + "body": chunk["body"], + "title": chunk["title"], + "notify_type": notify_type, + "attach": attach_, + "body_format": body_format, + } + + def _apply_overflow( + self, + body: Optional[str], + title: Optional[str] = None, + overflow: Optional[Union[str, OverflowMode]] = None, + body_format: Optional[NotifyFormat] = None, + ) -> list[dict[str, str]]: """ + Apply overflow behaviour (UPSTREAM, TRUNCATE, SPLIT) to title/body. + Takes the message body and title as input. This function then applies any defined overflow restrictions associated with the notification service and may alter the message if/as required. 
@@ -541,224 +601,288 @@ def _apply_overflow(self, body, title=None, overflow=None, title: 'the title goes here', body: 'the continued message body goes here', }, - ] """ + response: list[dict[str, str]] = [] - response = list() - - # tidy - title = '' if not title else title.strip() - body = '' if not body else body.rstrip() + # Tidy + title = "" if not title else title.strip() + body = "" if not body else body.rstrip() + # Default overflow mode if overflow is None: - # default overflow = self.overflow_mode + # Default effective body format + if body_format is None: + body_format = self.notify_format + + # If the service does not support a title, amalgamate into body if self.title_maxlen <= 0 and len(title) > 0: if self.notify_format == NotifyFormat.HTML: - # Content is appended to body as html - body = '<{open_tag}>{title}' \ - '
\r\n{body}'.format( - open_tag=self.default_html_tag_id, - title=title, - close_tag=self.default_html_tag_id, - body=body) - - elif self.notify_format == NotifyFormat.MARKDOWN and \ - body_format == NotifyFormat.TEXT: + body = ( + f"<{self.default_html_tag_id}>{title}" + f"" + f"
\r\n{body}" + ) + + elif ( + self.notify_format == NotifyFormat.MARKDOWN + and body_format == NotifyFormat.TEXT + ): # Content is appended to body as markdown - title = title.lstrip('\r\n \t\v\f#-') + title = title.lstrip("\r\n \t\v\f#-") if title: - # Content is appended to body as text - body = '# {}\r\n{}'.format(title, body) + body = f"# {title}\r\n{body}" else: - # Content is appended to body as text - body = '{}\r\n{}'.format(title, body) + body = f"{title}\r\n{body}" - title = '' + title = "" - # Enforce the line count first always + # Enforce line count if self.body_max_line_count > 0: - # Limit results to just the first 2 line otherwise - # there is just to much content to display - body = re.split(r'\r*\n', body) - body = '\r\n'.join(body[0:self.body_max_line_count]) + lines = re.split(r"\r*\n", body) + body = "\r\n".join(lines[0 : self.body_max_line_count]) + # UPSTREAM mode: do not touch content if overflow == OverflowMode.UPSTREAM: - # Nothing more to do - response.append({'body': body, 'title': title}) + response.append({"body": body, "title": title}) return response - # a value of '2' allows for the \r\n that is applied when - # amalgamating the title - overflow_buffer = max(2, self.overflow_buffer) \ - if (self.title_maxlen == 0 and len(title)) \ + # a value of '2' allows for the \r\n that is applied when amalgamating + overflow_buffer = ( + max(2, self.overflow_buffer) + if (self.title_maxlen == 0 and len(title)) else self.overflow_buffer + ) # - # If we reach here in our code, then we're using TRUNCATE, or SPLIT - # actions which require some math to handle the data + # TRUNCATE and SPLIT require sizing logic # - # Handle situations where our body and title are amalamated into one - # calculation - title_maxlen = self.title_maxlen \ - if not self.overflow_amalgamate_title \ - else min(len(title) + self.overflow_max_display_count_width, - self.title_maxlen, self.body_maxlen) + # Handle situations where body and title are amalgamated + title_maxlen 
= ( + self.title_maxlen + if not self.overflow_amalgamate_title + else min( + len(title) + self.overflow_max_display_count_width, + self.title_maxlen, + self.body_maxlen, + ) + ) if len(title) > title_maxlen: - # Truncate our Title + # Truncate our title title = title[:title_maxlen].rstrip() - if self.overflow_amalgamate_title and ( - self.body_maxlen - overflow_buffer) >= title_maxlen: - body_maxlen = (self.body_maxlen if not title else ( - self.body_maxlen - title_maxlen)) - overflow_buffer - else: + # Compute body_maxlen as per legacy logic + if ( + self.overflow_amalgamate_title + and (self.body_maxlen - overflow_buffer) >= title_maxlen + ): # status quo - body_maxlen = self.body_maxlen \ - if not self.overflow_amalgamate_title else \ - (self.body_maxlen - overflow_buffer) + body_maxlen = ( + self.body_maxlen + if not title + else (self.body_maxlen - title_maxlen) + ) - overflow_buffer + else: + # If the body fits, we're done + body_maxlen = ( + self.body_maxlen + if not self.overflow_amalgamate_title + else (self.body_maxlen - overflow_buffer) + ) + # If the body fits, we are done if body_maxlen > 0 and len(body) <= body_maxlen: - response.append({'body': body, 'title': title}) + response.append({"body": body, "title": title}) return response + # TRUNCATE mode: hard truncation (no smart-splitting) if overflow == OverflowMode.TRUNCATE: - # Truncate our body and return response.append({ - 'body': body[:body_maxlen].lstrip('\r\n\x0b\x0c').rstrip(), - 'title': title, + "body": body[:body_maxlen].lstrip("\r\n\x0b\x0c").rstrip(), + "title": title, }) - # For truncate mode, we're done now return response + # + # SPLIT mode + # + + # Detect if we only display our title once or not (legacy logic) if self.overflow_display_title_once is None: # Detect if we only display our title once or not: - overflow_display_title_once = \ - True if self.overflow_amalgamate_title and \ - body_maxlen < self.overflow_display_count_threshold \ - else False + overflow_display_title_once = 
bool( + self.overflow_amalgamate_title + and body_maxlen < self.overflow_display_count_threshold + ) else: - # Take on defined value - overflow_display_title_once = self.overflow_display_title_once - # If we reach here, then we are in SPLIT mode. - # For here, we want to split the message as many times as we have to - # in order to fit it within the designated limits. + # SPLIT mode with repeated title (with/without counter) if not overflow_display_title_once and not ( - # edge case that can occur when overflow_display_title_once is - # forced off, but no body exists - self.overflow_amalgamate_title and body_maxlen <= 0): - - show_counter = title and len(body) > body_maxlen and \ - ((self.overflow_amalgamate_title and - body_maxlen >= self.overflow_display_count_threshold) or - (not self.overflow_amalgamate_title and - title_maxlen > self.overflow_display_count_threshold)) and ( - title_maxlen > (self.overflow_max_display_count_width + - overflow_buffer) and - self.title_maxlen >= self.overflow_display_count_threshold) - - count = 0 - template = '' - if show_counter: - # introduce padding - body_maxlen -= overflow_buffer + # edge case: amalgamated title but no body space + self.overflow_amalgamate_title + and body_maxlen <= 0 + ): + # Decide whether to show a counter (legacy condition) + show_counter = ( + title + and len(body) > body_maxlen + and ( + ( + self.overflow_amalgamate_title + and body_maxlen >= + self.overflow_display_count_threshold + ) + or ( + not self.overflow_amalgamate_title + and title_maxlen > + self.overflow_display_count_threshold + ) + ) + and ( + title_maxlen > + (self.overflow_max_display_count_width + overflow_buffer) + and self.title_maxlen >= + self.overflow_display_count_threshold + ) + ) - count = int(len(body) / body_maxlen) \ - + (1 if len(body) % body_maxlen else 0) + effective_body_maxlen = body_maxlen + if show_counter: + # introduce padding for the counter + effective_body_maxlen -= overflow_buffer + + # Use smart splitting 
instead of naive slicing + chunks = smart_split( + body, + effective_body_maxlen, + body_format, + ) + count = len(chunks) - # Detect padding and prepare template + template = "" + if show_counter: digits = len(str(count)) - template = ' [{:0%d}/{:0%d}]' % (digits, digits) - - # Update our counter overflow_display_count_width = 4 + (digits * 2) - if overflow_display_count_width <= \ - self.overflow_max_display_count_width: - if len(title) > \ - title_maxlen - overflow_display_count_width: - # Truncate our title further - title = title[:title_maxlen - - overflow_display_count_width] - - else: # Way to many messages to display - show_counter = False - response = [{ - 'body': body[i: i + body_maxlen] - .lstrip('\r\n\x0b\x0c').rstrip(), - 'title': title + ( - '' if not show_counter else - template.format(idx, count))} for idx, i in - enumerate(range(0, len(body), body_maxlen), start=1)] + if ( + overflow_display_count_width + <= self.overflow_max_display_count_width + ): + # Truncate title further if needed to make room for counter + if ( + len(title) + > title_maxlen - overflow_display_count_width + ): + title = title[ + : title_maxlen - overflow_display_count_width + ] + template = f" [{{:0{digits}d}}/{{:0{digits}d}}]" + else: + # Too many messages; fall back to repeated title without + # counter displayed + show_counter = False - else: # Display title once and move on response = [] - try: - i = range(0, len(body), body_maxlen)[0] - + for idx, chunk_body in enumerate(chunks, start=1): + suffix = template.format(idx, count) if show_counter else "" response.append({ - 'body': body[i: i + body_maxlen] - .lstrip('\r\n\x0b\x0c').rstrip(), - 'title': title, + "body": chunk_body.lstrip("\r\n\x0b\x0c").rstrip(), + "title": f"{title}{suffix}", }) - except (ValueError, IndexError): - # IndexError: - # - This happens if there simply was no body to display - - # ValueError: - # - This happens when body_maxlen < 0 (due to title being - # so large) + else: + # + # SPLIT mode, 
display title once and move on + # (this covers both overflow_display_title_once=True + # and the edge case body_maxlen <= 0 with amalgamated title) + # + response = [] + consumed = 0 + remainder = body + + if body_maxlen > 0 and body: + # First chunk uses body_maxlen (which already accounts for + # the title) + first_chunks = smart_split( + body, + body_maxlen, + body_format, + ) + first_body = first_chunks[0] if first_chunks else "" + consumed = len(first_body) + remainder = body[consumed:] - # No worries; send title along response.append({ - 'body': '', - 'title': title, + "body": first_body.lstrip("\r\n\x0b\x0c").rstrip(), + "title": title, }) - # Ensure our start is set properly - body_maxlen = 0 - - # Now re-calculate based on the increased length - for i in range(body_maxlen, len(body), self.body_maxlen): + else: + # body_maxlen <= 0 or no body; send title only, still honouring + # body response.append({ - 'body': body[i: i + self.body_maxlen] - .lstrip('\r\n\x0b\x0c').rstrip(), - 'title': '', + "body": "", + "title": title, }) + # remainder stays as full body; will be split below + + # Remaining chunks: no title, use the full body_maxlen of the + # service + if remainder: + more_chunks = smart_split( + remainder, + self.body_maxlen, + body_format, + ) + for chunk_body in more_chunks: + response.append({ + "body": chunk_body.lstrip("\r\n\x0b\x0c").rstrip(), + "title": "", + }) return response - def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): - """ - Should preform the actual notification itself. - - """ + def send( + self, + body: str, + title: str = "", + notify_type: NotifyType = NotifyType.INFO, + **kwargs: Any, + ) -> bool: + """Should preform the actual notification itself.""" raise NotImplementedError( - "send() is not implimented by the child class.") + "send() is not implimented by the child class." + ) - def url_parameters(self, *args, **kwargs): - """ - Provides a default set of parameters to work with. 
This can greatly - simplify URL construction in the acommpanied url() function in all - defined plugin services. + def url_parameters( + self, + *args: Any, + **kwargs: Any, + ) -> dict[str, Any]: + """Provides a default set of parameters to work with. + + This can greatly simplify URL construction in the acommpanied url() + function in all defined plugin services. """ params = { - 'format': self.notify_format, - 'overflow': self.overflow_mode, + "format": self.notify_format.value, + "overflow": self.overflow_mode.value, } + # Timezone Information (if ZoneInfo) + if self.__tzinfo and isinstance(self.__tzinfo, ZoneInfo): + params["tz"] = self.__tzinfo.key + # Persistent Storage Setting if self.persistent_storage != NotifyBase.persistent_storage: - params['store'] = 'yes' if self.persistent_storage else 'no' + params["store"] = "yes" if self.persistent_storage else "no" params.update(super().url_parameters(*args, **kwargs)) @@ -766,7 +890,11 @@ def url_parameters(self, *args, **kwargs): return params @staticmethod - def parse_url(url, verify_host=True, plus_to_space=False): + def parse_url( + url: str, + verify_host: bool = True, + plus_to_space: bool = False, + ) -> Optional[dict[str, Any]]: """Parses the URL and returns it broken apart into a dictionary. This is very specific and customized for Apprise. @@ -785,80 +913,92 @@ def parse_url(url, verify_host=True, plus_to_space=False): successful, otherwise None is returned. 
""" results = URLBase.parse_url( - url, verify_host=verify_host, plus_to_space=plus_to_space) + url, verify_host=verify_host, plus_to_space=plus_to_space + ) if not results: # We're done; we failed to parse our url return results # Allow overriding the default format - if 'format' in results['qsd']: - results['format'] = results['qsd'].get('format') - if results['format'] not in NOTIFY_FORMATS: + if "format" in results["qsd"]: + results["format"] = results["qsd"].get("format", "").lower() + if results["format"] not in NOTIFY_FORMATS: URLBase.logger.warning( - 'Unsupported format specified {}'.format( - results['format'])) - del results['format'] + "Unsupported format specified " + f"{results['qsd']['format']!r}" + ) + del results["format"] # Allow overriding the default overflow - if 'overflow' in results['qsd']: - results['overflow'] = results['qsd'].get('overflow') - if results['overflow'] not in OVERFLOW_MODES: + if "overflow" in results["qsd"]: + results["overflow"] = results["qsd"].get("overflow", "").lower() + if results["overflow"] not in OVERFLOW_MODES: URLBase.logger.warning( - 'Unsupported overflow specified {}'.format( - results['overflow'])) - del results['overflow'] + "Unsupported overflow mode specified " + f"{results['qsd']['overflow']!r}" + ) + del results["overflow"] # Allow emoji's override - if 'emojis' in results['qsd']: - results['emojis'] = parse_bool(results['qsd'].get('emojis')) + if "emojis" in results["qsd"]: + results["emojis"] = parse_bool(results["qsd"].get("emojis")) # Store our persistent storage boolean - if 'store' in results['qsd']: - results['store'] = results['qsd']['store'] + # Allow overriding the default timezone + if "tz" in results["qsd"]: + results["tz"] = results["qsd"].get("tz", "") + + if "store" in results["qsd"]: + results["store"] = results["qsd"]["store"] return results @staticmethod - def parse_native_url(url): - """ - This is a base class that can be optionally over-ridden by child - classes who can build their 
Apprise URL based on the one provided - by the notification service they choose to use. - - The intent of this is to make Apprise a little more userfriendly - to people who aren't familiar with constructing URLs and wish to - use the ones that were just provied by their notification serivice - that they're using. - - This function will return None if the passed in URL can't be matched - as belonging to the notification service. Otherwise this function - should return the same set of results that parse_url() does. + def parse_native_url(url: str) -> Optional[dict[str, Any]]: + """This is a base class that can be optionally over-ridden by child + classes who can build their Apprise URL based on the one provided by + the notification service they choose to use. + + The intent of this is to make Apprise a little more userfriendly to + people who aren't familiar with constructing URLs and wish to use the + ones that were just provied by their notification serivice that they're + using. + + This function will return None if the passed in URL can't be matched as + belonging to the notification service. Otherwise this function should + return the same set of results that parse_url() does. """ return None @property def store(self): - """ - Returns a pointer to our persistent store for use. - - The best use cases are: - self.store.get('key') - self.store.set('key', 'value') - self.store.delete('key1', 'key2', ...) + """Returns a pointer to our persistent store for use. - You can also access the keys this way: - self.store['key'] + The best use cases are: + self.store.get('key') + self.store.set('key', 'value') + self.store.delete('key1', 'key2', ...) 
- And clear them: - del self.store['key'] + You can also access the keys this way: + self.store['key'] + And clear them: + del self.store['key'] """ if self.__store is None: # Initialize our persistent store for use self.__store = PersistentStore( namespace=self.url_id(), path=self.asset.storage_path, - mode=self.asset.storage_mode) + mode=self.asset.storage_mode, + ) return self.__store + + @property + def tzinfo(self) -> tzinfo: + """Returns our tzinfo file associated with this plugin if set + otherwise the default timezone is returned. + """ + return self.__tzinfo if self.__tzinfo else self.asset.tzinfo diff --git a/libs/apprise/plugins/base.pyi b/libs/apprise/plugins/base.pyi deleted file mode 100644 index 9cf3e404c8..0000000000 --- a/libs/apprise/plugins/base.pyi +++ /dev/null @@ -1 +0,0 @@ -class NotifyBase: ... \ No newline at end of file diff --git a/libs/apprise/plugins/bluesky.py b/libs/apprise/plugins/bluesky.py new file mode 100644 index 0000000000..d52ac9e5e4 --- /dev/null +++ b/libs/apprise/plugins/bluesky.py @@ -0,0 +1,688 @@ +# BSD 2-Clause License +# +# Apprise - Push Notification Library. +# Copyright (c) 2026, Chris Caron +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +# 1. Create a BlueSky account +# 2. Access Settings -> Privacy and Security +# 3. Generate an App Password. Optionally grant yourself access to Direct +# Messages if you want to be able to send them +# 4. Assemble your Apprise URL like: +# bluesky://handle@you-token-here +# +from datetime import datetime, timedelta, timezone +import json +import re + +import requests + +from ..attachment.base import AttachBase +from ..common import NotifyType +from ..locale import gettext_lazy as _ +from ..url import PrivacyMode +from .base import NotifyBase + +# For parsing handles +HANDLE_HOST_PARSE_RE = re.compile(r"(?P[^.]+)\.+(?P.+)$") + +IS_USER = re.compile(r"^\s*@?(?P[A-Z0-9_]+)(\.+(?P.+))?$", re.I) + + +class NotifyBlueSky(NotifyBase): + """A wrapper for BlueSky Notifications.""" + + # The default descriptive name associated with the Notification + service_name = "BlueSky" + + # The services URL + service_url = "https://bluesky.us/" + + # Protocol + secure_protocol = ("bsky", "bluesky") + + # A URL that takes you to the setup/help of the specific protocol + setup_url = "https://appriseit.com/services/bluesky/" + + # Support attachments + attachment_support = True + + # XRPC Suffix URLs; Structured as: + # https://host/{suffix} + + # Taken right from google.auth.helpers: + clock_skew = timedelta(seconds=10) + + # 1 hour in seconds (the lifetime of our token) + access_token_lifetime_sec = timedelta(seconds=3600) + + # Detect your Decentralized 
Identitifer (DID), then you can get your Auth + # Token. + xrpc_suffix_did = "/xrpc/com.atproto.identity.resolveHandle" + xrpc_suffix_session = "/xrpc/com.atproto.server.createSession" + xrpc_suffix_record = "/xrpc/com.atproto.repo.createRecord" + xrpc_suffix_blob = "/xrpc/com.atproto.repo.uploadBlob" + plc_directory = "https://plc.directory/{did}" + + # BlueSky is kind enough to return how many more requests we're allowed to + # continue to make within it's header response as: + # RateLimit-Reset: The epoc time (in seconds) we can expect our + # rate-limit to be reset. + # RateLimit-Remaining: an integer identifying how many requests we're + # still allow to make. + request_rate_per_sec = 0 + + # For Tracking Purposes + ratelimit_reset = datetime.now(timezone.utc).replace(tzinfo=None) + + # Remaining messages + ratelimit_remaining = 1 + + # The default BlueSky host to use if one isn't specified + bluesky_default_host = "bsky.social" + + # Our message body size + body_maxlen = 280 + + # BlueSky does not support a title + title_maxlen = 0 + + # Define object templates + templates = ("{schema}://{user}@{password}",) + + # Define our template tokens + template_tokens = dict( + NotifyBase.template_tokens, + **{ + "user": { + "name": _("Username"), + "type": "string", + "required": True, + }, + "password": { + "name": _("Password"), + "type": "string", + "private": True, + "required": True, + }, + }, + ) + + def __init__(self, **kwargs): + """Initialize BlueSky Object.""" + super().__init__(**kwargs) + + # Our access token + self.__access_token = self.store.get("access_token") + self.__refresh_token = None + self.__access_token_expiry = datetime.now(timezone.utc) + self.__endpoint = self.store.get("endpoint") + + if not self.user: + msg = "A BlueSky UserID/Handle must be specified." 
+ self.logger.warning(msg) + raise TypeError(msg) + + # Set our default host + self.host = self.bluesky_default_host + self.__endpoint = ( + f"https://{self.host}" if not self.host else self.__endpoint + ) + + # Identify our Handle (if define) + results = HANDLE_HOST_PARSE_RE.match(self.user) + if results: + self.user = results.group("handle").strip() + self.host = results.group("host").strip() + + return + + def send( + self, + body, + title="", + notify_type=NotifyType.INFO, + attach=None, + **kwargs, + ): + """Perform BlueSky Notification.""" + + if not self.__access_token and not self.login(): + # We failed to authenticate - we're done + return False + + # Track our returning blob IDs as they're stored on the BlueSky server + blobs = [] + + if attach and self.attachment_support: + url = f"{self.__endpoint}{self.xrpc_suffix_blob}" + # We need to upload our payload first so that we can source it + # in remaining messages + for no, attachment in enumerate(attach, start=1): + + # Perform some simple error checking + if not attachment: + # We could not access the attachment + self.logger.error( + "Could not access attachment" + f" {attachment.url(privacy=True)}." + ) + return False + + if not re.match(r"^image/.*", attachment.mimetype, re.I): + # Only support images at this time + self.logger.warning( + "Ignoring unsupported BlueSky attachment" + f" {attachment.url(privacy=True)}." 
+ ) + continue + + self.logger.debug( + "Preparing BlueSky attachment" + f" {attachment.url(privacy=True)}" + ) + + # Upload our image and get our blob associated with it + postokay, response = self._fetch( + url, + payload=attachment, + ) + + if not postokay: + # We can't post our attachment + return False + + # Prepare our filename + filename = ( + attachment.name if attachment.name else f"file{no:03}.dat" + ) + + if not (isinstance(response, dict) and response.get("blob")): + self.logger.debug( + "Could not attach the file to BlueSky: %s (mime=%s)", + filename, + attachment.mimetype, + ) + continue + + blobs.append((response.get("blob"), filename)) + + # Prepare our URL + did, endpoint = self.get_identifier() + url = f"{endpoint}{self.xrpc_suffix_record}" + + # prepare our batch of payloads to create + payloads = [] + + payload = { + "collection": "app.bsky.feed.post", + "repo": did, + "record": { + "text": body, + # 'YYYY-mm-ddTHH:MM:SSZ' + "createdAt": datetime.now(tz=timezone.utc).strftime("%FT%XZ"), + "$type": "app.bsky.feed.post", + }, + } + + if blobs: + for no, blob in enumerate(blobs, start=1): + payload_ = payload.copy() + if no > 1: + # + # multiple instances + # + # 1. update createdAt time + # 2. 
Change text to identify image no + payload_["record"]["createdAt"] = datetime.now( + tz=timezone.utc + ).strftime("%FT%XZ") + payload_["record"]["text"] = f"{no:02d}/{len(blobs):02d}" + + payload_["record"]["embed"] = { + "images": [{ + "image": blob[0], + "alt": blob[1], + }], + "$type": "app.bsky.embed.images", + } + payloads.append(payload_) + else: + payloads.append(payload) + + for payload in payloads: + # Send Login Information + postokay, response = self._fetch( + url, + payload=json.dumps(payload), + ) + if not postokay: + # We failed + # Bad responses look like: + # { + # 'error': 'InvalidRequest', + # 'message': 'reason' + # } + return False + return True + + def get_identifier(self, user=None, login=False): + """Performs a Decentralized User Lookup and returns the identifier.""" + + if user is None: + user = self.user + + user = f"{user}.{self.host}" if "." not in user else f"{user}" + did_key = f"did.{user}" + endpoint_key = f"endpoint.{user}" + + did = self.store.get(did_key) + endpoint = self.store.get(endpoint_key) + if did and endpoint: + # Early return + return did, endpoint + + # Step 1: Acquire DID from bsky.app + url = f"https://public.api.bsky.app{self.xrpc_suffix_did}" + params = {"handle": user} + + # Send Login Information + postokay, response = self._fetch( + url, + params=params, + method="GET", + # We set this boolean so internal recursion doesn't take place. + login=login, + ) + + if not postokay or not response or "did" not in response: + # We failed + return (False, False) + + # Store our DID + did = response.get("did") + + # Step 2: Use DID to find the PDS + if did.startswith("did:plc:"): + pds_url = self.plc_directory.format(did=did) + + # PDS Query + postokay, service_response = self._fetch( + pds_url, + method="GET", + # We set this boolean so internal recursion doesn't take place. 
+ login=login, + ) + if ( + not postokay + or not service_response + or "service" not in service_response + ): + # We failed + return (False, False) + + endpoint = next( + ( + s["serviceEndpoint"] + for s in service_response.get("service", []) + if s["type"] == "AtprotoPersonalDataServer" + ), + None, + ) + + elif did.startswith("did:web:"): + # Convert to domain + domain = did[8:] + web_did_url = f"https://{domain}/.well-known/did.json" + postokay, service_response = self._fetch( + web_did_url, + method="GET", + # We set this boolean so internal recursion doesn't take place. + login=login, + ) + if ( + not postokay + or not service_response + or "service" not in service_response + ): + # We failed + self.logger.warning( + "Could not fetch DID document for did:web identity " + f"{did}; ensure {web_did_url} is available." + ) + return (False, False) + + endpoint = next( + ( + s["serviceEndpoint"] + for s in service_response.get("service", []) + if s["type"] == "AtprotoPersonalDataServer" + ), + None, + ) + + else: + self.logger.warning( + f"Unknown BlueSky DID scheme detected in {did}" + ) + return (False, False) + + # Step 3: Send to correct endpoint + if not endpoint: + self.logger.warning("Failed to resolve BlueSky PDS endpoint") + return (False, False) + + self.store.set(did_key, did) + self.store.set(endpoint_key, endpoint) + return (did, endpoint) + + def login(self): + """A simple wrapper to authenticate with the BlueSky Server.""" + + # Acquire our Decentralized Identitifer + did, self.__endpoint = self.get_identifier(self.user, login=True) + if not did: + return False + + url = f"{self.__endpoint}{self.xrpc_suffix_session}" + + payload = { + "identifier": did, + "password": self.password, + } + + # Send Login Information + postokay, response = self._fetch( + url, + payload=json.dumps(payload), + # We set this boolean so internal recursion doesn't take place. 
+ login=True, + ) + + # Our response object looks like this (content has been altered for + # presentation purposes): + # { + # 'did': 'did:plc:ruk414jakghak402j1jqekj2', + # 'didDoc': { + # '@context': [ + # 'https://www.w3.org/ns/did/v1', + # 'https://w3id.org/security/multikey/v1', + # 'https://w3id.org/security/suites/secp256k1-2019/v1' + # ], + # 'id': 'did:plc:ruk414jakghak402j1jqekj2', + # 'alsoKnownAs': ['at://apprise.bsky.social'], + # 'verificationMethod': [ + # { + # 'id': 'did:plc:ruk414jakghak402j1jqekj2#atproto', + # 'type': 'Multikey', + # 'controller': 'did:plc:ruk414jakghak402j1jqekj2', + # 'publicKeyMultibase' 'redacted' + # } + # ], + # 'service': [ + # { + # 'id': '#atproto_pds', + # 'type': 'AtprotoPersonalDataServer', + # 'serviceEndpoint': + # 'https://woodtuft.us-west.host.bsky.network' + # } + # ] + # }, + # 'handle': 'apprise.bsky.social', + # 'email': 'whoami@gmail.com', + # 'emailConfirmed': True, + # 'emailAuthFactor': False, + # 'accessJwt': 'redacted', + # 'refreshJwt': 'redacted', + # 'active': True, + # } + + if not postokay or not response: + # We failed + return False + + # Acquire our Token + self.__access_token = response.get("accessJwt") + + # Handle other optional arguments we can use + self.__access_token_expiry = ( + self.access_token_lifetime_sec + + datetime.now(timezone.utc) + - self.clock_skew + ) + + # The Refresh Token + self.__refresh_token = response.get("refreshJwt", self.__refresh_token) + self.store.set( + "access_token", self.__access_token, self.__access_token_expiry + ) + self.store.set( + "refresh_token", self.__refresh_token, self.__access_token_expiry + ) + self.store.set("endpoint", self.__endpoint) + + self.logger.info( + f"Authenticated to BlueSky as {self.user}.{self.host}" + ) + return True + + def _fetch( + self, + url, + payload=None, + params=None, + method="POST", + content_type=None, + login=False, + ): + """Wrapper to BlueSky API requests object.""" + + # use what was specified, otherwise build 
headers dynamically + headers = { + "User-Agent": self.app_id, + "Content-Type": ( + payload.mimetype + if isinstance(payload, AttachBase) + else ( + "application/x-www-form-urlencoded; charset=utf-8" + if method == "GET" + else "application/json" + ) + ), + } + + if self.__access_token: + # Set our token + headers["Authorization"] = f"Bearer {self.__access_token}" + + # Some Debug Logging + self.logger.debug( + f"BlueSky {method} URL:" + f" {url} (cert_verify={self.verify_certificate})" + ) + self.logger.debug( + "BlueSky Payload: %s", + ( + str(payload) + if not isinstance(payload, AttachBase) + else "attach: " + payload.name + ), + ) + + # By default set wait to None + wait = None + + if self.ratelimit_remaining == 0: + # Determine how long we should wait for or if we should wait at + # all. This isn't fool-proof because we can't be sure the client + # time (calling this script) is completely synced up with the + # Twitter server. One would hope we're on NTP and our clocks are + # the same allowing this to role smoothly: + + now = datetime.now(timezone.utc).replace(tzinfo=None) + if now < self.ratelimit_reset: + # We need to throttle for the difference in seconds + # We add 0.3 seconds to the end just to allow a grace + # period. 
+ wait = (self.ratelimit_reset - now).total_seconds() + 0.3 + + # Always call throttle before any remote server i/o is made; + self.throttle(wait=wait) + + # Initialize a default value for our content value + content = {} + + # acquire our request mode + fn = requests.post if method == "POST" else requests.get + try: + r = fn( + url, + data=( + payload + if not isinstance(payload, AttachBase) + else payload.open() + ), + params=params, + headers=headers, + verify=self.verify_certificate, + timeout=self.request_timeout, + ) + + # Get our JSON content if it's possible + try: + content = json.loads(r.content) + + except (TypeError, ValueError, AttributeError): + # TypeError = r.content is not a String + # ValueError = r.content is Unparsable + # AttributeError = r.content is None + content = {} + + # Rate limit handling... our header objects at this point are: + # 'RateLimit-Limit': '10', # Total # of requests per hour + # 'RateLimit-Remaining': '9', # Requests remaining + # 'RateLimit-Reset': '1741631362', # Epoch Time + # 'RateLimit-Policy': '10;w=86400' # NoEntries;w= + try: + # Capture rate limiting if possible + self.ratelimit_remaining = int( + r.headers.get("ratelimit-remaining") + ) + self.ratelimit_reset = datetime.fromtimestamp( + int(r.headers.get("ratelimit-reset")), timezone.utc + ).replace(tzinfo=None) + + except (TypeError, ValueError): + # This is returned if we could not retrieve this information + # gracefully accept this state and move on + pass + + if r.status_code != requests.codes.ok: + # We had a problem + status_str = NotifyBlueSky.http_response_code_lookup( + r.status_code + ) + + self.logger.warning( + "Failed to send BlueSky {} to {}: {}error={}.".format( + method, url, ", " if status_str else "", r.status_code + ) + ) + + self.logger.debug( + "Response Details:\r\n%r", (r.content or b"")[:2000]) + + # Mark our failure + return (False, content) + + except requests.RequestException as e: + self.logger.warning( + f"Exception received when 
sending BlueSky {method} to {url}: " + ) + self.logger.debug(f"Socket Exception: {e!s}") + + # Mark our failure + return (False, content) + + except OSError as e: + self.logger.warning( + "An I/O error occurred while handling {}.".format( + payload.name + if isinstance(payload, AttachBase) + else payload + ) + ) + self.logger.debug(f"I/O Exception: {e!s}") + return (False, content) + + return (True, content) + + @property + def url_identifier(self): + """Returns all of the identifiers that make this URL unique from + another simliar one. + + Targets or end points should never be identified here. + """ + return ( + self.secure_protocol[0], + self.user, + self.password, + ) + + def url(self, privacy=False, *args, **kwargs): + """Returns the URL built dynamically based on specified arguments.""" + + # Apply our other parameters + params = self.url_parameters(privacy=privacy, *args, **kwargs) + + user = self.user + if self.host != self.bluesky_default_host: + user += f".{self.host}" + + # our URL + return "{schema}://{user}@{password}?{params}".format( + schema=self.secure_protocol[0], + user=NotifyBlueSky.quote(user, safe=""), + password=self.pprint( + self.password, privacy, mode=PrivacyMode.Secret, safe="" + ), + params=NotifyBlueSky.urlencode(params), + ) + + @staticmethod + def parse_url(url): + """Parses the URL and returns enough arguments that can allow us to re- + instantiate this object.""" + results = NotifyBase.parse_url(url, verify_host=False) + if not results: + # We're done early as we couldn't load the results + return results + + if not results.get("password") and results["host"]: + results["password"] = NotifyBlueSky.unquote(results["host"]) + + # Do not use host field + results["host"] = None + return results diff --git a/libs/apprise/plugins/brevo.py b/libs/apprise/plugins/brevo.py new file mode 100644 index 0000000000..bc24a02a69 --- /dev/null +++ b/libs/apprise/plugins/brevo.py @@ -0,0 +1,573 @@ +# BSD 2-Clause License +# +# Apprise - Push 
Notification Library. +# Copyright (c) 2026, Chris Caron +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +# API Reference: https://developers.brevo.com/reference/getting-started-1 + +from json import dumps +import logging +from os.path import splitext + +import requests + +from .. 
import exception +from ..common import NotifyFormat, NotifyType +from ..conversion import convert_between +from ..locale import gettext_lazy as _ +from ..utils.parse import is_email, parse_list, validate_regex +from ..utils.sanitize import sanitize_payload +from .base import NotifyBase + +# Extend HTTP Error Messages (most common Brevo SMTP errors) +BREVO_HTTP_ERROR_MAP = { + 400: "Bad Request - Invalid payload or missing parameters.", + 401: "Unauthorized - Invalid Brevo API key.", + 402: "Payment Required - Plan limitation or credit issue.", + 429: "Too Many Requests - Rate limit exceeded.", +} + +# Comprehensive list of Brevo-supported extensions for Transactional Emails +# Source: Brevo API Documentation & Transactional Attachment Guidelines +BREVO_VALID_EXTENSIONS = ( + # Documents & Text + "xlsx", "xls", "ods", "docx", "docm", "doc", "csv", "pdf", "txt", + "rtf", "msg", "pub", "mobi", "ppt", "pptx", "eps", "odt", "ics", + "xml", "css", "html", "htm", "shtml", + # Images + "gif", "jpg", "jpeg", "png", "tif", "tiff", "bmp", "cgm", + # Archives + "zip", "tar", "ez", "pkpass", + # Audio + "mp3", "m4a", "m4v", "wma", "ogg", "flac", "wav", "aif", "aifc", "aiff", + # Video + "mp4", "mov", "avi", "mkv", "mpeg", "mpg", "wmv" +) + + +class NotifyBrevo(NotifyBase): + """A wrapper for Notify Brevo Notifications.""" + + # The default descriptive name associated with the Notification + service_name = "Brevo" + + # The services URL + service_url = "https://www.brevo.com/" + + # The default secure protocol + secure_protocol = "brevo" + + # A URL that takes you to the setup/help of the specific protocol + setup_url = "https://appriseit.com/services/brevo/" + + # Default to markdown + notify_format = NotifyFormat.HTML + + # The default Email API URL to use + notify_url = "https://api.brevo.com/v3/smtp/email" + + # Support attachments + attachment_support = True + + # Allow 300 requests per minute. 
+ # 60/300 = 0.2 + request_rate_per_sec = 0.2 + + # The default subject to use if one isn't specified. + default_empty_subject = "" + + # Define object templates + templates = ( + "{schema}://{apikey}:{from_email}", + "{schema}://{apikey}:{from_email}/{targets}", + ) + + # Define our template arguments + template_tokens = dict( + NotifyBase.template_tokens, + **{ + "apikey": { + "name": _("API Key"), + "type": "string", + "private": True, + "required": True, + "regex": (r"^[a-zA-Z0-9._-]+$", "i"), + }, + "from_email": { + "name": _("Source Email"), + "type": "string", + "required": True, + }, + "target_email": { + "name": _("Target Email"), + "type": "string", + "map_to": "targets", + }, + "targets": { + "name": _("Targets"), + "type": "list:string", + }, + }, + ) + + # Define our template arguments + template_args = dict( + NotifyBase.template_args, + **{ + "to": { + "alias_of": "targets", + }, + "cc": { + "name": _("Carbon Copy"), + "type": "list:string", + }, + "bcc": { + "name": _("Blind Carbon Copy"), + "type": "list:string", + }, + "reply": { + "name": _("Reply To Email"), + "type": "string", + "map_to": "reply_to", + }, + }, + ) + + def __init__( + self, + apikey, + from_email, + targets=None, + reply_to=None, + cc=None, + bcc=None, + **kwargs, + ): + """Initialize Notify Brevo Object.""" + super().__init__(**kwargs) + + # API Key (associated with project) + self.apikey = validate_regex( + apikey, *self.template_tokens["apikey"]["regex"] + ) + if not self.apikey: + msg = f"An invalid Brevo API Key ({apikey}) was specified." 
+ self.logger.warning(msg) + raise TypeError(msg) + + result = is_email(from_email) + if not result: + msg = f"Invalid ~From~ email specified: {from_email}" + self.logger.warning(msg) + raise TypeError(msg) + + # Store email address + self.from_email = result["full_email"] + + # Reply-to + self.reply_to = None + if reply_to: + result = is_email(reply_to) + if not result: + msg = "An invalid Brevo Reply To ({}) was specified.".format( + f"{reply_to}") + self.logger.warning(msg) + raise TypeError(msg) + + self.reply_to = ( + result["name"] if result["name"] else False, + result["full_email"], + ) + + # Acquire Targets (To Emails) + self.targets = [] + + # Acquire Carbon Copies + self.cc = set() + + # Acquire Blind Carbon Copies + self.bcc = set() + + # Validate recipients (to:) and drop bad ones: + if targets: + for recipient in parse_list(targets): + + result = is_email(recipient) + if result: + self.targets.append(result["full_email"]) + continue + + self.logger.warning( + f"Dropped invalid email ({recipient}) specified.", + ) + else: + # add ourselves + self.targets.append(self.from_email) + + # Validate recipients (cc:) and drop bad ones: + for recipient in parse_list(cc): + + result = is_email(recipient) + if result: + self.cc.add(result["full_email"]) + continue + + self.logger.warning( + f"Dropped invalid Carbon Copy email ({recipient}) specified.", + ) + + # Validate recipients (bcc:) and drop bad ones: + for recipient in parse_list(bcc): + + result = is_email(recipient) + if result: + self.bcc.add(result["full_email"]) + continue + + self.logger.warning( + "Dropped invalid Blind Carbon Copy email " + f"({recipient}) specified.", + ) + + return + + @property + def url_identifier(self): + """Returns all of the identifiers that make this URL unique from + another simliar one. + + Targets or end points should never be identified here. 
+ """ + return (self.secure_protocol, self.apikey, self.from_email) + + def url(self, privacy=False, *args, **kwargs): + """Returns the URL built dynamically based on specified arguments.""" + + # Our URL parameters + params = self.url_parameters(privacy=privacy, *args, **kwargs) + + if len(self.cc) > 0: + # Handle our Carbon Copy Addresses + params["cc"] = ",".join(self.cc) + + if len(self.bcc) > 0: + # Handle our Blind Carbon Copy Addresses + params["bcc"] = ",".join(self.bcc) + + if self.reply_to: + # Handle our reply to address + params["reply"] = ( + "{} <{}>".format(*self.reply_to) + if self.reply_to[0] + else self.reply_to[1] + ) + + # a simple boolean check as to whether we display our target emails + # or not + has_targets = not ( + len(self.targets) == 1 and self.targets[0] == self.from_email + ) + + return "{schema}://{apikey}:{from_email}/{targets}?{params}".format( + schema=self.secure_protocol, + apikey=self.pprint(self.apikey, privacy, safe=""), + # never encode email since it plays a huge role in our hostname + from_email=self.from_email, + targets=( + "" + if not has_targets + else "/".join( + [NotifyBrevo.quote(x, safe="") for x in self.targets] + ) + ), + params=NotifyBrevo.urlencode(params), + ) + + def __len__(self): + """Returns the number of targets associated with this notification.""" + return max(len(self.targets), 1) + + def send( + self, + body, + title="", + notify_type=NotifyType.INFO, + attach=None, + **kwargs, + ): + """Perform Brevo Notification.""" + + if not self.targets: + # There is no one to email; we're done + self.logger.warning( + "There are no Brevo email recipients to notify") + return False + + headers = { + "User-Agent": self.app_id, + "Content-Type": "application/json", + "Accept": "application/json", + "api-key": self.apikey, + } + + # error tracking (used for function return) + has_error = False + + # A Simple Email Payload Template + payload_ = { + "sender": { + "email": self.from_email, + }, + # Placeholder, filled 
per target + "to": [{"email": None}], + "subject": title if title else self.default_empty_subject, + } + # Body selection + use_html = self.notify_format == NotifyFormat.HTML + + if use_html: + # body already normalised; keep your existing logic + payload_["htmlContent"] = body + payload_["textContent"] = convert_between( + NotifyFormat.HTML, NotifyFormat.TEXT, body + ) + else: + # Plain text requested, but Brevo still wants HTML + payload_["textContent"] = body + payload_["htmlContent"] = convert_between( + NotifyFormat.TEXT, NotifyFormat.HTML, body + ) + + if attach and self.attachment_support: + attachments = [] + + # Send our attachments + for no, attachment in enumerate(attach, start=1): + # Perform some simple error checking + if not attachment: + # We could not access the attachment + self.logger.error( + "Could not access Brevo attachment" + f" {attachment.url(privacy=True)}." + ) + return False + + # Brevo does not track content/mime type and relies 100% + # entirely on the filename extension as to whether or not it + # will accept it or not. + # + # The below prepares a safe_name (which can't be .dat like + # other plugins since Brevo rejects that type). For this + # reason .txt is chosen intentionally for this circumstance. + + # Use the attachment name if available, otherwise default to a + # generic name + raw_name = attachment.name \ + if attachment.name else f"file{no:03}.txt" + + # If the filename does NOT match a supported extension, append + # .txt + _, ext = splitext(raw_name) + safe_name = f"{raw_name}.txt" if ( + not ext or ext[1:].lower() + not in BREVO_VALID_EXTENSIONS) else raw_name + + try: + attachments.append({ + "content": attachment.base64(), + "name": safe_name, + }) + + except exception.AppriseException: + # We could not access the attachment + self.logger.error( + "Could not access Brevo attachment" + f" {attachment.url(privacy=True)}." 
+ ) + return False + + self.logger.debug( + "Appending Brevo attachment" + f" {attachment.url(privacy=True)}" + ) + + # Append our attachments to the payload + payload_.update({ + "attachment": attachments, + }) + + if self.reply_to: + payload_["replyTo"] = {"email": self.reply_to[1]} + + targets = list(self.targets) + while len(targets) > 0: + target = targets.pop(0) + + # Create a copy of our template + payload = payload_.copy() + + # the cc, bcc, to field must be unique or SendMail will fail, the + # below code prepares this by ensuring the target isn't in the cc + # list or bcc list. It also makes sure the cc list does not contain + # any of the bcc entries + cc = self.cc - self.bcc - {target} + bcc = self.bcc - {target} + + # Set our main recipient + payload["to"] = [{"email": target}] + + if len(cc): + payload["cc"] = [{"email": email} for email in cc] + + if len(bcc): + payload["bcc"] = [{"email": email} for email in bcc] + + # Some Debug Logging + if self.logger.isEnabledFor(logging.DEBUG): + # Due to attachments; output can be quite heavy and io + # intensive. + # To accommodate this, we only show our debug payload + # information if required. 
+ self.logger.debug( + "Brevo POST URL:" + f" {self.notify_url} " + f"(cert_verify={self.verify_certificate!r})" + ) + self.logger.debug( + "Brevo Payload: %s", sanitize_payload(payload)) + + # Always call throttle before any remote server i/o is made + self.throttle() + try: + r = requests.post( + self.notify_url, + data=dumps(payload), + headers=headers, + verify=self.verify_certificate, + timeout=self.request_timeout, + ) + if r.status_code not in ( + requests.codes.ok, + requests.codes.accepted, + requests.codes.created, + ): + # We had a problem + status_str = NotifyBrevo.http_response_code_lookup( + r.status_code, BREVO_HTTP_ERROR_MAP + ) + + self.logger.warning( + "Failed to send Brevo notification to {}: " + "{}{}error={}.".format( + target, + status_str, + ", " if status_str else "", + r.status_code, + ) + ) + + self.logger.debug( + "Response Details:\r\n%r", (r.content or b"")[:2000]) + + # Mark our failure + has_error = True + continue + + else: + self.logger.info( + f"Sent Brevo notification to {target}." + ) + + except requests.RequestException as e: + self.logger.warning( + "A Connection error occurred sending Brevo " + f"notification to {target}." + ) + self.logger.debug(f"Socket Exception: {e!s}") + + # Mark our failure + has_error = True + continue + + return not has_error + + @staticmethod + def parse_url(url): + """Parses the URL and returns enough arguments that can allow us to re- + instantiate this object.""" + + results = NotifyBase.parse_url(url) + if not results: + # We're done early as we couldn't load the results + return results + + # Our URL looks like this: + # {schema}://{apikey}:{from_email}/{targets} + # + # which actually equates to: + # {schema}://{user}:{password}@{host}/{email1}/{email2}/etc.. 
+ # ^ ^ ^ + # | | | + # apikey -from addr- + + if not results.get("user"): + # An API Key as not properly specified + return None + + if not results.get("password"): + # A From Email was not correctly specified + return None + + # Prepare our API Key + results["apikey"] = NotifyBrevo.unquote(results["user"]) + + # Prepare our From Email Address + results["from_email"] = "{}@{}".format( + NotifyBrevo.unquote(results["password"]), + NotifyBrevo.unquote(results["host"]), + ) + + # Acquire our targets + results["targets"] = NotifyBrevo.split_path(results["fullpath"]) + + # The 'to' makes it easier to use yaml configuration + if "to" in results["qsd"] and len(results["qsd"]["to"]): + results["targets"] += NotifyBrevo.parse_list( + results["qsd"]["to"] + ) + + # Handle Carbon Copy Addresses + if "cc" in results["qsd"] and len(results["qsd"]["cc"]): + results["cc"] = NotifyBrevo.parse_list(results["qsd"]["cc"]) + + # Handle Blind Carbon Copy Addresses + if "bcc" in results["qsd"] and len(results["qsd"]["bcc"]): + results["bcc"] = NotifyBrevo.parse_list(results["qsd"]["bcc"]) + + # Handle Reply To Address + if "reply" in results["qsd"] and len(results["qsd"]["reply"]): + results["reply_to"] = NotifyBrevo.unquote(results["qsd"]["reply"]) + + return results diff --git a/libs/apprise/plugins/bulksms.py b/libs/apprise/plugins/bulksms.py index c5608fa686..372c8fce6c 100644 --- a/libs/apprise/plugins/bulksms.py +++ b/libs/apprise/plugins/bulksms.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. 
-# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -32,27 +31,27 @@ # # API is documented here: # - https://www.bulksms.com/developer/json/v1/#tag/Message +from itertools import chain +import json import re + import requests -import json -from itertools import chain -from .base import NotifyBase -from ..url import PrivacyMode + from ..common import NotifyType -from ..utils.parse import is_phone_no, parse_phone_no, parse_bool from ..locale import gettext_lazy as _ - +from ..url import PrivacyMode +from ..utils.parse import is_phone_no, parse_bool, parse_phone_no +from .base import NotifyBase IS_GROUP_RE = re.compile( - r'^(@?(?P[A-Z0-9_-]+))$', + r"^(@?(?P[A-Z0-9_-]+))$", re.IGNORECASE, ) -class BulkSMSRoutingGroup(object): - """ - The different categories of routing - """ +class BulkSMSRoutingGroup: + """The different categories of routing.""" + ECONOMY = "ECONOMY" STANDARD = "STANDARD" PREMIUM = "PREMIUM" @@ -66,34 +65,31 @@ class BulkSMSRoutingGroup(object): ) -class BulkSMSEncoding(object): - """ - The different categories of routing - """ +class BulkSMSEncoding: + """The different categories of routing.""" + TEXT = "TEXT" UNICODE = "UNICODE" BINARY = "BINARY" class NotifyBulkSMS(NotifyBase): - """ - A wrapper for BulkSMS Notifications - """ + """A wrapper for BulkSMS Notifications.""" # The default descriptive name associated with the Notification - service_name = 'BulkSMS' + service_name = "BulkSMS" # The services URL - service_url = 'https://bulksms.com/' + service_url = "https://bulksms.com/" # All notification requests are secure - secure_protocol = 'bulksms' + secure_protocol = "bulksms" # A URL that takes you to the setup/help of the specific protocol - setup_url = 'https://github.com/caronc/apprise/wiki/Notify_bulksms' + setup_url = "https://appriseit.com/services/bulksms/" # BulkSMS 
uses the http protocol with JSON requests - notify_url = 'https://api.bulksms.com/v1/messages' + notify_url = "https://api.bulksms.com/v1/messages" # The maximum length of the body body_maxlen = 160 @@ -106,147 +102,165 @@ class NotifyBulkSMS(NotifyBase): title_maxlen = 0 # Define object templates - templates = ( - '{schema}://{user}:{password}@{targets}', - ) + templates = ("{schema}://{user}:{password}@{targets}",) # Define our template tokens - template_tokens = dict(NotifyBase.template_tokens, **{ - 'user': { - 'name': _('User Name'), - 'type': 'string', - 'required': True, + template_tokens = dict( + NotifyBase.template_tokens, + **{ + "user": { + "name": _("User Name"), + "type": "string", + "required": True, + }, + "password": { + "name": _("Password"), + "type": "string", + "private": True, + "required": True, + }, + "target_phone": { + "name": _("Target Phone No"), + "type": "string", + "prefix": "+", + "regex": (r"^[0-9\s)(+-]+$", "i"), + "map_to": "targets", + }, + "target_group": { + "name": _("Target Group"), + "type": "string", + "prefix": "@", + "regex": (r"^[A-Z0-9 _-]+$", "i"), + "map_to": "targets", + }, + "targets": { + "name": _("Targets"), + "type": "list:string", + "required": True, + }, }, - 'password': { - 'name': _('Password'), - 'type': 'string', - 'private': True, - 'required': True, - }, - 'target_phone': { - 'name': _('Target Phone No'), - 'type': 'string', - 'prefix': '+', - 'regex': (r'^[0-9\s)(+-]+$', 'i'), - 'map_to': 'targets', - }, - 'target_group': { - 'name': _('Target Group'), - 'type': 'string', - 'prefix': '+', - 'regex': (r'^[A-Z0-9 _-]+$', 'i'), - 'map_to': 'targets', - }, - 'targets': { - 'name': _('Targets'), - 'type': 'list:string', - 'required': True, - }, - }) + ) # Define our template arguments - template_args = dict(NotifyBase.template_args, **{ - 'to': { - 'alias_of': 'targets', - }, - 'from': { - 'name': _('From Phone No'), - 'type': 'string', - 'regex': (r'^\+?[0-9\s)(+-]+$', 'i'), - 'map_to': 'source', - }, - 
'route': { - 'name': _('Route Group'), - 'type': 'choice:string', - 'values': BULKSMS_ROUTING_GROUPS, - 'default': BulkSMSRoutingGroup.STANDARD, + template_args = dict( + NotifyBase.template_args, + **{ + "to": { + "alias_of": "targets", + }, + "from": { + "name": _("From Phone No"), + "type": "string", + "regex": (r"^\+?[0-9\s)(+-]+$", "i"), + "map_to": "source", + }, + "route": { + "name": _("Route Group"), + "type": "choice:string", + "values": BULKSMS_ROUTING_GROUPS, + "default": BulkSMSRoutingGroup.STANDARD, + }, + "unicode": { + # Unicode characters + "name": _("Unicode Characters"), + "type": "bool", + "default": True, + }, + "batch": { + "name": _("Batch Mode"), + "type": "bool", + "default": False, + }, }, - 'unicode': { - # Unicode characters - 'name': _('Unicode Characters'), - 'type': 'bool', - 'default': True, - }, - 'batch': { - 'name': _('Batch Mode'), - 'type': 'bool', - 'default': False, - }, - }) + ) - def __init__(self, source=None, targets=None, unicode=None, batch=None, - route=None, **kwargs): - """ - Initialize BulkSMS Object - """ - super(NotifyBulkSMS, self).__init__(**kwargs) + def __init__( + self, + source=None, + targets=None, + unicode=None, + batch=None, + route=None, + **kwargs, + ): + """Initialize BulkSMS Object.""" + super().__init__(**kwargs) self.source = None if source: result = is_phone_no(source) if not result: - msg = 'The Account (From) Phone # specified ' \ - '({}) is invalid.'.format(source) + msg = ( + "The Account (From) Phone # specified " + f"({source}) is invalid." 
+ ) self.logger.warning(msg) raise TypeError(msg) # Tidy source - self.source = '+{}'.format(result['full']) + self.source = "+{}".format(result["full"]) # Setup our route - self.route = self.template_args['route']['default'] \ - if not isinstance(route, str) else route.upper() + self.route = ( + self.template_args["route"]["default"] + if not isinstance(route, str) + else route.upper() + ) if self.route not in BULKSMS_ROUTING_GROUPS: - msg = 'The route specified ({}) is invalid.'.format(route) + msg = f"The route specified ({route}) is invalid." self.logger.warning(msg) raise TypeError(msg) # Define whether or not we should set the unicode flag - self.unicode = self.template_args['unicode']['default'] \ - if unicode is None else bool(unicode) + self.unicode = ( + self.template_args["unicode"]["default"] + if unicode is None + else bool(unicode) + ) # Define whether or not we should operate in a batch mode - self.batch = self.template_args['batch']['default'] \ - if batch is None else bool(batch) + self.batch = ( + self.template_args["batch"]["default"] + if batch is None + else bool(batch) + ) # Parse our targets - self.targets = list() - self.groups = list() + self.targets = [] + self.groups = [] for target in parse_phone_no(targets): # Parse each phone number we found result = is_phone_no(target) if result: - self.targets.append('+{}'.format(result['full'])) + self.targets.append("+{}".format(result["full"])) continue group_re = IS_GROUP_RE.match(target) if group_re and not target.isdigit(): # If the target specified is all digits, it MUST have a @ # in front of it to eliminate any ambiguity - self.groups.append(group_re.group('group')) + self.groups.append(group_re.group("group")) continue self.logger.warning( - 'Dropped invalid phone # and/or Group ' - '({}) specified.'.format(target), + f"Dropped invalid phone # and/or Group ({target}) specified.", ) return - def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): - """ - Perform BulkSMS 
Notification - """ + def send(self, body, title="", notify_type=NotifyType.INFO, **kwargs): + """Perform BulkSMS Notification.""" if not (self.password and self.user): self.logger.warning( - 'There were no valid login credentials provided') + "There were no valid login credentials provided" + ) return False if not (self.targets or self.groups): # We have nothing to notify - self.logger.warning('There are no BulkSMS targets to notify') + self.logger.warning("There are no BulkSMS targets to notify") return False # Send in batches if identified to do so @@ -257,34 +271,42 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): # Prepare our headers headers = { - 'User-Agent': self.app_id, - 'Content-Type': 'application/json', + "User-Agent": self.app_id, + "Content-Type": "application/json", } # Prepare our payload payload = { # The To gets populated in the loop below - 'to': None, - 'body': body, - 'routingGroup': self.route, - 'encoding': BulkSMSEncoding.UNICODE - if self.unicode else BulkSMSEncoding.TEXT, + "to": None, + "body": body, + "routingGroup": self.route, + "encoding": ( + BulkSMSEncoding.UNICODE + if self.unicode + else BulkSMSEncoding.TEXT + ), # Options are NONE, ALL and ERRORS - 'deliveryReports': "ERRORS" + "deliveryReports": "ERRORS", } if self.source: payload.update({ - 'from': self.source, + "from": self.source, }) # Authentication auth = (self.user, self.password) # Prepare our targets - targets = list(self.targets) if batch_size == 1 else \ - [self.targets[index:index + batch_size] - for index in range(0, len(self.targets), batch_size)] + targets = ( + list(self.targets) + if batch_size == 1 + else [ + self.targets[index : index + batch_size] + for index in range(0, len(self.targets), batch_size) + ] + ) targets += [{"type": "GROUP", "name": g} for g in self.groups] while len(targets): @@ -292,22 +314,24 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): target = targets.pop(0) # Prepare our user - 
payload['to'] = target + payload["to"] = target # Printable reference if isinstance(target, dict): - p_target = target['name'] + p_target = target["name"] elif isinstance(target, list): - p_target = '{} targets'.format(len(target)) + p_target = f"{len(target)} targets" else: p_target = target # Some Debug Logging - self.logger.debug('BulkSMS POST URL: {} (cert_verify={})'.format( - self.notify_url, self.verify_certificate)) - self.logger.debug('BulkSMS Payload: {}' .format(payload)) + self.logger.debug( + "BulkSMS POST URL:" + f" {self.notify_url} (cert_verify={self.verify_certificate})" + ) + self.logger.debug(f"BulkSMS Payload: {payload}") # Always call throttle before any remote server i/o is made self.throttle() @@ -342,24 +366,29 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): # ] if r.status_code not in ( - requests.codes.created, requests.codes.ok): + requests.codes.created, + requests.codes.ok, + ): # We had a problem - status_str = \ - NotifyBase.http_response_code_lookup(r.status_code) + status_str = NotifyBase.http_response_code_lookup( + r.status_code + ) # set up our status code to use status_code = r.status_code self.logger.warning( - 'Failed to send BulkSMS notification to {}: ' - '{}{}error={}.'.format( + "Failed to send BulkSMS notification to {}: " + "{}{}error={}.".format( p_target, status_str, - ', ' if status_str else '', - status_code)) + ", " if status_str else "", + status_code, + ) + ) self.logger.debug( - 'Response Details:\r\n{}'.format(r.content)) + "Response Details:\r\n%r", (r.content or b"")[:2000]) # Mark our failure has_error = True @@ -367,13 +396,15 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): else: self.logger.info( - 'Sent BulkSMS notification to {}.'.format(p_target)) + f"Sent BulkSMS notification to {p_target}." 
+ ) except requests.RequestException as e: self.logger.warning( - 'A Connection error occurred sending BulkSMS: to %s ', - p_target) - self.logger.debug('Socket Exception: %s' % str(e)) + "A Connection error occurred sending BulkSMS: to %s ", + p_target, + ) + self.logger.debug(f"Socket Exception: {e!s}") # Mark our failure has_error = True @@ -382,41 +413,48 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): return not has_error def url(self, privacy=False, *args, **kwargs): - """ - Returns the URL built dynamically based on specified arguments. - """ + """Returns the URL built dynamically based on specified arguments.""" # Define any URL parameters params = { - 'unicode': 'yes' if self.unicode else 'no', - 'batch': 'yes' if self.batch else 'no', - 'route': self.route, + "unicode": "yes" if self.unicode else "no", + "batch": "yes" if self.batch else "no", + "route": self.route, } if self.source: - params['from'] = self.source + params["from"] = self.source # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) - return '{schema}://{user}:{password}@{targets}/?{params}'.format( + return "{schema}://{user}:{password}@{targets}/?{params}".format( schema=self.secure_protocol, - user=self.pprint(self.user, privacy, safe=''), + user=self.pprint(self.user, privacy, safe=""), password=self.pprint( - self.password, privacy, mode=PrivacyMode.Secret, safe=''), - targets='/'.join(chain( - [NotifyBulkSMS.quote('{}'.format(x), safe='+') - for x in self.targets], - [NotifyBulkSMS.quote('@{}'.format(x), safe='@') - for x in self.groups])), - params=NotifyBulkSMS.urlencode(params)) + self.password, privacy, mode=PrivacyMode.Secret, safe="" + ), + targets="/".join( + chain( + [ + NotifyBulkSMS.quote(f"{x}", safe="+") + for x in self.targets + ], + [ + NotifyBulkSMS.quote(f"@{x}", safe="@") + for x in self.groups + ], + ) + ), + params=NotifyBulkSMS.urlencode(params), + ) @property def url_identifier(self): - """ - Returns 
all of the identifiers that make this URL unique from - another simliar one. Targets or end points should never be identified - here. + """Returns all of the identifiers that make this URL unique from + another simliar one. + + Targets or end points should never be identified here. """ return ( self.secure_protocol, @@ -425,9 +463,7 @@ def url_identifier(self): ) def __len__(self): - """ - Returns the number of targets associated with this notification - """ + """Returns the number of targets associated with this notification.""" # # Factor batch into calculation @@ -437,18 +473,16 @@ def __len__(self): batch_size = 1 if not self.batch else self.default_batch_size targets = len(self.targets) if batch_size > 1: - targets = int(targets / batch_size) + \ - (1 if targets % batch_size else 0) + targets = int(targets / batch_size) + ( + 1 if targets % batch_size else 0 + ) return targets + len(self.groups) @staticmethod def parse_url(url): - """ - Parses the URL and returns enough arguments that can allow - us to re-instantiate this object. - - """ + """Parses the URL and returns enough arguments that can allow us to re- + instantiate this object.""" results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results @@ -456,36 +490,40 @@ def parse_url(url): # Get our entries; split_path() looks after unquoting content for us # by default - results['targets'] = [ - NotifyBulkSMS.unquote(results['host']), - *NotifyBulkSMS.split_path(results['fullpath'])] + results["targets"] = [ + NotifyBulkSMS.unquote(results["host"]), + *NotifyBulkSMS.split_path(results["fullpath"]), + ] # Support the 'from' and 'source' variable so that we can support # targets this way too. 
# The 'from' makes it easier to use yaml configuration - if 'from' in results['qsd'] and len(results['qsd']['from']): - results['source'] = \ - NotifyBulkSMS.unquote(results['qsd']['from']) + if "from" in results["qsd"] and len(results["qsd"]["from"]): + results["source"] = NotifyBulkSMS.unquote(results["qsd"]["from"]) # Support the 'to' variable so that we can support targets this way too # The 'to' makes it easier to use yaml configuration - if 'to' in results['qsd'] and len(results['qsd']['to']): - results['targets'] += \ - NotifyBulkSMS.parse_phone_no(results['qsd']['to']) + if "to" in results["qsd"] and len(results["qsd"]["to"]): + results["targets"] += NotifyBulkSMS.parse_phone_no( + results["qsd"]["to"] + ) # Unicode Characters - results['unicode'] = \ - parse_bool(results['qsd'].get( - 'unicode', NotifyBulkSMS.template_args['unicode']['default'])) + results["unicode"] = parse_bool( + results["qsd"].get( + "unicode", NotifyBulkSMS.template_args["unicode"]["default"] + ) + ) # Get Batch Mode Flag - results['batch'] = \ - parse_bool(results['qsd'].get( - 'batch', NotifyBulkSMS.template_args['batch']['default'])) + results["batch"] = parse_bool( + results["qsd"].get( + "batch", NotifyBulkSMS.template_args["batch"]["default"] + ) + ) # Allow one to define a route group - if 'route' in results['qsd'] and len(results['qsd']['route']): - results['route'] = \ - NotifyBulkSMS.unquote(results['qsd']['route']) + if "route" in results["qsd"] and len(results["qsd"]["route"]): + results["route"] = NotifyBulkSMS.unquote(results["qsd"]["route"]) return results diff --git a/libs/apprise/plugins/bulkvs.py b/libs/apprise/plugins/bulkvs.py index 835887a904..244f7164de 100644 --- a/libs/apprise/plugins/bulkvs.py +++ b/libs/apprise/plugins/bulkvs.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. 
-# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -33,34 +32,34 @@ # API is documented here: # - https://portal.bulkvs.com/api/v1.0/documentation#/\ # Messaging/post_messageSend -import requests import json -from .base import NotifyBase -from ..url import PrivacyMode + +import requests + from ..common import NotifyType -from ..utils.parse import is_phone_no, parse_phone_no, parse_bool from ..locale import gettext_lazy as _ +from ..url import PrivacyMode +from ..utils.parse import is_phone_no, parse_bool, parse_phone_no +from .base import NotifyBase class NotifyBulkVS(NotifyBase): - """ - A wrapper for BulkVS Notifications - """ + """A wrapper for BulkVS Notifications.""" # The default descriptive name associated with the Notification - service_name = 'BulkVS' + service_name = "BulkVS" # The services URL - service_url = 'https://www.bulkvs.com/' + service_url = "https://www.bulkvs.com/" # All notification requests are secure - secure_protocol = 'bulkvs' + secure_protocol = "bulkvs" # A URL that takes you to the setup/help of the specific protocol - setup_url = 'https://github.com/caronc/apprise/wiki/Notify_bulkvs' + setup_url = "https://appriseit.com/services/bulkvs/" # BulkVS uses the http protocol with JSON requests - notify_url = 'https://portal.bulkvs.com/api/v1.0/messageSend' + notify_url = "https://portal.bulkvs.com/api/v1.0/messageSend" # The maximum length of the body body_maxlen = 160 @@ -74,101 +73,109 @@ class NotifyBulkVS(NotifyBase): # Define object templates templates = ( - '{schema}://{user}:{password}@{from_phone}/{targets}', - '{schema}://{user}:{password}@{from_phone}', + "{schema}://{user}:{password}@{from_phone}/{targets}", + "{schema}://{user}:{password}@{from_phone}", ) # Define our template tokens - template_tokens = dict(NotifyBase.template_tokens, **{ - 'user': { - 'name': 
_('User Name'), - 'type': 'string', - 'required': True, - }, - 'password': { - 'name': _('Password'), - 'type': 'string', - 'private': True, - 'required': True, - }, - 'from_phone': { - 'name': _('From Phone No'), - 'type': 'string', - 'regex': (r'^\+?[0-9\s)(+-]+$', 'i'), - 'map_to': 'source', - 'required': True, + template_tokens = dict( + NotifyBase.template_tokens, + **{ + "user": { + "name": _("User Name"), + "type": "string", + "required": True, + }, + "password": { + "name": _("Password"), + "type": "string", + "private": True, + "required": True, + }, + "from_phone": { + "name": _("From Phone No"), + "type": "string", + "regex": (r"^\+?[0-9\s)(+-]+$", "i"), + "map_to": "source", + "required": True, + }, + "target_phone": { + "name": _("Target Phone No"), + "type": "string", + "prefix": "+", + "regex": (r"^[0-9\s)(+-]+$", "i"), + "map_to": "targets", + }, + "targets": { + "name": _("Targets"), + "type": "list:string", + "required": True, + }, }, - 'target_phone': { - 'name': _('Target Phone No'), - 'type': 'string', - 'prefix': '+', - 'regex': (r'^[0-9\s)(+-]+$', 'i'), - 'map_to': 'targets', - }, - 'targets': { - 'name': _('Targets'), - 'type': 'list:string', - 'required': True, - }, - }) + ) # Define our template arguments - template_args = dict(NotifyBase.template_args, **{ - 'to': { - 'alias_of': 'targets', - }, - 'from': { - 'name': _('From Phone No'), - 'type': 'string', - 'regex': (r'^\+?[0-9\s)(+-]+$', 'i'), - 'map_to': 'source', - }, - 'batch': { - 'name': _('Batch Mode'), - 'type': 'bool', - 'default': False, + template_args = dict( + NotifyBase.template_args, + **{ + "to": { + "alias_of": "targets", + }, + "from": { + "name": _("From Phone No"), + "type": "string", + "regex": (r"^\+?[0-9\s)(+-]+$", "i"), + "map_to": "source", + }, + "batch": { + "name": _("Batch Mode"), + "type": "bool", + "default": False, + }, }, - }) + ) def __init__(self, source=None, targets=None, batch=None, **kwargs): - """ - Initialize BulkVS Object - """ - 
super(NotifyBulkVS, self).__init__(**kwargs) + """Initialize BulkVS Object.""" + super().__init__(**kwargs) if not (self.user and self.password): - msg = 'A BulkVS user/pass was not provided.' + msg = "A BulkVS user/pass was not provided." self.logger.warning(msg) raise TypeError(msg) result = is_phone_no(source) if not result: - msg = 'The Account (From) Phone # specified ' \ - '({}) is invalid.'.format(source) + msg = ( + f"The Account (From) Phone # specified ({source}) is invalid." + ) self.logger.warning(msg) raise TypeError(msg) # Tidy source - self.source = result['full'] + self.source = result["full"] # Define whether or not we should operate in a batch mode - self.batch = self.template_args['batch']['default'] \ - if batch is None else bool(batch) + self.batch = ( + self.template_args["batch"]["default"] + if batch is None + else bool(batch) + ) # Parse our targets - self.targets = list() + self.targets = [] has_error = False for target in parse_phone_no(targets): # Parse each phone number we found result = is_phone_no(target) if result: - self.targets.append(result['full']) + self.targets.append(result["full"]) continue has_error = True self.logger.warning( - 'Dropped invalid phone # ({}) specified.'.format(target), + f"Dropped invalid phone # ({target}) specified.", ) if not targets and not has_error: @@ -177,14 +184,12 @@ def __init__(self, source=None, targets=None, batch=None, **kwargs): return - def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): - """ - Perform BulkVS Notification - """ + def send(self, body, title="", notify_type=NotifyType.INFO, **kwargs): + """Perform BulkVS Notification.""" if not self.targets: # We have nothing to notify - self.logger.warning('There are no BulkVS targets to notify') + self.logger.warning("There are no BulkVS targets to notify") return False # Send in batches if identified to do so @@ -195,45 +200,52 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): # Prepare our 
headers headers = { - 'User-Agent': self.app_id, - 'Accept': 'application/json', - 'Content-Type': 'application/json', + "User-Agent": self.app_id, + "Accept": "application/json", + "Content-Type": "application/json", } # Prepare our payload payload = { # The To gets populated in the loop below - 'From': self.source, - 'To': None, - 'Message': body, + "From": self.source, + "To": None, + "Message": body, } # Authentication auth = (self.user, self.password) # Prepare our targets - targets = list(self.targets) if batch_size == 1 else \ - [self.targets[index:index + batch_size] - for index in range(0, len(self.targets), batch_size)] + targets = ( + list(self.targets) + if batch_size == 1 + else [ + self.targets[index : index + batch_size] + for index in range(0, len(self.targets), batch_size) + ] + ) while len(targets): # Get our target to notify target = targets.pop(0) # Prepare our user - payload['To'] = target + payload["To"] = target # Printable reference if isinstance(target, list): - p_target = '{} targets'.format(len(target)) + p_target = f"{len(target)} targets" else: p_target = target # Some Debug Logging - self.logger.debug('BulkVS POST URL: {} (cert_verify={})'.format( - self.notify_url, self.verify_certificate)) - self.logger.debug('BulkVS Payload: {}' .format(payload)) + self.logger.debug( + "BulkVS POST URL:" + f" {self.notify_url} (cert_verify={self.verify_certificate})" + ) + self.logger.debug(f"BulkVS Payload: {payload}") # Always call throttle before any remote server i/o is made self.throttle() @@ -265,22 +277,25 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): # } if r.status_code != requests.codes.ok: # We had a problem - status_str = \ - NotifyBase.http_response_code_lookup(r.status_code) + status_str = NotifyBase.http_response_code_lookup( + r.status_code + ) # set up our status code to use status_code = r.status_code self.logger.warning( - 'Failed to send BulkVS notification to {}: ' - '{}{}error={}.'.format( + "Failed 
to send BulkVS notification to {}: " + "{}{}error={}.".format( p_target, status_str, - ', ' if status_str else '', - status_code)) + ", " if status_str else "", + status_code, + ) + ) self.logger.debug( - 'Response Details:\r\n{}'.format(r.content)) + "Response Details:\r\n%r", (r.content or b"")[:2000]) # Mark our failure has_error = True @@ -288,13 +303,15 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): else: self.logger.info( - 'Sent BulkVS notification to {}.'.format(p_target)) + f"Sent BulkVS notification to {p_target}." + ) except requests.RequestException as e: self.logger.warning( - 'A Connection error occurred sending BulkVS: to %s ', - p_target) - self.logger.debug('Socket Exception: %s' % str(e)) + "A Connection error occurred sending BulkVS: to %s ", + p_target, + ) + self.logger.debug(f"Socket Exception: {e!s}") # Mark our failure has_error = True @@ -304,46 +321,48 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): @property def url_identifier(self): - """ - Returns all of the identifiers that make this URL unique from - another simliar one. Targets or end points should never be identified - here. + """Returns all of the identifiers that make this URL unique from + another simliar one. + + Targets or end points should never be identified here. """ return (self.secure_protocol, self.source, self.user, self.password) def url(self, privacy=False, *args, **kwargs): - """ - Returns the URL built dynamically based on specified arguments. 
- """ + """Returns the URL built dynamically based on specified arguments.""" # Define any URL parameters params = { - 'batch': 'yes' if self.batch else 'no', + "batch": "yes" if self.batch else "no", } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) # A nice way of cleaning up the URL length a bit - targets = [] if len(self.targets) == 1 \ - and self.targets[0] == self.source else self.targets - - return '{schema}://{user}:{password}@{source}/{targets}' \ - '?{params}'.format( + targets = ( + [] + if len(self.targets) == 1 and self.targets[0] == self.source + else self.targets + ) + + return ( + "{schema}://{user}:{password}@{source}/{targets}?{params}".format( schema=self.secure_protocol, source=self.source, - user=self.pprint(self.user, privacy, safe=''), + user=self.pprint(self.user, privacy, safe=""), password=self.pprint( - self.password, privacy, mode=PrivacyMode.Secret, safe=''), - targets='/'.join([ - NotifyBulkVS.quote('{}'.format(x), safe='+') - for x in targets]), - params=NotifyBulkVS.urlencode(params)) + self.password, privacy, mode=PrivacyMode.Secret, safe="" + ), + targets="/".join( + [NotifyBulkVS.quote(f"{x}", safe="+") for x in targets] + ), + params=NotifyBulkVS.urlencode(params), + ) + ) def __len__(self): - """ - Returns the number of targets associated with this notification - """ + """Returns the number of targets associated with this notification.""" # # Factor batch into calculation @@ -351,18 +370,16 @@ def __len__(self): batch_size = 1 if not self.batch else self.default_batch_size targets = len(self.targets) if self.targets else 1 if batch_size > 1: - targets = int(targets / batch_size) + \ - (1 if targets % batch_size else 0) + targets = int(targets / batch_size) + ( + 1 if targets % batch_size else 0 + ) return targets @staticmethod def parse_url(url): - """ - Parses the URL and returns enough arguments that can allow - us to re-instantiate this object. 
- - """ + """Parses the URL and returns enough arguments that can allow us to re- + instantiate this object.""" results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results @@ -371,31 +388,34 @@ def parse_url(url): # Support the 'from' and 'source' variable so that we can support # targets this way too. # The 'from' makes it easier to use yaml configuration - if 'from' in results['qsd'] and len(results['qsd']['from']): - results['source'] = \ - NotifyBulkVS.unquote(results['qsd']['from']) + if "from" in results["qsd"] and len(results["qsd"]["from"]): + results["source"] = NotifyBulkVS.unquote(results["qsd"]["from"]) # hostname will also be a target in this case - results['targets'] = [ - *NotifyBulkVS.parse_phone_no(results['host']), - *NotifyBulkVS.split_path(results['fullpath'])] + results["targets"] = [ + *NotifyBulkVS.parse_phone_no(results["host"]), + *NotifyBulkVS.split_path(results["fullpath"]), + ] else: # store our source - results['source'] = NotifyBulkVS.unquote(results['host']) + results["source"] = NotifyBulkVS.unquote(results["host"]) # store targets - results['targets'] = NotifyBulkVS.split_path(results['fullpath']) + results["targets"] = NotifyBulkVS.split_path(results["fullpath"]) # Support the 'to' variable so that we can support targets this way too # The 'to' makes it easier to use yaml configuration - if 'to' in results['qsd'] and len(results['qsd']['to']): - results['targets'] += \ - NotifyBulkVS.parse_phone_no(results['qsd']['to']) + if "to" in results["qsd"] and len(results["qsd"]["to"]): + results["targets"] += NotifyBulkVS.parse_phone_no( + results["qsd"]["to"] + ) # Get Batch Mode Flag - results['batch'] = \ - parse_bool(results['qsd'].get( - 'batch', NotifyBulkVS.template_args['batch']['default'])) + results["batch"] = parse_bool( + results["qsd"].get( + "batch", NotifyBulkVS.template_args["batch"]["default"] + ) + ) return results diff --git a/libs/apprise/plugins/burstsms.py 
b/libs/apprise/plugins/burstsms.py index 0cebddabbd..154366ffb8 100644 --- a/libs/apprise/plugins/burstsms.py +++ b/libs/apprise/plugins/burstsms.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. -# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -33,23 +32,27 @@ # import requests -from .base import NotifyBase -from ..url import PrivacyMode from ..common import NotifyType -from ..utils.parse import ( - is_phone_no, parse_phone_no, parse_bool, validate_regex) from ..locale import gettext_lazy as _ +from ..url import PrivacyMode +from ..utils.parse import ( + is_phone_no, + parse_bool, + parse_phone_no, + validate_regex, +) +from .base import NotifyBase class BurstSMSCountryCode: # Australia - AU = 'au' + AU = "au" # New Zeland - NZ = 'nz' + NZ = "nz" # United Kingdom - UK = 'gb' + UK = "gb" # United States - US = 'us' + US = "us" BURST_SMS_COUNTRY_CODES = ( @@ -61,18 +64,16 @@ class BurstSMSCountryCode: class NotifyBurstSMS(NotifyBase): - """ - A wrapper for Burst SMS Notifications - """ + """A wrapper for Burst SMS Notifications.""" # The default descriptive name associated with the Notification - service_name = 'Burst SMS' + service_name = "Burst SMS" # The services URL - service_url = 'https://burstsms.com/' + service_url = "https://burstsms.com/" # The default protocol - secure_protocol = 'burstsms' + secure_protocol = "burstsms" # The maximum amount of SMS Messages that can reside within a single # batch transfer based on: @@ -80,10 +81,10 @@ class NotifyBurstSMS(NotifyBase): default_batch_size = 500 # A URL that takes you to the setup/help of the specific protocol - setup_url = 'https://github.com/caronc/apprise/wiki/Notify_burst_sms' + setup_url = "https://appriseit.com/services/burstsms/" # Burst SMS uses the http protocol with JSON 
requests - notify_url = 'https://api.transmitsms.com/send-sms.json' + notify_url = "https://api.transmitsms.com/send-sms.json" # The maximum length of the body body_maxlen = 160 @@ -93,168 +94,176 @@ class NotifyBurstSMS(NotifyBase): title_maxlen = 0 # Define object templates - templates = ( - '{schema}://{apikey}:{secret}@{sender_id}/{targets}', - ) + templates = ("{schema}://{apikey}:{secret}@{sender_id}/{targets}",) # Define our template tokens - template_tokens = dict(NotifyBase.template_tokens, **{ - 'apikey': { - 'name': _('API Key'), - 'type': 'string', - 'required': True, - 'regex': (r'^[a-z0-9]+$', 'i'), - 'private': True, - }, - 'secret': { - 'name': _('API Secret'), - 'type': 'string', - 'private': True, - 'required': True, - 'regex': (r'^[a-z0-9]+$', 'i'), - }, - 'sender_id': { - 'name': _('Sender ID'), - 'type': 'string', - 'required': True, - 'map_to': 'source', + template_tokens = dict( + NotifyBase.template_tokens, + **{ + "apikey": { + "name": _("API Key"), + "type": "string", + "required": True, + "regex": (r"^[a-z0-9]+$", "i"), + "private": True, + }, + "secret": { + "name": _("API Secret"), + "type": "string", + "private": True, + "required": True, + "regex": (r"^[a-z0-9]+$", "i"), + }, + "sender_id": { + "name": _("Sender ID"), + "type": "string", + "required": True, + "map_to": "source", + }, + "target_phone": { + "name": _("Target Phone No"), + "type": "string", + "prefix": "+", + "regex": (r"^[0-9\s)(+-]+$", "i"), + "map_to": "targets", + }, + "targets": { + "name": _("Targets"), + "type": "list:string", + "required": True, + }, }, - 'target_phone': { - 'name': _('Target Phone No'), - 'type': 'string', - 'prefix': '+', - 'regex': (r'^[0-9\s)(+-]+$', 'i'), - 'map_to': 'targets', - }, - 'targets': { - 'name': _('Targets'), - 'type': 'list:string', - 'required': True, - }, - }) + ) # Define our template arguments - template_args = dict(NotifyBase.template_args, **{ - 'to': { - 'alias_of': 'targets', - }, - 'from': { - 'alias_of': 'sender_id', - 
}, - 'key': { - 'alias_of': 'apikey', + template_args = dict( + NotifyBase.template_args, + **{ + "to": { + "alias_of": "targets", + }, + "from": { + "alias_of": "sender_id", + }, + "key": { + "alias_of": "apikey", + }, + "secret": { + "alias_of": "secret", + }, + "country": { + "name": _("Country"), + "type": "choice:string", + "values": BURST_SMS_COUNTRY_CODES, + "default": BurstSMSCountryCode.US, + }, + # Validity + # Expire a message send if it is undeliverable (defined in minutes) + # If set to Zero (0); this is the default and sets the max validity + # period + "validity": {"name": _("validity"), "type": "int", "default": 0}, + "batch": { + "name": _("Batch Mode"), + "type": "bool", + "default": False, + }, }, - 'secret': { - 'alias_of': 'secret', - }, - 'country': { - 'name': _('Country'), - 'type': 'choice:string', - 'values': BURST_SMS_COUNTRY_CODES, - 'default': BurstSMSCountryCode.US, - }, - # Validity - # Expire a message send if it is undeliverable (defined in minutes) - # If set to Zero (0); this is the default and sets the max validity - # period - 'validity': { - 'name': _('validity'), - 'type': 'int', - 'default': 0 - }, - 'batch': { - 'name': _('Batch Mode'), - 'type': 'bool', - 'default': False, - }, - }) + ) - def __init__(self, apikey, secret, source, targets=None, country=None, - validity=None, batch=None, **kwargs): - """ - Initialize Burst SMS Object - """ + def __init__( + self, + apikey, + secret, + source, + targets=None, + country=None, + validity=None, + batch=None, + **kwargs, + ): + """Initialize Burst SMS Object.""" super().__init__(**kwargs) # API Key (associated with project) self.apikey = validate_regex( - apikey, *self.template_tokens['apikey']['regex']) + apikey, *self.template_tokens["apikey"]["regex"] + ) if not self.apikey: - msg = 'An invalid Burst SMS API Key ' \ - '({}) was specified.'.format(apikey) + msg = f"An invalid Burst SMS API Key ({apikey}) was specified." 
self.logger.warning(msg) raise TypeError(msg) # API Secret (associated with project) self.secret = validate_regex( - secret, *self.template_tokens['secret']['regex']) + secret, *self.template_tokens["secret"]["regex"] + ) if not self.secret: - msg = 'An invalid Burst SMS API Secret ' \ - '({}) was specified.'.format(secret) + msg = f"An invalid Burst SMS API Secret ({secret}) was specified." self.logger.warning(msg) raise TypeError(msg) if not country: - self.country = self.template_args['country']['default'] + self.country = self.template_args["country"]["default"] else: self.country = country.lower().strip() if country not in BURST_SMS_COUNTRY_CODES: - msg = 'An invalid Burst SMS country ' \ - '({}) was specified.'.format(country) + msg = ( + f"An invalid Burst SMS country ({country}) was specified." + ) self.logger.warning(msg) raise TypeError(msg) # Set our Validity - self.validity = self.template_args['validity']['default'] + self.validity = self.template_args["validity"]["default"] if validity: try: self.validity = int(validity) except (ValueError, TypeError): - msg = 'The Burst SMS Validity specified ({}) is invalid.'\ - .format(validity) + msg = ( + f"The Burst SMS Validity specified ({validity}) is" + " invalid." + ) self.logger.warning(msg) - raise TypeError(msg) + raise TypeError(msg) from None # Prepare Batch Mode Flag - self.batch = self.template_args['batch']['default'] \ - if batch is None else batch + self.batch = ( + self.template_args["batch"]["default"] if batch is None else batch + ) # The Sender ID self.source = validate_regex(source) if not self.source: - msg = 'The Account Sender ID specified ' \ - '({}) is invalid.'.format(source) + msg = f"The Account Sender ID specified ({source}) is invalid." 
self.logger.warning(msg) raise TypeError(msg) # Parse our targets - self.targets = list() + self.targets = [] for target in parse_phone_no(targets): # Validate targets and drop bad ones: result = is_phone_no(target) if not result: self.logger.warning( - 'Dropped invalid phone # ' - '({}) specified.'.format(target), + f"Dropped invalid phone # ({target}) specified.", ) continue # store valid phone number - self.targets.append(result['full']) + self.targets.append(result["full"]) return - def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): - """ - Perform Burst SMS Notification - """ + def send(self, body, title="", notify_type=NotifyType.INFO, **kwargs): + """Perform Burst SMS Notification.""" if not self.targets: self.logger.warning( - 'There are no valid Burst SMS targets to notify.') + "There are no valid Burst SMS targets to notify." + ) return False # error tracking (used for function return) @@ -262,8 +271,8 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): # Prepare our headers headers = { - 'User-Agent': self.app_id, - 'Accept': 'application/json', + "User-Agent": self.app_id, + "Accept": "application/json", } # Prepare our authentication @@ -271,14 +280,12 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): # Prepare our payload payload = { - 'countrycode': self.country, - 'message': body, - + "countrycode": self.country, + "message": body, # Sender ID - 'from': self.source, - + "from": self.source, # The to gets populated in the loop below - 'to': None, + "to": None, } # Send in batches if identified to do so @@ -290,12 +297,14 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): for index in range(0, len(targets), batch_size): # Prepare our user - payload['to'] = ','.join(self.targets[index:index + batch_size]) + payload["to"] = ",".join(self.targets[index : index + batch_size]) # Some Debug Logging - self.logger.debug('Burst SMS POST URL: {} (cert_verify={})'.format( - 
self.notify_url, self.verify_certificate)) - self.logger.debug('Burst SMS Payload: {}' .format(payload)) + self.logger.debug( + "Burst SMS POST URL:" + f" {self.notify_url} (cert_verify={self.verify_certificate})" + ) + self.logger.debug(f"Burst SMS Payload: {payload}") # Always call throttle before any remote server i/o is made self.throttle() @@ -312,20 +321,22 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): if r.status_code != requests.codes.ok: # We had a problem - status_str = \ - NotifyBurstSMS.http_response_code_lookup( - r.status_code) + status_str = NotifyBurstSMS.http_response_code_lookup( + r.status_code + ) self.logger.warning( - 'Failed to send Burst SMS notification to {} ' - 'target(s): {}{}error={}.'.format( - len(self.targets[index:index + batch_size]), + "Failed to send Burst SMS notification to {} " + "target(s): {}{}error={}.".format( + len(self.targets[index : index + batch_size]), status_str, - ', ' if status_str else '', - r.status_code)) + ", " if status_str else "", + r.status_code, + ) + ) self.logger.debug( - 'Response Details:\r\n{}'.format(r.content)) + "Response Details:\r\n%r", (r.content or b"")[:2000]) # Mark our failure has_error = True @@ -333,15 +344,19 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): else: self.logger.info( - 'Sent Burst SMS notification to %d target(s).' % - len(self.targets[index:index + batch_size])) + "Sent Burst SMS notification to " + f"{len(self.targets[index : index + batch_size])} " + "target(s)." + ) except requests.RequestException as e: self.logger.warning( - 'A Connection error occurred sending Burst SMS ' - 'notification to %d target(s).' % - len(self.targets[index:index + batch_size])) - self.logger.debug('Socket Exception: %s' % str(e)) + f"A Connection error occurred sending Burst SMS " + "notification to " + f"{len(self.targets[index : index + batch_size])} " + "target(s)." 
+ ) + self.logger.debug(f"Socket Exception: {e!s}") # Mark our failure has_error = True @@ -350,118 +365,118 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): return not has_error def url(self, privacy=False, *args, **kwargs): - """ - Returns the URL built dynamically based on specified arguments. - """ + """Returns the URL built dynamically based on specified arguments.""" # Define any URL parameters params = { - 'country': self.country, - 'batch': 'yes' if self.batch else 'no', + "country": self.country, + "batch": "yes" if self.batch else "no", } if self.validity: - params['validity'] = str(self.validity) + params["validity"] = str(self.validity) # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) - return '{schema}://{key}:{secret}@{source}/{targets}/?{params}'.format( + return "{schema}://{key}:{secret}@{source}/{targets}/?{params}".format( schema=self.secure_protocol, - key=self.pprint(self.apikey, privacy, safe=''), + key=self.pprint(self.apikey, privacy, safe=""), secret=self.pprint( - self.secret, privacy, mode=PrivacyMode.Secret, safe=''), - source=NotifyBurstSMS.quote(self.source, safe=''), - targets='/'.join( - [NotifyBurstSMS.quote(x, safe='') for x in self.targets]), - params=NotifyBurstSMS.urlencode(params)) + self.secret, privacy, mode=PrivacyMode.Secret, safe="" + ), + source=NotifyBurstSMS.quote(self.source, safe=""), + targets="/".join( + [NotifyBurstSMS.quote(x, safe="") for x in self.targets] + ), + params=NotifyBurstSMS.urlencode(params), + ) @property def url_identifier(self): - """ - Returns all of the identifiers that make this URL unique from - another simliar one. Targets or end points should never be identified - here. + """Returns all of the identifiers that make this URL unique from + another simliar one. + + Targets or end points should never be identified here. 
""" return (self.secure_protocol, self.apikey, self.secret, self.source) def __len__(self): - """ - Returns the number of targets associated with this notification - """ + """Returns the number of targets associated with this notification.""" # # Factor batch into calculation # batch_size = 1 if not self.batch else self.default_batch_size targets = len(self.targets) if batch_size > 1: - targets = int(targets / batch_size) + \ - (1 if targets % batch_size else 0) + targets = int(targets / batch_size) + ( + 1 if targets % batch_size else 0 + ) return targets if targets > 0 else 1 @staticmethod def parse_url(url): - """ - Parses the URL and returns enough arguments that can allow - us to re-instantiate this object. - - """ + """Parses the URL and returns enough arguments that can allow us to re- + instantiate this object.""" results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # The hostname is our source (Sender ID) - results['source'] = NotifyBurstSMS.unquote(results['host']) + results["source"] = NotifyBurstSMS.unquote(results["host"]) # Get any remaining targets - results['targets'] = NotifyBurstSMS.split_path(results['fullpath']) + results["targets"] = NotifyBurstSMS.split_path(results["fullpath"]) # Get our account_side and auth_token from the user/pass config - results['apikey'] = NotifyBurstSMS.unquote(results['user']) - results['secret'] = NotifyBurstSMS.unquote(results['password']) + results["apikey"] = NotifyBurstSMS.unquote(results["user"]) + results["secret"] = NotifyBurstSMS.unquote(results["password"]) # API Key - if 'key' in results['qsd'] and len(results['qsd']['key']): + if "key" in results["qsd"] and len(results["qsd"]["key"]): # Extract the API Key from an argument - results['apikey'] = \ - NotifyBurstSMS.unquote(results['qsd']['key']) + results["apikey"] = NotifyBurstSMS.unquote(results["qsd"]["key"]) # API Secret - if 'secret' in results['qsd'] and 
len(results['qsd']['secret']): + if "secret" in results["qsd"] and len(results["qsd"]["secret"]): # Extract the API Secret from an argument - results['secret'] = \ - NotifyBurstSMS.unquote(results['qsd']['secret']) + results["secret"] = NotifyBurstSMS.unquote( + results["qsd"]["secret"] + ) # Support the 'from' and 'source' variable so that we can support # targets this way too. # The 'from' makes it easier to use yaml configuration - if 'from' in results['qsd'] and len(results['qsd']['from']): - results['source'] = \ - NotifyBurstSMS.unquote(results['qsd']['from']) - if 'source' in results['qsd'] and len(results['qsd']['source']): - results['source'] = \ - NotifyBurstSMS.unquote(results['qsd']['source']) + if "from" in results["qsd"] and len(results["qsd"]["from"]): + results["source"] = NotifyBurstSMS.unquote(results["qsd"]["from"]) + if "source" in results["qsd"] and len(results["qsd"]["source"]): + results["source"] = NotifyBurstSMS.unquote( + results["qsd"]["source"] + ) # Support country - if 'country' in results['qsd'] and len(results['qsd']['country']): - results['country'] = \ - NotifyBurstSMS.unquote(results['qsd']['country']) + if "country" in results["qsd"] and len(results["qsd"]["country"]): + results["country"] = NotifyBurstSMS.unquote( + results["qsd"]["country"] + ) # Support validity value - if 'validity' in results['qsd'] and len(results['qsd']['validity']): - results['validity'] = \ - NotifyBurstSMS.unquote(results['qsd']['validity']) + if "validity" in results["qsd"] and len(results["qsd"]["validity"]): + results["validity"] = NotifyBurstSMS.unquote( + results["qsd"]["validity"] + ) # Get Batch Mode Flag - if 'batch' in results['qsd'] and len(results['qsd']['batch']): - results['batch'] = parse_bool(results['qsd']['batch']) + if "batch" in results["qsd"] and len(results["qsd"]["batch"]): + results["batch"] = parse_bool(results["qsd"]["batch"]) # Support the 'to' variable so that we can support rooms this way too # The 'to' makes it easier to use 
yaml configuration - if 'to' in results['qsd'] and len(results['qsd']['to']): - results['targets'] += \ - NotifyBurstSMS.parse_phone_no(results['qsd']['to']) + if "to" in results["qsd"] and len(results["qsd"]["to"]): + results["targets"] += NotifyBurstSMS.parse_phone_no( + results["qsd"]["to"] + ) return results diff --git a/libs/apprise/plugins/chanify.py b/libs/apprise/plugins/chanify.py index caf314942f..70d3c62b64 100644 --- a/libs/apprise/plugins/chanify.py +++ b/libs/apprise/plugins/chanify.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. -# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -35,36 +34,32 @@ import requests -from .base import NotifyBase from ..common import NotifyType -from ..utils.parse import validate_regex from ..locale import gettext_lazy as _ +from ..utils.parse import validate_regex +from .base import NotifyBase class NotifyChanify(NotifyBase): - """ - A wrapper for Chanify Notifications - """ + """A wrapper for Chanify Notifications.""" # The default descriptive name associated with the Notification - service_name = _('Chanify') + service_name = _("Chanify") # The services URL - service_url = 'https://chanify.net/' + service_url = "https://chanify.net/" # The default secure protocol - secure_protocol = 'chanify' + secure_protocol = "chanify" # A URL that takes you to the setup/help of the specific protocol - setup_url = 'https://github.com/caronc/apprise/wiki/Notify_chanify' + setup_url = "https://appriseit.com/services/chanify/" # Notification URL - notify_url = 'https://api.chanify.net/v1/sender/{token}/' + notify_url = "https://api.chanify.net/v1/sender/{token}/" # Define object templates - templates = ( - '{schema}://{token}', - ) + templates = ("{schema}://{token}",) # The title is not used title_maxlen = 0 
@@ -72,58 +67,60 @@ class NotifyChanify(NotifyBase): # Define our tokens; these are the minimum tokens required required to # be passed into this function (as arguments). The syntax appends any # previously defined in the base package and builds onto them - template_tokens = dict(NotifyBase.template_tokens, **{ - 'token': { - 'name': _('Token'), - 'type': 'string', - 'private': True, - 'required': True, - 'regex': (r'^[A-Z0-9_-]+$', 'i'), + template_tokens = dict( + NotifyBase.template_tokens, + **{ + "token": { + "name": _("Token"), + "type": "string", + "private": True, + "required": True, + "regex": (r"^[A-Z0-9._-]+$", "i"), + }, }, - }) + ) # Define our template arguments - template_args = dict(NotifyBase.template_args, **{ - 'token': { - 'alias_of': 'token', + template_args = dict( + NotifyBase.template_args, + **{ + "token": { + "alias_of": "token", + }, }, - }) + ) def __init__(self, token, **kwargs): - """ - Initialize Chanify Object - """ + """Initialize Chanify Object.""" super().__init__(**kwargs) self.token = validate_regex( - token, *self.template_tokens['token']['regex']) + token, *self.template_tokens["token"]["regex"] + ) if not self.token: - msg = 'The Chanify token specified ({}) is invalid.'\ - .format(token) + msg = f"The Chanify token specified ({token}) is invalid." 
self.logger.warning(msg) raise TypeError(msg) return - def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): - """ - Send our notification - """ + def send(self, body, title="", notify_type=NotifyType.INFO, **kwargs): + """Send our notification.""" # prepare our headers headers = { - 'User-Agent': self.app_id, - 'Content-Type': 'application/x-www-form-urlencoded', + "User-Agent": self.app_id, + "Content-Type": "application/x-www-form-urlencoded", } # Our Message - payload = { - 'text': body - } + payload = {"text": body} - self.logger.debug('Chanify GET URL: %s (cert_verify=%r)' % ( - self.notify_url, self.verify_certificate)) - self.logger.debug('Chanify Payload: %s' % str(payload)) + self.logger.debug( + "Chanify GET URL:" + f" {self.notify_url} (cert_verify={self.verify_certificate!r})" + ) + self.logger.debug(f"Chanify Payload: {payload!s}") # Always call throttle before any remote server i/o is made self.throttle() @@ -138,29 +135,31 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): ) if r.status_code != requests.codes.ok: # We had a problem - status_str = \ - NotifyChanify.http_response_code_lookup(r.status_code) + status_str = NotifyChanify.http_response_code_lookup( + r.status_code + ) self.logger.warning( - 'Failed to send Chanify notification: ' - '{}{}error={}.'.format( - status_str, - ', ' if status_str else '', - r.status_code)) + "Failed to send Chanify notification: " + "{}{}error={}.".format( + status_str, ", " if status_str else "", r.status_code + ) + ) - self.logger.debug('Response Details:\r\n{}'.format(r.content)) + self.logger.debug( + "Response Details:\r\n%r", (r.content or b"")[:2000]) # Return; we're done return False else: - self.logger.info('Sent Chanify notification.') + self.logger.info("Sent Chanify notification.") except requests.RequestException as e: self.logger.warning( - 'A Connection error occurred sending Chanify ' - 'notification.') - self.logger.debug('Socket Exception: %s' % str(e)) + 
"A Connection error occurred sending Chanify notification." + ) + self.logger.debug(f"Socket Exception: {e!s}") # Return; we're done return False @@ -168,35 +167,30 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): return True def url(self, privacy=False, *args, **kwargs): - """ - Returns the URL built dynamically based on specified arguments. - """ + """Returns the URL built dynamically based on specified arguments.""" # Prepare our parameters params = self.url_parameters(privacy=privacy, *args, **kwargs) - return '{schema}://{token}/?{params}'.format( + return "{schema}://{token}/?{params}".format( schema=self.secure_protocol, - token=self.pprint(self.token, privacy, safe=''), + token=self.pprint(self.token, privacy, safe=""), params=NotifyChanify.urlencode(params), ) @property def url_identifier(self): - """ - Returns all of the identifiers that make this URL unique from - another simliar one. Targets or end points should never be identified - here. + """Returns all of the identifiers that make this URL unique from + another simliar one. + + Targets or end points should never be identified here. """ return (self.secure_protocol, self.token) @staticmethod def parse_url(url): - """ - Parses the URL and returns enough arguments that can allow - us to re-instantiate this object. - - """ + """Parses the URL and returns enough arguments that can allow us to re- + instantiate this object.""" # parse_url already handles getting the `user` and `password` fields # populated. 
@@ -206,10 +200,10 @@ def parse_url(url): return results # Allow over-ride - if 'token' in results['qsd'] and len(results['qsd']['token']): - results['token'] = NotifyChanify.unquote(results['qsd']['token']) + if "token" in results["qsd"] and len(results["qsd"]["token"]): + results["token"] = NotifyChanify.unquote(results["qsd"]["token"]) else: - results['token'] = NotifyChanify.unquote(results['host']) + results["token"] = NotifyChanify.unquote(results["host"]) return results diff --git a/libs/apprise/plugins/clickatell.py b/libs/apprise/plugins/clickatell.py new file mode 100644 index 0000000000..8233854a0e --- /dev/null +++ b/libs/apprise/plugins/clickatell.py @@ -0,0 +1,303 @@ +# BSD 2-Clause License +# +# Apprise - Push Notification Library. +# Copyright (c) 2026, Chris Caron +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +from itertools import chain + +# To use this service you will need a Clickatell account to which you can get +# your API_TOKEN at: +# https://www.clickatell.com/ +import requests + +from ..common import NotifyType +from ..locale import gettext_lazy as _ +from ..utils.parse import is_phone_no, parse_phone_no, validate_regex +from .base import NotifyBase + + +class NotifyClickatell(NotifyBase): + """A wrapper for Clickatell Notifications.""" + + # The default descriptive name associated with the Notification + service_name = _("Clickatell") + + # The services URL + service_url = "https://www.clickatell.com/" + + # All notification requests are secure + secure_protocol = "clickatell" + + # A URL that takes you to the setup/help of the specific protocol + setup_url = "https://appriseit.com/services/clickatell/" + + # Clickatell API Endpoint + notify_url = "https://platform.clickatell.com/messages/http/send" + + # A title can not be used for SMS Messages. Setting this to zero will + # cause any title (if defined) to get placed into the message body. 
+ title_maxlen = 0 + + templates = ( + "{schema}://{apikey}/{targets}", + "{schema}://{source}@{apikey}/{targets}", + ) + + template_tokens = dict( + NotifyBase.template_tokens, + **{ + "apikey": { + "name": _("API Token"), + "type": "string", + "private": True, + "required": True, + }, + "source": { + "name": _("From Phone No"), + "type": "string", + "regex": (r"^[0-9\s)(+-]+$", "i"), + }, + "target_phone": { + "name": _("Target Phone No"), + "type": "string", + "prefix": "+", + "regex": (r"^[0-9\s)(+-]+$", "i"), + "map_to": "targets", + }, + "targets": { + "name": _("Targets"), + "type": "list:string", + "required": True, + }, + }, + ) + + template_args = dict( + NotifyBase.template_args, + **{ + "apikey": {"alias_of": "apikey"}, + "to": { + "alias_of": "targets", + }, + "from": { + "alias_of": "source", + }, + }, + ) + + def __init__(self, apikey, source=None, targets=None, **kwargs): + """Initialize Clickatell Object.""" + + super().__init__(**kwargs) + + self.apikey = validate_regex(apikey) + if not self.apikey: + msg = f"An invalid Clickatell API Token ({apikey}) was specified." + self.logger.warning(msg) + raise TypeError(msg) + + self.source = None + if source: + result = is_phone_no(source) + if not result: + msg = ( + "The Account (From) Phone # specified " + f"({source}) is invalid." 
+ ) + self.logger.warning(msg) + + raise TypeError(msg) + + # Tidy source + self.source = result["full"] + + # Used for URL generation afterwards only + self._invalid_targets = [] + + # Parse our targets + self.targets = [] + + for target in parse_phone_no(targets, prefix=True): + # Validate targets and drop bad ones: + result = is_phone_no(target) + if not result: + self.logger.warning( + f"Dropped invalid phone # ({target}) specified.", + ) + self._invalid_targets.append(target) + continue + + # store valid phone number + self.targets.append(result["full"]) + + @property + def url_identifier(self): + """Returns all of the identifiers that make this URL unique from + another simliar one. + + Targets or end points should never be identified here. + """ + return (self.apikey, self.source) + + def url(self, privacy=False, *args, **kwargs): + """Returns the URL built dynamically based on specified arguments.""" + + params = self.url_parameters(privacy=privacy, *args, **kwargs) + + return "{schema}://{source}{apikey}/{targets}/?{params}".format( + schema=self.secure_protocol, + source=f"{self.source}@" if self.source else "", + apikey=self.pprint(self.apikey, privacy, safe="="), + targets="/".join([ + NotifyClickatell.quote(t, safe="") + for t in chain(self.targets, self._invalid_targets) + ]), + params=self.urlencode(params), + ) + + def __len__(self): + """Returns the number of targets associated with this notification. 
+ + Always return 1 at least + """ + return len(self.targets) if self.targets else 1 + + def send(self, body, title="", notify_type=NotifyType.INFO, **kwargs): + """Perform Clickatell Notification.""" + + if not self.targets: + # There were no targets to notify + self.logger.warning("There were no Clickatell targets to notify") + return False + + headers = { + "User-Agent": self.app_id, + "Accept": "application/json", + "Content-Type": "application/json", + } + + params_base = { + "apiKey": self.apikey, + "from": self.source, + "content": body, + } + + # error tracking (used for function return) + has_error = False + + for target in self.targets: + params = params_base.copy() + params["to"] = target + + # Some Debug Logging + self.logger.debug( + "Clickatell GET URL:" + f" {self.notify_url} (cert_verify={self.verify_certificate})" + ) + self.logger.debug(f"Clickatell Payload: {params}") + + # Always call throttle before any remote server i/o is made + self.throttle() + + try: + r = requests.get( + self.notify_url, + params=params, + headers=headers, + verify=self.verify_certificate, + timeout=self.request_timeout, + ) + + if ( + r.status_code != requests.codes.ok + and r.status_code != requests.codes.accepted + ): + # We had a problem + status_str = self.http_response_code_lookup(r.status_code) + + self.logger.warning( + "Failed to send Clickatell notification: " + "{}{}error={}.".format( + status_str, + ", " if status_str else "", + r.status_code, + ) + ) + + self.logger.debug( + "Response Details:\r\n%r", (r.content or b"")[:2000]) + + # Mark our failure + has_error = True + continue + + else: + self.logger.info( + "Sent Clickatell notification to %s", target + ) + + except requests.RequestException as e: + self.logger.warning( + "A Connection error occurred sending Clickatell: to %s ", + target, + ) + self.logger.debug(f"Socket Exception: {e!s}") + # Mark our failure + has_error = True + continue + + return not has_error + + @staticmethod + def parse_url(url): + 
"""Parses the URL and returns enough arguments that can allow us to re- + instantiate this object.""" + results = NotifyBase.parse_url(url, verify_host=False) + if not results: + # We're done early as we couldn't parse the URL + return results + + results["targets"] = NotifyClickatell.split_path(results["fullpath"]) + results["apikey"] = NotifyClickatell.unquote(results["host"]) + + if results["user"]: + results["source"] = NotifyClickatell.unquote(results["user"]) + + # Support the 'to' variable so that we can support targets this way too + # The 'to' makes it easier to use yaml configuration + if "to" in results["qsd"] and len(results["qsd"]["to"]): + results["targets"] += NotifyClickatell.parse_phone_no( + results["qsd"]["to"] + ) + + # Support the 'from' and 'source' variable so that we can support + # targets this way too. + # The 'from' makes it easier to use yaml configuration + if "from" in results["qsd"] and len(results["qsd"]["from"]): + results["source"] = NotifyClickatell.unquote( + results["qsd"]["from"] + ) + + return results diff --git a/libs/apprise/plugins/clicksend.py b/libs/apprise/plugins/clicksend.py index f37bd283ae..cada8e690b 100644 --- a/libs/apprise/plugins/clicksend.py +++ b/libs/apprise/plugins/clicksend.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. 
-# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -39,40 +38,39 @@ # The API reference used to build this plugin was documented here: # https://developers.clicksend.com/docs/rest/v3/ # -import requests from json import dumps -from .base import NotifyBase -from ..url import PrivacyMode +import requests + from ..common import NotifyType -from ..utils.parse import is_phone_no, parse_phone_no, parse_bool from ..locale import gettext_lazy as _ +from ..url import PrivacyMode +from ..utils.parse import is_phone_no, parse_bool, parse_phone_no +from .base import NotifyBase # Extend HTTP Error Messages CLICKSEND_HTTP_ERROR_MAP = { - 401: 'Unauthorized - Invalid Token.', + 401: "Unauthorized - Invalid Token.", } class NotifyClickSend(NotifyBase): - """ - A wrapper for ClickSend Notifications - """ + """A wrapper for ClickSend Notifications.""" # The default descriptive name associated with the Notification - service_name = 'ClickSend' + service_name = "ClickSend" # The services URL - service_url = 'https://clicksend.com/' + service_url = "https://clicksend.com/" # The default secure protocol - secure_protocol = 'clicksend' + secure_protocol = "clicksend" # A URL that takes you to the setup/help of the specific protocol - setup_url = 'https://github.com/caronc/apprise/wiki/Notify_clicksend' + setup_url = "https://appriseit.com/services/clicksend/" # ClickSend uses the http protocol with JSON requests - notify_url = 'https://rest.clicksend.com/v3/sms/send' + notify_url = "https://rest.clicksend.com/v3/sms/send" # The maximum length of the body body_maxlen = 160 @@ -85,67 +83,69 @@ class NotifyClickSend(NotifyBase): default_batch_size = 1000 # Define object templates - templates = ( - '{schema}://{user}:{apikey}@{targets}', - ) + templates = ("{schema}://{user}:{apikey}@{targets}",) # Define our template 
tokens - template_tokens = dict(NotifyBase.template_tokens, **{ - 'user': { - 'name': _('User Name'), - 'type': 'string', - 'required': True, + template_tokens = dict( + NotifyBase.template_tokens, + **{ + "user": { + "name": _("User Name"), + "type": "string", + "required": True, + }, + "apikey": { + "name": _("API Key"), + "type": "string", + "private": True, + "required": True, + "map_to": "password", + }, + "target_phone": { + "name": _("Target Phone No"), + "type": "string", + "prefix": "+", + "regex": (r"^[0-9\s)(+-]+$", "i"), + "map_to": "targets", + }, + "targets": { + "name": _("Targets"), + "type": "list:string", + "required": True, + }, }, - 'apikey': { - 'name': _('API Key'), - 'type': 'string', - 'private': True, - 'required': True, - 'map_to': 'password', - }, - 'target_phone': { - 'name': _('Target Phone No'), - 'type': 'string', - 'prefix': '+', - 'regex': (r'^[0-9\s)(+-]+$', 'i'), - 'map_to': 'targets', - }, - 'targets': { - 'name': _('Targets'), - 'type': 'list:string', - 'required': True, - }, - }) + ) # Define our template arguments - template_args = dict(NotifyBase.template_args, **{ - 'to': { - 'alias_of': 'targets', + template_args = dict( + NotifyBase.template_args, + **{ + "to": { + "alias_of": "targets", + }, + "key": { + "alias_of": "apikey", + }, + "batch": { + "name": _("Batch Mode"), + "type": "bool", + "default": False, + }, }, - 'key': { - 'alias_of': 'apikey', - }, - 'batch': { - 'name': _('Batch Mode'), - 'type': 'bool', - 'default': False, - }, - }) + ) def __init__(self, targets=None, batch=False, **kwargs): - """ - Initialize ClickSend Object - """ + """Initialize ClickSend Object.""" super().__init__(**kwargs) # Prepare Batch Mode Flag self.batch = batch # Parse our targets - self.targets = list() + self.targets = [] if not (self.user and self.password): - msg = 'A ClickSend user/pass was not provided.' + msg = "A ClickSend user/pass was not provided." 
self.logger.warning(msg) raise TypeError(msg) @@ -154,51 +154,50 @@ def __init__(self, targets=None, batch=False, **kwargs): result = is_phone_no(target) if not result: self.logger.warning( - 'Dropped invalid phone # ' - '({}) specified.'.format(target), + f"Dropped invalid phone # ({target}) specified.", ) continue # store valid phone number - self.targets.append(result['full']) + self.targets.append(result["full"]) - def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): - """ - Perform ClickSend Notification - """ + def send(self, body, title="", notify_type=NotifyType.INFO, **kwargs): + """Perform ClickSend Notification.""" if len(self.targets) == 0: # There were no services to notify - self.logger.warning('There were no ClickSend targets to notify.') + self.logger.warning("There were no ClickSend targets to notify.") return False headers = { - 'User-Agent': self.app_id, - 'Content-Type': 'application/json; charset=utf-8', + "User-Agent": self.app_id, + "Content-Type": "application/json; charset=utf-8", } # error tracking (used for function return) has_error = False # prepare JSON Object - payload = { - 'messages': [] - } + payload = {"messages": []} # Send in batches if identified to do so default_batch_size = 1 if not self.batch else self.default_batch_size for index in range(0, len(self.targets), default_batch_size): - payload['messages'] = [{ - 'source': 'php', - 'body': body, - 'to': '+{}'.format(to), - } for to in self.targets[index:index + default_batch_size]] - - self.logger.debug('ClickSend POST URL: %s (cert_verify=%r)' % ( - self.notify_url, self.verify_certificate, - )) - self.logger.debug('ClickSend Payload: %s' % str(payload)) + payload["messages"] = [ + { + "source": "php", + "body": body, + "to": f"+{to}", + } + for to in self.targets[index : index + default_batch_size] + ] + + self.logger.debug( + "ClickSend POST URL:" + f" {self.notify_url} (cert_verify={self.verify_certificate!r})" + ) + self.logger.debug(f"ClickSend Payload: 
{payload!s}") # Always call throttle before any remote server i/o is made self.throttle() @@ -213,22 +212,27 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): ) if r.status_code != requests.codes.ok: # We had a problem - status_str = \ - NotifyClickSend.http_response_code_lookup( - r.status_code, CLICKSEND_HTTP_ERROR_MAP) + status_str = NotifyClickSend.http_response_code_lookup( + r.status_code, CLICKSEND_HTTP_ERROR_MAP + ) self.logger.warning( - 'Failed to send {} ClickSend notification{}: ' - '{}{}error={}.'.format( - len(payload['messages']), - ' to {}'.format(self.targets[index]) - if default_batch_size == 1 else '(s)', + "Failed to send {} ClickSend notification{}: " + "{}{}error={}.".format( + len(payload["messages"]), + ( + f" to {self.targets[index]}" + if default_batch_size == 1 + else "(s)" + ), status_str, - ', ' if status_str else '', - r.status_code)) + ", " if status_str else "", + r.status_code, + ) + ) self.logger.debug( - 'Response Details:\r\n{}'.format(r.content)) + "Response Details:\r\n%r", (r.content or b"")[:2000]) # Mark our failure has_error = True @@ -236,18 +240,22 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): else: self.logger.info( - 'Sent {} ClickSend notification{}.' 
- .format( - len(payload['messages']), - ' to {}'.format(self.targets[index]) - if default_batch_size == 1 else '(s)', - )) + "Sent {} ClickSend notification{}.".format( + len(payload["messages"]), + ( + f" to {self.targets[index]}" + if default_batch_size == 1 + else "(s)" + ), + ) + ) except requests.RequestException as e: self.logger.warning( - 'A Connection error occurred sending {} ClickSend ' - 'notification(s).'.format(len(payload['messages']))) - self.logger.debug('Socket Exception: %s' % str(e)) + "A Connection error occurred sending {} ClickSend " + "notification(s).".format(len(payload["messages"])) + ) + self.logger.debug(f"Socket Exception: {e!s}") # Mark our failure has_error = True @@ -256,90 +264,88 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): return not has_error def url(self, privacy=False, *args, **kwargs): - """ - Returns the URL built dynamically based on specified arguments. - """ + """Returns the URL built dynamically based on specified arguments.""" # Define any URL parameters params = { - 'batch': 'yes' if self.batch else 'no', + "batch": "yes" if self.batch else "no", } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) # Setup Authentication - auth = '{user}:{password}@'.format( - user=NotifyClickSend.quote(self.user, safe=''), + auth = "{user}:{password}@".format( + user=NotifyClickSend.quote(self.user, safe=""), password=self.pprint( - self.password, privacy, mode=PrivacyMode.Secret, safe=''), + self.password, privacy, mode=PrivacyMode.Secret, safe="" + ), ) - return '{schema}://{auth}{targets}?{params}'.format( + return "{schema}://{auth}{targets}?{params}".format( schema=self.secure_protocol, auth=auth, - targets='/'.join( - [NotifyClickSend.quote(x, safe='') for x in self.targets]), + targets="/".join( + [NotifyClickSend.quote(x, safe="") for x in self.targets] + ), params=NotifyClickSend.urlencode(params), ) @property def url_identifier(self): - """ - Returns all 
of the identifiers that make this URL unique from - another simliar one. Targets or end points should never be identified - here. + """Returns all of the identifiers that make this URL unique from + another simliar one. + + Targets or end points should never be identified here. """ return (self.secure_protocol, self.user, self.password) def __len__(self): - """ - Returns the number of targets associated with this notification - """ + """Returns the number of targets associated with this notification.""" # # Factor batch into calculation # batch_size = 1 if not self.batch else self.default_batch_size targets = len(self.targets) if batch_size > 1: - targets = int(targets / batch_size) + \ - (1 if targets % batch_size else 0) + targets = int(targets / batch_size) + ( + 1 if targets % batch_size else 0 + ) return targets @staticmethod def parse_url(url): - """ - Parses the URL and returns enough arguments that can allow - us to re-instantiate this object. - - """ + """Parses the URL and returns enough arguments that can allow us to re- + instantiate this object.""" results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # All elements are targets - results['targets'] = [NotifyClickSend.unquote(results['host'])] + results["targets"] = [NotifyClickSend.unquote(results["host"])] # All entries after the hostname are additional targets - results['targets'].extend( - NotifyClickSend.split_path(results['fullpath'])) + results["targets"].extend( + NotifyClickSend.split_path(results["fullpath"]) + ) # Get Batch Mode Flag - results['batch'] = \ - parse_bool(results['qsd'].get('batch', False)) + results["batch"] = parse_bool(results["qsd"].get("batch", False)) # API Key - if 'key' in results['qsd'] and len(results['qsd']['key']): + if "key" in results["qsd"] and len(results["qsd"]["key"]): # Extract the API Key from an argument - results['password'] = \ - 
NotifyClickSend.unquote(results['qsd']['key']) + results["password"] = NotifyClickSend.unquote( + results["qsd"]["key"] + ) # Support the 'to' variable so that we can support rooms this way too # The 'to' makes it easier to use yaml configuration - if 'to' in results['qsd'] and len(results['qsd']['to']): - results['targets'] += \ - NotifyClickSend.parse_phone_no(results['qsd']['to']) + if "to" in results["qsd"] and len(results["qsd"]["to"]): + results["targets"] += NotifyClickSend.parse_phone_no( + results["qsd"]["to"] + ) return results diff --git a/libs/apprise/plugins/custom_form.py b/libs/apprise/plugins/custom_form.py index 0a1ef96a45..5cae8c8f26 100644 --- a/libs/apprise/plugins/custom_form.py +++ b/libs/apprise/plugins/custom_form.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. -# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -27,40 +26,30 @@ # POSSIBILITY OF SUCH DAMAGE. 
import re + import requests -from .base import NotifyBase -from ..url import PrivacyMode -from ..common import NotifyImageSize -from ..common import NotifyType +from ..common import NotifyImageSize, NotifyType from ..locale import gettext_lazy as _ +from ..url import PrivacyMode +from .base import NotifyBase class FORMPayloadField: - """ - Identifies the fields available in the FORM Payload - """ - VERSION = 'version' - TITLE = 'title' - MESSAGE = 'message' - MESSAGETYPE = 'type' + """Identifies the fields available in the FORM Payload.""" + + VERSION = "version" + TITLE = "title" + MESSAGE = "message" + MESSAGETYPE = "type" # Defines the method to send the notification -METHODS = ( - 'POST', - 'GET', - 'DELETE', - 'PUT', - 'HEAD', - 'PATCH' -) +METHODS = ("POST", "GET", "DELETE", "PUT", "HEAD", "PATCH") class NotifyForm(NotifyBase): - """ - A wrapper for Form Notifications - """ + """A wrapper for Form Notifications.""" # Support # - file* @@ -72,28 +61,29 @@ class NotifyForm(NotifyBase): # - file # The code will convert the ? or * to the digit increments __attach_as_re = re.compile( - r'((?P(?P[a-z0-9_-]+)?' - r'(?P[*?+$:.%]+)(?P[a-z0-9_-]+))' - r'|(?P(?P[a-z0-9_-]+)(?P[*?+$:.%]?)))', - re.IGNORECASE) + r"((?P(?P[a-z0-9_-]+)?" 
+ r"(?P[*?+$:.%]+)(?P[a-z0-9_-]+))" + r"|(?P(?P[a-z0-9_-]+)(?P[*?+$:.%]?)))", + re.IGNORECASE, + ) # Our count - attach_as_count = '{:02d}' + attach_as_count = "{:02d}" # the default attach_as value - attach_as_default = f'file{attach_as_count}' + attach_as_default = f"file{attach_as_count}" # The default descriptive name associated with the Notification - service_name = 'Form' + service_name = "Form" # The default protocol - protocol = 'form' + protocol = "form" # The default secure protocol - secure_protocol = 'forms' + secure_protocol = "forms" # A URL that takes you to the setup/help of the specific protocol - setup_url = 'https://github.com/caronc/apprise/wiki/Notify_Custom_Form' + setup_url = "https://appriseit.com/services/form/" # Support attachments attachment_support = True @@ -109,97 +99,110 @@ class NotifyForm(NotifyBase): # Version: Major.Minor, Major is only updated if the entire schema is # changed. If just adding new items (or removing old ones, only increment # the Minor! - form_version = '1.0' + form_version = "1.0" # Define object templates templates = ( - '{schema}://{host}', - '{schema}://{host}:{port}', - '{schema}://{user}@{host}', - '{schema}://{user}@{host}:{port}', - '{schema}://{user}:{password}@{host}', - '{schema}://{user}:{password}@{host}:{port}', + "{schema}://{host}", + "{schema}://{host}:{port}", + "{schema}://{user}@{host}", + "{schema}://{user}@{host}:{port}", + "{schema}://{user}:{password}@{host}", + "{schema}://{user}:{password}@{host}:{port}", ) # Define our tokens; these are the minimum tokens required required to # be passed into this function (as arguments). 
The syntax appends any # previously defined in the base package and builds onto them - template_tokens = dict(NotifyBase.template_tokens, **{ - 'host': { - 'name': _('Hostname'), - 'type': 'string', - 'required': True, - }, - 'port': { - 'name': _('Port'), - 'type': 'int', - 'min': 1, - 'max': 65535, - }, - 'user': { - 'name': _('Username'), - 'type': 'string', + template_tokens = dict( + NotifyBase.template_tokens, + **{ + "host": { + "name": _("Hostname"), + "type": "string", + "required": True, + }, + "port": { + "name": _("Port"), + "type": "int", + "min": 1, + "max": 65535, + }, + "user": { + "name": _("Username"), + "type": "string", + }, + "password": { + "name": _("Password"), + "type": "string", + "private": True, + }, }, - 'password': { - 'name': _('Password'), - 'type': 'string', - 'private': True, - }, - - }) + ) # Define our template arguments - template_args = dict(NotifyBase.template_args, **{ - 'method': { - 'name': _('Fetch Method'), - 'type': 'choice:string', - 'values': METHODS, - 'default': METHODS[0], - }, - 'attach-as': { - 'name': _('Attach File As'), - 'type': 'string', - 'default': 'file*', - 'map_to': 'attach_as', + template_args = dict( + NotifyBase.template_args, + **{ + "method": { + "name": _("Fetch Method"), + "type": "choice:string", + "values": METHODS, + "default": METHODS[0], + }, + "attach-as": { + "name": _("Attach File As"), + "type": "string", + "default": "file*", + "map_to": "attach_as", + }, }, - }) + ) # Define any kwargs we're using template_kwargs = { - 'headers': { - 'name': _('HTTP Header'), - 'prefix': '+', + "headers": { + "name": _("HTTP Header"), + "prefix": "+", }, - 'payload': { - 'name': _('Payload Extras'), - 'prefix': ':', + "payload": { + "name": _("Payload Extras"), + "prefix": ":", }, - 'params': { - 'name': _('GET Params'), - 'prefix': '-', + "params": { + "name": _("GET Params"), + "prefix": "-", }, } - def __init__(self, headers=None, method=None, payload=None, params=None, - attach_as=None, **kwargs): - 
""" - Initialize Form Object + def __init__( + self, + headers=None, + method=None, + payload=None, + params=None, + attach_as=None, + **kwargs, + ): + """Initialize Form Object. headers can be a dictionary of key/value pairs that you want to additionally include as part of the server headers to post with - """ super().__init__(**kwargs) - self.fullpath = kwargs.get('fullpath') + self.fullpath = kwargs.get("fullpath") if not isinstance(self.fullpath, str): - self.fullpath = '' + self.fullpath = "" - self.method = self.template_args['method']['default'] \ - if not isinstance(method, str) else method.upper() + self.method = ( + self.template_args["method"]["default"] + if not isinstance(method, str) + else method.upper() + ) if self.method not in METHODS: - msg = 'The method specified ({}) is invalid.'.format(method) + msg = f"The method specified ({method}) is invalid." self.logger.warning(msg) raise TypeError(msg) @@ -212,24 +215,23 @@ def __init__(self, headers=None, method=None, payload=None, params=None, else: result = self.__attach_as_re.match(attach_as.strip()) if not result: - msg = 'The attach-as specified ({}) is invalid.'.format( - attach_as) + msg = f"The attach-as specified ({attach_as}) is invalid." 
self.logger.warning(msg) raise TypeError(msg) - self.attach_as = '' + self.attach_as = "" self.attach_multi_support = False - if result.group('match1'): - if result.group('id1a'): - self.attach_as += result.group('id1a') + if result.group("match1"): + if result.group("id1a"): + self.attach_as += result.group("id1a") self.attach_as += self.attach_as_count self.attach_multi_support = True - self.attach_as += result.group('id1b') + self.attach_as += result.group("id1b") else: # result.group('match2'): - self.attach_as += result.group('id2') - if result.group('wc2'): + self.attach_as += result.group("id2") + if result.group("wc2"): self.attach_as += self.attach_as_count self.attach_multi_support = True @@ -272,15 +274,19 @@ def __init__(self, headers=None, method=None, payload=None, params=None, return - def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, - **kwargs): - """ - Perform Form Notification - """ + def send( + self, + body, + title="", + notify_type=NotifyType.INFO, + attach=None, + **kwargs, + ): + """Perform Form Notification.""" # Prepare HTTP Headers headers = { - 'User-Agent': self.app_id, + "User-Agent": self.app_id, } # Apply any/all header over-rides defined @@ -294,40 +300,55 @@ def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, if not attachment: # We could not access the attachment self.logger.error( - 'Could not access attachment {}.'.format( - attachment.url(privacy=True))) + "Could not access attachment" + f" {attachment.url(privacy=True)}." 
+ ) return False try: files.append(( - self.attach_as.format(no) - if self.attach_multi_support else self.attach_as, ( - attachment.name - if attachment.name else f'file{no:03}.dat', - open(attachment.path, 'rb'), - attachment.mimetype) + ( + self.attach_as.format(no) + if self.attach_multi_support + else self.attach_as + ), + ( + ( + attachment.name + if attachment.name + else f"file{no:03}.dat" + ), + # file handle is safely closed in `finally`; inline + # open is intentional + open(attachment.path, "rb"), # noqa: SIM115 + attachment.mimetype, + ), )) - except (OSError, IOError) as e: + except OSError as e: self.logger.warning( - 'An I/O error occurred while opening {}.'.format( - attachment.name if attachment else 'attachment')) - self.logger.debug('I/O Exception: %s' % str(e)) + "An I/O error occurred while opening {}.".format( + attachment.name if attachment else "attachment" + ) + ) + self.logger.debug(f"I/O Exception: {e!s}") return False if not self.attach_multi_support and no > 1: self.logger.warning( - 'Multiple attachments provided while ' - 'form:// Multi-Attachment Support not enabled') + "Multiple attachments provided while " + "form:// Multi-Attachment Support not enabled" + ) # prepare Form Object payload = {} for key, value in ( - (FORMPayloadField.VERSION, self.form_version), - (FORMPayloadField.TITLE, title), - (FORMPayloadField.MESSAGE, body), - (FORMPayloadField.MESSAGETYPE, notify_type)): + (FORMPayloadField.VERSION, self.form_version), + (FORMPayloadField.TITLE, title), + (FORMPayloadField.MESSAGE, body), + (FORMPayloadField.MESSAGETYPE, notify_type.value), + ): if not self.payload_map[key]: # Do not store element in payload response @@ -342,36 +363,37 @@ def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, auth = (self.user, self.password) # Set our schema - schema = 'https' if self.secure else 'http' + schema = "https" if self.secure else "http" - url = '%s://%s' % (schema, self.host) + url = f"{schema}://{self.host}" if 
isinstance(self.port, int): - url += ':%d' % self.port + url += f":{self.port}" url += self.fullpath - self.logger.debug('Form %s URL: %s (cert_verify=%r)' % ( - self.method, url, self.verify_certificate, - )) - self.logger.debug('Form Payload: %s' % str(payload)) + self.logger.debug( + f"Form {self.method} URL:" + f" {url} (cert_verify={self.verify_certificate!r})" + ) + self.logger.debug(f"Form Payload: {payload!s}") # Always call throttle before any remote server i/o is made self.throttle() - if self.method == 'GET': + if self.method == "GET": method = requests.get payload.update(self.params) - elif self.method == 'PUT': + elif self.method == "PUT": method = requests.put - elif self.method == 'PATCH': + elif self.method == "PATCH": method = requests.patch - elif self.method == 'DELETE': + elif self.method == "DELETE": method = requests.delete - elif self.method == 'HEAD': + elif self.method == "HEAD": method = requests.head else: # POST @@ -380,9 +402,9 @@ def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, try: r = method( url, - files=None if not files else files, - data=payload if self.method != 'GET' else None, - params=payload if self.method == 'GET' else self.params, + files=files if files else None, + data=payload if self.method != "GET" else None, + params=payload if self.method == "GET" else self.params, headers=headers, auth=auth, verify=self.verify_certificate, @@ -390,38 +412,43 @@ def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, ) if r.status_code < 200 or r.status_code >= 300: # We had a problem - status_str = \ - NotifyForm.http_response_code_lookup(r.status_code) + status_str = NotifyForm.http_response_code_lookup( + r.status_code + ) self.logger.warning( - 'Failed to send Form %s notification: %s%serror=%s.', + "Failed to send Form %s notification: %s%serror=%s.", self.method, status_str, - ', ' if status_str else '', - str(r.status_code)) + ", " if status_str else "", + r.status_code, + ) - 
self.logger.debug('Response Details:\r\n{}'.format(r.content)) + self.logger.debug( + "Response Details:\r\n%r", (r.content or b"")[:2000]) # Return; we're done return False else: - self.logger.info('Sent Form %s notification.', self.method) + self.logger.info("Sent Form %s notification.", self.method) except requests.RequestException as e: self.logger.warning( - 'A Connection error occurred sending Form ' - 'notification to %s.' % self.host) - self.logger.debug('Socket Exception: %s' % str(e)) + "A Connection error occurred sending Form " + f"notification to {self.host}." + ) + self.logger.debug(f"Socket Exception: {e!s}") # Return; we're done return False - except (OSError, IOError) as e: + except OSError as e: self.logger.warning( - 'An I/O error occurred while reading one of the ' - 'attached files.') - self.logger.debug('I/O Exception: %s' % str(e)) + "An I/O error occurred while reading one of the " + "attached files." + ) + self.logger.debug(f"I/O Exception: {e!s}") return False finally: @@ -433,107 +460,114 @@ def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, @property def url_identifier(self): - """ - Returns all of the identifiers that make this URL unique from - another simliar one. Targets or end points should never be identified - here. + """Returns all of the identifiers that make this URL unique from + another simliar one. + + Targets or end points should never be identified here. """ return ( self.secure_protocol if self.secure else self.protocol, - self.user, self.password, self.host, + self.user, + self.password, + self.host, self.port if self.port else (443 if self.secure else 80), - self.fullpath.rstrip('/'), + self.fullpath.rstrip("/"), ) def url(self, privacy=False, *args, **kwargs): - """ - Returns the URL built dynamically based on specified arguments. 
- """ + """Returns the URL built dynamically based on specified arguments.""" # Define any URL parameters params = { - 'method': self.method, + "method": self.method, } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) # Append our headers into our parameters - params.update({'+{}'.format(k): v for k, v in self.headers.items()}) + params.update({f"+{k}": v for k, v in self.headers.items()}) # Append our GET params into our parameters - params.update({'-{}'.format(k): v for k, v in self.params.items()}) + params.update({f"-{k}": v for k, v in self.params.items()}) # Append our payload extra's into our parameters - params.update( - {':{}'.format(k): v for k, v in self.payload_extras.items()}) - params.update( - {':{}'.format(k): v for k, v in self.payload_overrides.items()}) + params.update({f":{k}": v for k, v in self.payload_extras.items()}) + params.update({f":{k}": v for k, v in self.payload_overrides.items()}) if self.attach_as != self.attach_as_default: # Provide Attach-As extension details - params['attach-as'] = self.attach_as + params["attach-as"] = self.attach_as # Determine Authentication - auth = '' + auth = "" if self.user and self.password: - auth = '{user}:{password}@'.format( - user=NotifyForm.quote(self.user, safe=''), + auth = "{user}:{password}@".format( + user=NotifyForm.quote(self.user, safe=""), password=self.pprint( - self.password, privacy, mode=PrivacyMode.Secret, safe=''), + self.password, privacy, mode=PrivacyMode.Secret, safe="" + ), ) elif self.user: - auth = '{user}@'.format( - user=NotifyForm.quote(self.user, safe=''), + auth = "{user}@".format( + user=NotifyForm.quote(self.user, safe=""), ) default_port = 443 if self.secure else 80 - - return '{schema}://{auth}{hostname}{port}{fullpath}?{params}'.format( + return "{schema}://{auth}{hostname}{port}{fullpath}?{params}".format( schema=self.secure_protocol if self.secure else self.protocol, auth=auth, # never encode hostname since we're expecting 
it to be a valid one hostname=self.host, - port='' if self.port is None or self.port == default_port - else ':{}'.format(self.port), - fullpath=NotifyForm.quote(self.fullpath, safe='/') - if self.fullpath else '/', + port=( + "" + if self.port is None or self.port == default_port + else f":{self.port}" + ), + fullpath=( + NotifyForm.quote(self.fullpath, safe="/") + if self.fullpath + else "/" + ), params=NotifyForm.urlencode(params), ) @staticmethod def parse_url(url): - """ - Parses the URL and returns enough arguments that can allow - us to re-instantiate this object. - - """ + """Parses the URL and returns enough arguments that can allow us to re- + instantiate this object.""" results = NotifyBase.parse_url(url) if not results: # We're done early as we couldn't load the results return results # store any additional payload extra's defined - results['payload'] = {NotifyForm.unquote(x): NotifyForm.unquote(y) - for x, y in results['qsd:'].items()} + results["payload"] = { + NotifyForm.unquote(x): NotifyForm.unquote(y) + for x, y in results["qsd:"].items() + } # Add our headers that the user can potentially over-ride if they wish # to to our returned result set and tidy entries by unquoting them - results['headers'] = {NotifyForm.unquote(x): NotifyForm.unquote(y) - for x, y in results['qsd+'].items()} + results["headers"] = { + NotifyForm.unquote(x): NotifyForm.unquote(y) + for x, y in results["qsd+"].items() + } # Add our GET paramters in the event the user wants to pass these along - results['params'] = {NotifyForm.unquote(x): NotifyForm.unquote(y) - for x, y in results['qsd-'].items()} + results["params"] = { + NotifyForm.unquote(x): NotifyForm.unquote(y) + for x, y in results["qsd-"].items() + } # Allow Attach-As Support which over-rides the name of the filename # posted with the form:// # the default is file01, file02, file03, etc - if 'attach-as' in results['qsd'] and len(results['qsd']['attach-as']): - results['attach_as'] = results['qsd']['attach-as'] + if 
"attach-as" in results["qsd"] and len(results["qsd"]["attach-as"]): + results["attach_as"] = results["qsd"]["attach-as"] # Set method if not otherwise set - if 'method' in results['qsd'] and len(results['qsd']['method']): - results['method'] = NotifyForm.unquote(results['qsd']['method']) + if "method" in results["qsd"] and len(results["qsd"]["method"]): + results["method"] = NotifyForm.unquote(results["qsd"]["method"]) return results diff --git a/libs/apprise/plugins/custom_json.py b/libs/apprise/plugins/custom_json.py index bd232f6e95..3a7c8dc162 100644 --- a/libs/apprise/plugins/custom_json.py +++ b/libs/apprise/plugins/custom_json.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. -# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -26,55 +25,47 @@ # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. -import requests from json import dumps +import logging + +import requests from .. 
import exception -from .base import NotifyBase -from ..url import PrivacyMode -from ..common import NotifyImageSize -from ..common import NotifyType +from ..common import NotifyImageSize, NotifyType from ..locale import gettext_lazy as _ +from ..url import PrivacyMode +from ..utils.sanitize import sanitize_payload +from .base import NotifyBase class JSONPayloadField: - """ - Identifies the fields available in the JSON Payload - """ - VERSION = 'version' - TITLE = 'title' - MESSAGE = 'message' - ATTACHMENTS = 'attachments' - MESSAGETYPE = 'type' + """Identifies the fields available in the JSON Payload.""" + + VERSION = "version" + TITLE = "title" + MESSAGE = "message" + ATTACHMENTS = "attachments" + MESSAGETYPE = "type" # Defines the method to send the notification -METHODS = ( - 'POST', - 'GET', - 'DELETE', - 'PUT', - 'HEAD', - 'PATCH' -) +METHODS = ("POST", "GET", "DELETE", "PUT", "HEAD", "PATCH") class NotifyJSON(NotifyBase): - """ - A wrapper for JSON Notifications - """ + """A wrapper for JSON Notifications.""" # The default descriptive name associated with the Notification - service_name = 'JSON' + service_name = "JSON" # The default protocol - protocol = 'json' + protocol = "json" # The default secure protocol - secure_protocol = 'jsons' + secure_protocol = "jsons" # A URL that takes you to the setup/help of the specific protocol - setup_url = 'https://github.com/caronc/apprise/wiki/Notify_Custom_JSON' + setup_url = "https://appriseit.com/services/json/" # Support attachments attachment_support = True @@ -90,91 +81,98 @@ class NotifyJSON(NotifyBase): # Version: Major.Minor, Major is only updated if the entire schema is # changed. If just adding new items (or removing old ones, only increment # the Minor! 
- json_version = '1.0' + json_version = "1.0" # Define object templates templates = ( - '{schema}://{host}', - '{schema}://{host}:{port}', - '{schema}://{user}@{host}', - '{schema}://{user}@{host}:{port}', - '{schema}://{user}:{password}@{host}', - '{schema}://{user}:{password}@{host}:{port}', + "{schema}://{host}", + "{schema}://{host}:{port}", + "{schema}://{user}@{host}", + "{schema}://{user}@{host}:{port}", + "{schema}://{user}:{password}@{host}", + "{schema}://{user}:{password}@{host}:{port}", ) # Define our tokens; these are the minimum tokens required required to # be passed into this function (as arguments). The syntax appends any # previously defined in the base package and builds onto them - template_tokens = dict(NotifyBase.template_tokens, **{ - 'host': { - 'name': _('Hostname'), - 'type': 'string', - 'required': True, - }, - 'port': { - 'name': _('Port'), - 'type': 'int', - 'min': 1, - 'max': 65535, + template_tokens = dict( + NotifyBase.template_tokens, + **{ + "host": { + "name": _("Hostname"), + "type": "string", + "required": True, + }, + "port": { + "name": _("Port"), + "type": "int", + "min": 1, + "max": 65535, + }, + "user": { + "name": _("Username"), + "type": "string", + }, + "password": { + "name": _("Password"), + "type": "string", + "private": True, + }, }, - 'user': { - 'name': _('Username'), - 'type': 'string', - }, - 'password': { - 'name': _('Password'), - 'type': 'string', - 'private': True, - }, - - }) + ) # Define our template arguments - template_args = dict(NotifyBase.template_args, **{ - 'method': { - 'name': _('Fetch Method'), - 'type': 'choice:string', - 'values': METHODS, - 'default': METHODS[0], + template_args = dict( + NotifyBase.template_args, + **{ + "method": { + "name": _("Fetch Method"), + "type": "choice:string", + "values": METHODS, + "default": METHODS[0], + }, }, - }) + ) # Define any kwargs we're using template_kwargs = { - 'headers': { - 'name': _('HTTP Header'), - 'prefix': '+', + "headers": { + "name": _("HTTP 
Header"), + "prefix": "+", }, - 'payload': { - 'name': _('Payload Extras'), - 'prefix': ':', + "payload": { + "name": _("Payload Extras"), + "prefix": ":", }, - 'params': { - 'name': _('GET Params'), - 'prefix': '-', + "params": { + "name": _("GET Params"), + "prefix": "-", }, } - def __init__(self, headers=None, method=None, payload=None, params=None, - **kwargs): - """ - Initialize JSON Object + def __init__( + self, headers=None, method=None, payload=None, params=None, **kwargs + ): + """Initialize JSON Object. headers can be a dictionary of key/value pairs that you want to additionally include as part of the server headers to post with - """ super().__init__(**kwargs) - self.fullpath = kwargs.get('fullpath') + self.fullpath = kwargs.get("fullpath") if not isinstance(self.fullpath, str): - self.fullpath = '' + self.fullpath = "" - self.method = self.template_args['method']['default'] \ - if not isinstance(method, str) else method.upper() + self.method = ( + self.template_args["method"]["default"] + if not isinstance(method, str) + else method.upper() + ) if self.method not in METHODS: - msg = 'The method specified ({}) is invalid.'.format(method) + msg = f"The method specified ({method}) is invalid." 
self.logger.warning(msg) raise TypeError(msg) @@ -195,16 +193,20 @@ def __init__(self, headers=None, method=None, payload=None, params=None, return - def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, - **kwargs): - """ - Perform JSON Notification - """ + def send( + self, + body, + title="", + notify_type=NotifyType.INFO, + attach=None, + **kwargs, + ): + """Perform JSON Notification.""" # Prepare HTTP Headers headers = { - 'User-Agent': self.app_id, - 'Content-Type': 'application/json' + "User-Agent": self.app_id, + "Content-Type": "application/json", } # Apply any/all header over-rides defined @@ -218,28 +220,34 @@ def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, if not attachment: # We could not access the attachment self.logger.error( - 'Could not access Custom JSON attachment {}.'.format( - attachment.url(privacy=True))) + "Could not access Custom JSON attachment" + f" {attachment.url(privacy=True)}." + ) return False try: attachments.append({ - "filename": attachment.name - if attachment.name else f'file{no:03}.dat', - 'base64': attachment.base64(), - 'mimetype': attachment.mimetype, + "filename": ( + attachment.name + if attachment.name + else f"file{no:03}.dat" + ), + "base64": attachment.base64(), + "mimetype": attachment.mimetype, }) except exception.AppriseException: # We could not access the attachment self.logger.error( - 'Could not access Custom JSON attachment {}.'.format( - attachment.url(privacy=True))) + "Could not access Custom JSON attachment" + f" {attachment.url(privacy=True)}." 
+ ) return False self.logger.debug( - 'Appending Custom JSON attachment {}'.format( - attachment.url(privacy=True))) + "Appending Custom JSON attachment" + f" {attachment.url(privacy=True)}" + ) # Prepare JSON Object payload = { @@ -247,7 +255,7 @@ def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, JSONPayloadField.TITLE: title, JSONPayloadField.MESSAGE: body, JSONPayloadField.ATTACHMENTS: attachments, - JSONPayloadField.MESSAGETYPE: notify_type, + JSONPayloadField.MESSAGETYPE: notify_type.value, } for key, value in self.payload_extras.items(): @@ -271,35 +279,41 @@ def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, auth = (self.user, self.password) # Set our schema - schema = 'https' if self.secure else 'http' + schema = "https" if self.secure else "http" - url = '%s://%s' % (schema, self.host) + url = f"{schema}://{self.host}" if isinstance(self.port, int): - url += ':%d' % self.port + url += f":{self.port}" url += self.fullpath - self.logger.debug('JSON POST URL: %s (cert_verify=%r)' % ( - url, self.verify_certificate, - )) - self.logger.debug('JSON Payload: %s' % str(payload)) + # Some Debug Logging + if self.logger.isEnabledFor(logging.DEBUG): + # Due to attachments; output can be quite heavy and io intensive + # To accommodate this, we only show our debug payload information + # if required. 
+ self.logger.debug( + f"JSON POST URL: {url} " + f"(cert_verify={self.verify_certificate!r})" + ) + self.logger.debug("JSON Payload: %s", sanitize_payload(payload)) # Always call throttle before any remote server i/o is made self.throttle() - if self.method == 'GET': + if self.method == "GET": method = requests.get - elif self.method == 'PUT': + elif self.method == "PUT": method = requests.put - elif self.method == 'PATCH': + elif self.method == "PATCH": method = requests.patch - elif self.method == 'DELETE': + elif self.method == "DELETE": method = requests.delete - elif self.method == 'HEAD': + elif self.method == "HEAD": method = requests.head else: # POST @@ -317,29 +331,33 @@ def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, ) if r.status_code < 200 or r.status_code >= 300: # We had a problem - status_str = \ - NotifyJSON.http_response_code_lookup(r.status_code) + status_str = NotifyJSON.http_response_code_lookup( + r.status_code + ) self.logger.warning( - 'Failed to send JSON %s notification: %s%serror=%s.', + "Failed to send JSON %s notification: %s%serror=%s.", self.method, status_str, - ', ' if status_str else '', - str(r.status_code)) + ", " if status_str else "", + r.status_code, + ) - self.logger.debug('Response Details:\r\n{}'.format(r.content)) + self.logger.debug( + "Response Details:\r\n%r", (r.content or b"")[:2000]) # Return; we're done return False else: - self.logger.info('Sent JSON %s notification.', self.method) + self.logger.info("Sent JSON %s notification.", self.method) except requests.RequestException as e: self.logger.warning( - 'A Connection error occurred sending JSON ' - 'notification to %s.' % self.host) - self.logger.debug('Socket Exception: %s' % str(e)) + "A Connection error occurred sending JSON " + f"notification to {self.host}." 
+ ) + self.logger.debug(f"Socket Exception: {e!s}") # Return; we're done return False @@ -348,95 +366,103 @@ def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, @property def url_identifier(self): - """ - Returns all of the identifiers that make this URL unique from - another simliar one. Targets or end points should never be identified - here. + """Returns all of the identifiers that make this URL unique from + another simliar one. + + Targets or end points should never be identified here. """ return ( self.secure_protocol if self.secure else self.protocol, - self.user, self.password, self.host, + self.user, + self.password, + self.host, self.port if self.port else (443 if self.secure else 80), - self.fullpath.rstrip('/'), + self.fullpath.rstrip("/"), ) def url(self, privacy=False, *args, **kwargs): - """ - Returns the URL built dynamically based on specified arguments. - """ + """Returns the URL built dynamically based on specified arguments.""" # Define any URL parameters params = { - 'method': self.method, + "method": self.method, } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) # Append our headers into our parameters - params.update({'+{}'.format(k): v for k, v in self.headers.items()}) + params.update({f"+{k}": v for k, v in self.headers.items()}) # Append our GET params into our parameters - params.update({'-{}'.format(k): v for k, v in self.params.items()}) + params.update({f"-{k}": v for k, v in self.params.items()}) # Append our payload extra's into our parameters - params.update( - {':{}'.format(k): v for k, v in self.payload_extras.items()}) + params.update({f":{k}": v for k, v in self.payload_extras.items()}) # Determine Authentication - auth = '' + auth = "" if self.user and self.password: - auth = '{user}:{password}@'.format( - user=NotifyJSON.quote(self.user, safe=''), + auth = "{user}:{password}@".format( + user=NotifyJSON.quote(self.user, safe=""), password=self.pprint( - 
self.password, privacy, mode=PrivacyMode.Secret, safe=''), + self.password, privacy, mode=PrivacyMode.Secret, safe="" + ), ) elif self.user: - auth = '{user}@'.format( - user=NotifyJSON.quote(self.user, safe=''), + auth = "{user}@".format( + user=NotifyJSON.quote(self.user, safe=""), ) default_port = 443 if self.secure else 80 - - return '{schema}://{auth}{hostname}{port}{fullpath}?{params}'.format( + return "{schema}://{auth}{hostname}{port}{fullpath}?{params}".format( schema=self.secure_protocol if self.secure else self.protocol, auth=auth, # never encode hostname since we're expecting it to be a valid one hostname=self.host, - port='' if self.port is None or self.port == default_port - else ':{}'.format(self.port), - fullpath=NotifyJSON.quote(self.fullpath, safe='/') - if self.fullpath else '/', + port=( + "" + if self.port is None or self.port == default_port + else f":{self.port}" + ), + fullpath=( + NotifyJSON.quote(self.fullpath, safe="/") + if self.fullpath + else "/" + ), params=NotifyJSON.urlencode(params), ) @staticmethod def parse_url(url): - """ - Parses the URL and returns enough arguments that can allow - us to re-instantiate this object. 
- - """ + """Parses the URL and returns enough arguments that can allow us to re- + instantiate this object.""" results = NotifyBase.parse_url(url) if not results: # We're done early as we couldn't load the results return results # store any additional payload extra's defined - results['payload'] = {NotifyJSON.unquote(x): NotifyJSON.unquote(y) - for x, y in results['qsd:'].items()} + results["payload"] = { + NotifyJSON.unquote(x): NotifyJSON.unquote(y) + for x, y in results["qsd:"].items() + } # Add our headers that the user can potentially over-ride if they wish # to to our returned result set and tidy entries by unquoting them - results['headers'] = {NotifyJSON.unquote(x): NotifyJSON.unquote(y) - for x, y in results['qsd+'].items()} + results["headers"] = { + NotifyJSON.unquote(x): NotifyJSON.unquote(y) + for x, y in results["qsd+"].items() + } # Add our GET paramters in the event the user wants to pass these along - results['params'] = {NotifyJSON.unquote(x): NotifyJSON.unquote(y) - for x, y in results['qsd-'].items()} + results["params"] = { + NotifyJSON.unquote(x): NotifyJSON.unquote(y) + for x, y in results["qsd-"].items() + } # Set method if not otherwise set - if 'method' in results['qsd'] and len(results['qsd']['method']): - results['method'] = NotifyJSON.unquote(results['qsd']['method']) + if "method" in results["qsd"] and len(results["qsd"]["method"]): + results["method"] = NotifyJSON.unquote(results["qsd"]["method"]) return results diff --git a/libs/apprise/plugins/custom_xml.py b/libs/apprise/plugins/custom_xml.py index ba062f1102..9cbcbcc36f 100644 --- a/libs/apprise/plugins/custom_xml.py +++ b/libs/apprise/plugins/custom_xml.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. 
-# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -26,54 +25,46 @@ # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. +import logging import re + import requests from .. import exception -from .base import NotifyBase -from ..url import PrivacyMode -from ..common import NotifyImageSize -from ..common import NotifyType +from ..common import NotifyImageSize, NotifyType from ..locale import gettext_lazy as _ +from ..url import PrivacyMode +from ..utils.sanitize import sanitize_payload +from .base import NotifyBase class XMLPayloadField: - """ - Identifies the fields available in the JSON Payload - """ - VERSION = 'Version' - TITLE = 'Subject' - MESSAGE = 'Message' - MESSAGETYPE = 'MessageType' + """Identifies the fields available in the JSON Payload.""" + + VERSION = "Version" + TITLE = "Subject" + MESSAGE = "Message" + MESSAGETYPE = "MessageType" # Defines the method to send the notification -METHODS = ( - 'POST', - 'GET', - 'DELETE', - 'PUT', - 'HEAD', - 'PATCH' -) +METHODS = ("POST", "GET", "DELETE", "PUT", "HEAD", "PATCH") class NotifyXML(NotifyBase): - """ - A wrapper for XML Notifications - """ + """A wrapper for XML Notifications.""" # The default descriptive name associated with the Notification - service_name = 'XML' + service_name = "XML" # The default protocol - protocol = 'xml' + protocol = "xml" # The default secure protocol - secure_protocol = 'xmls' + secure_protocol = "xmls" # A URL that takes you to the setup/help of the specific protocol - setup_url = 'https://github.com/caronc/apprise/wiki/Notify_Custom_XML' + setup_url = "https://appriseit.com/services/xml/" # Support attachments attachment_support = True @@ -86,82 +77,87 @@ class NotifyXML(NotifyBase): request_rate_per_sec = 0 # XSD Information - xsd_ver = '1.1' - 
xsd_default_url = \ - 'https://raw.githubusercontent.com/caronc/apprise/master' \ - '/apprise/assets/NotifyXML-{version}.xsd' + xsd_ver = "1.1" + xsd_default_url = ( + "https://raw.githubusercontent.com/caronc/apprise/master" + "/apprise/assets/NotifyXML-{version}.xsd" + ) # Define object templates templates = ( - '{schema}://{host}', - '{schema}://{host}:{port}', - '{schema}://{user}@{host}', - '{schema}://{user}@{host}:{port}', - '{schema}://{user}:{password}@{host}', - '{schema}://{user}:{password}@{host}:{port}', + "{schema}://{host}", + "{schema}://{host}:{port}", + "{schema}://{user}@{host}", + "{schema}://{user}@{host}:{port}", + "{schema}://{user}:{password}@{host}", + "{schema}://{user}:{password}@{host}:{port}", ) # Define our tokens; these are the minimum tokens required required to # be passed into this function (as arguments). The syntax appends any # previously defined in the base package and builds onto them - template_tokens = dict(NotifyBase.template_tokens, **{ - 'host': { - 'name': _('Hostname'), - 'type': 'string', - 'required': True, - }, - 'port': { - 'name': _('Port'), - 'type': 'int', - 'min': 1, - 'max': 65535, - }, - 'user': { - 'name': _('Username'), - 'type': 'string', + template_tokens = dict( + NotifyBase.template_tokens, + **{ + "host": { + "name": _("Hostname"), + "type": "string", + "required": True, + }, + "port": { + "name": _("Port"), + "type": "int", + "min": 1, + "max": 65535, + }, + "user": { + "name": _("Username"), + "type": "string", + }, + "password": { + "name": _("Password"), + "type": "string", + "private": True, + }, }, - 'password': { - 'name': _('Password'), - 'type': 'string', - 'private': True, - }, - - }) + ) # Define our template arguments - template_args = dict(NotifyBase.template_args, **{ - 'method': { - 'name': _('Fetch Method'), - 'type': 'choice:string', - 'values': METHODS, - 'default': METHODS[0], + template_args = dict( + NotifyBase.template_args, + **{ + "method": { + "name": _("Fetch Method"), + 
"type": "choice:string", + "values": METHODS, + "default": METHODS[0], + }, }, - }) + ) # Define any kwargs we're using template_kwargs = { - 'headers': { - 'name': _('HTTP Header'), - 'prefix': '+', + "headers": { + "name": _("HTTP Header"), + "prefix": "+", }, - 'payload': { - 'name': _('Payload Extras'), - 'prefix': ':', + "payload": { + "name": _("Payload Extras"), + "prefix": ":", }, - 'params': { - 'name': _('GET Params'), - 'prefix': '-', + "params": { + "name": _("GET Params"), + "prefix": "-", }, } - def __init__(self, headers=None, method=None, payload=None, params=None, - **kwargs): - """ - Initialize XML Object + def __init__( + self, headers=None, method=None, payload=None, params=None, **kwargs + ): + """Initialize XML Object. headers can be a dictionary of key/value pairs that you want to additionally include as part of the server headers to post with - """ super().__init__(**kwargs) @@ -178,15 +174,18 @@ def __init__(self, headers=None, method=None, payload=None, params=None, """ - self.fullpath = kwargs.get('fullpath') + self.fullpath = kwargs.get("fullpath") if not isinstance(self.fullpath, str): - self.fullpath = '' + self.fullpath = "" - self.method = self.template_args['method']['default'] \ - if not isinstance(method, str) else method.upper() + self.method = ( + self.template_args["method"]["default"] + if not isinstance(method, str) + else method.upper() + ) if self.method not in METHODS: - msg = 'The method specified ({}) is invalid.'.format(method) + msg = f"The method specified ({method}) is invalid." 
self.logger.warning(msg) raise TypeError(msg) @@ -218,11 +217,11 @@ def __init__(self, headers=None, method=None, payload=None, params=None, # Store our extra payload entries (but tidy them up since they will # become XML Keys (they can't contain certain characters for k, v in payload.items(): - key = re.sub(r'[^A-Za-z0-9_-]*', '', k) + key = re.sub(r"[^A-Za-z0-9_-]*", "", k) if not key: self.logger.warning( - 'Ignoring invalid XML Stanza element name({})' - .format(k)) + f"Ignoring invalid XML Stanza element name({k})" + ) continue # Any values set in the payload to alter a system related one @@ -237,39 +236,53 @@ def __init__(self, headers=None, method=None, payload=None, params=None, self.payload_extras[key] = v # Set our xsd url - self.xsd_url = None if self.payload_overrides or self.payload_extras \ + self.xsd_url = ( + None + if self.payload_overrides or self.payload_extras else self.xsd_default_url.format(version=self.xsd_ver) + ) return - def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, - **kwargs): - """ - Perform XML Notification - """ + def send( + self, + body, + title="", + notify_type=NotifyType.INFO, + attach=None, + **kwargs, + ): + """Perform XML Notification.""" # Prepare HTTP Headers headers = { - 'User-Agent': self.app_id, - 'Content-Type': 'application/xml' + "User-Agent": self.app_id, + "Content-Type": "application/xml", } # Apply any/all header over-rides defined headers.update(self.headers) # Our XML Attachmement subsitution - xml_attachments = '' + xml_attachments = "" payload_base = {} for key, value in ( - (XMLPayloadField.VERSION, self.xsd_ver), - (XMLPayloadField.TITLE, NotifyXML.escape_html( - title, whitespace=False)), - (XMLPayloadField.MESSAGE, NotifyXML.escape_html( - body, whitespace=False)), - (XMLPayloadField.MESSAGETYPE, NotifyXML.escape_html( - notify_type, whitespace=False))): + (XMLPayloadField.VERSION, self.xsd_ver), + ( + XMLPayloadField.TITLE, + NotifyXML.escape_html(title, whitespace=False), + ), 
+ ( + XMLPayloadField.MESSAGE, + NotifyXML.escape_html(body, whitespace=False), + ), + ( + XMLPayloadField.MESSAGETYPE, + NotifyXML.escape_html(notify_type.value, whitespace=False), + ), + ): if not self.payload_map[key]: # Do not store element in payload response @@ -277,13 +290,15 @@ def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, payload_base[self.payload_map[key]] = value # Apply our payload extras - payload_base.update( - {k: NotifyXML.escape_html(v, whitespace=False) - for k, v in self.payload_extras.items()}) + payload_base.update({ + k: NotifyXML.escape_html(v, whitespace=False) + for k, v in self.payload_extras.items() + }) # Base Entres - xml_base = ''.join( - ['<{}>{}'.format(k, v, k) for k, v in payload_base.items()]) + xml_base = "".join( + [f"<{k}>{v}" for k, v in payload_base.items()] + ) attachments = [] if attach and self.attachment_support: @@ -292,49 +307,61 @@ def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, if not attachment: # We could not access the attachment self.logger.error( - 'Could not access Custom XML attachment {}.'.format( - attachment.url(privacy=True))) + "Could not access Custom XML attachment" + f" {attachment.url(privacy=True)}." 
+ ) return False try: # Prepare our Attachment in Base64 - entry = \ - ''.format( - NotifyXML.escape_html( - attachment.name if attachment.name - else f'file{no:03}.dat', whitespace=False), - NotifyXML.escape_html( - attachment.mimetype, whitespace=False)) + entry = ''.format( + NotifyXML.escape_html( + ( + attachment.name + if attachment.name + else f"file{no:03}.dat" + ), + whitespace=False, + ), + NotifyXML.escape_html( + attachment.mimetype, whitespace=False + ), + ) entry += attachment.base64() - entry += '' + entry += "" attachments.append(entry) except exception.AppriseException: # We could not access the attachment self.logger.error( - 'Could not access Custom XML attachment {}.'.format( - attachment.url(privacy=True))) + "Could not access Custom XML attachment" + f" {attachment.url(privacy=True)}." + ) return False self.logger.debug( - 'Appending Custom XML attachment {}'.format( - attachment.url(privacy=True))) + "Appending Custom XML attachment" + f" {attachment.url(privacy=True)}" + ) # Update our xml_attachments record: - xml_attachments = \ - '' + \ - ''.join(attachments) + '' + xml_attachments = ( + '' + + "".join(attachments) + + "" + ) re_map = { - '{{XSD_URL}}': - f' xmlns:xsi="{self.xsd_url}"' if self.xsd_url else '', - '{{ATTACHMENTS}}': xml_attachments, - '{{CORE}}': xml_base, + "{{XSD_URL}}": ( + f' xmlns:xsi="{self.xsd_url}"' if self.xsd_url else "" + ), + "{{ATTACHMENTS}}": xml_attachments, + "{{CORE}}": xml_base, } # Iterate over above list and store content accordingly re_table = re.compile( - r'(' + '|'.join(re_map.keys()) + r')', + r"(" + "|".join(re_map.keys()) + r")", re.IGNORECASE, ) @@ -343,37 +370,43 @@ def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, auth = (self.user, self.password) # Set our schema - schema = 'https' if self.secure else 'http' + schema = "https" if self.secure else "http" - url = '%s://%s' % (schema, self.host) + url = f"{schema}://{self.host}" if isinstance(self.port, int): - url += ':%d' 
% self.port + url += f":{self.port}" url += self.fullpath payload = re_table.sub(lambda x: re_map[x.group()], self.payload) - self.logger.debug('XML POST URL: %s (cert_verify=%r)' % ( - url, self.verify_certificate, - )) - - self.logger.debug('XML Payload: %s' % str(payload)) + # Some Debug Logging + if self.logger.isEnabledFor(logging.DEBUG): + # Due to attachments; output can be quite heavy and io intensive + # To accommodate this, we only show our debug payload information + # if required. + self.logger.debug( + f"XML POST URL: {url} " + f"(cert_verify={self.verify_certificate!r})" + ) + self.logger.debug( + "XML Payload: %s", sanitize_payload(payload)) # Always call throttle before any remote server i/o is made self.throttle() - if self.method == 'GET': + if self.method == "GET": method = requests.get - elif self.method == 'PUT': + elif self.method == "PUT": method = requests.put - elif self.method == 'PATCH': + elif self.method == "PATCH": method = requests.patch - elif self.method == 'DELETE': + elif self.method == "DELETE": method = requests.delete - elif self.method == 'HEAD': + elif self.method == "HEAD": method = requests.head else: # POST @@ -390,29 +423,31 @@ def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, ) if r.status_code < 200 or r.status_code >= 300: # We had a problem - status_str = \ - NotifyXML.http_response_code_lookup(r.status_code) + status_str = NotifyXML.http_response_code_lookup(r.status_code) self.logger.warning( - 'Failed to send JSON %s notification: %s%serror=%s.', + "Failed to send JSON %s notification: %s%serror=%s.", self.method, status_str, - ', ' if status_str else '', - str(r.status_code)) + ", " if status_str else "", + r.status_code, + ) - self.logger.debug('Response Details:\r\n{}'.format(r.content)) + self.logger.debug( + "Response Details:\r\n%r", (r.content or b"")[:2000]) # Return; we're done return False else: - self.logger.info('Sent XML %s notification.', self.method) + self.logger.info("Sent 
XML %s notification.", self.method) except requests.RequestException as e: self.logger.warning( - 'A Connection error occurred sending XML ' - 'notification to %s.' % self.host) - self.logger.debug('Socket Exception: %s' % str(e)) + "A Connection error occurred sending XML " + f"notification to {self.host}." + ) + self.logger.debug(f"Socket Exception: {e!s}") # Return; we're done return False @@ -421,97 +456,104 @@ def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, @property def url_identifier(self): - """ - Returns all of the identifiers that make this URL unique from - another simliar one. Targets or end points should never be identified - here. + """Returns all of the identifiers that make this URL unique from + another simliar one. + + Targets or end points should never be identified here. """ return ( self.secure_protocol if self.secure else self.protocol, - self.user, self.password, self.host, + self.user, + self.password, + self.host, self.port if self.port else (443 if self.secure else 80), - self.fullpath.rstrip('/'), + self.fullpath.rstrip("/"), ) def url(self, privacy=False, *args, **kwargs): - """ - Returns the URL built dynamically based on specified arguments. 
- """ + """Returns the URL built dynamically based on specified arguments.""" # Define any URL parameters params = { - 'method': self.method, + "method": self.method, } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) # Append our headers into our parameters - params.update({'+{}'.format(k): v for k, v in self.headers.items()}) + params.update({f"+{k}": v for k, v in self.headers.items()}) # Append our GET params into our parameters - params.update({'-{}'.format(k): v for k, v in self.params.items()}) + params.update({f"-{k}": v for k, v in self.params.items()}) # Append our payload extra's into our parameters - params.update( - {':{}'.format(k): v for k, v in self.payload_extras.items()}) - params.update( - {':{}'.format(k): v for k, v in self.payload_overrides.items()}) + params.update({f":{k}": v for k, v in self.payload_extras.items()}) + params.update({f":{k}": v for k, v in self.payload_overrides.items()}) # Determine Authentication - auth = '' + auth = "" if self.user and self.password: - auth = '{user}:{password}@'.format( - user=NotifyXML.quote(self.user, safe=''), + auth = "{user}:{password}@".format( + user=NotifyXML.quote(self.user, safe=""), password=self.pprint( - self.password, privacy, mode=PrivacyMode.Secret, safe=''), + self.password, privacy, mode=PrivacyMode.Secret, safe="" + ), ) elif self.user: - auth = '{user}@'.format( - user=NotifyXML.quote(self.user, safe=''), + auth = "{user}@".format( + user=NotifyXML.quote(self.user, safe=""), ) default_port = 443 if self.secure else 80 - - return '{schema}://{auth}{hostname}{port}{fullpath}?{params}'.format( + return "{schema}://{auth}{hostname}{port}{fullpath}?{params}".format( schema=self.secure_protocol if self.secure else self.protocol, auth=auth, # never encode hostname since we're expecting it to be a valid one hostname=self.host, - port='' if self.port is None or self.port == default_port - else ':{}'.format(self.port), - 
fullpath=NotifyXML.quote(self.fullpath, safe='/') - if self.fullpath else '/', + port=( + "" + if self.port is None or self.port == default_port + else f":{self.port}" + ), + fullpath=( + NotifyXML.quote(self.fullpath, safe="/") + if self.fullpath + else "/" + ), params=NotifyXML.urlencode(params), ) @staticmethod def parse_url(url): - """ - Parses the URL and returns enough arguments that can allow - us to re-instantiate this object. - - """ + """Parses the URL and returns enough arguments that can allow us to re- + instantiate this object.""" results = NotifyBase.parse_url(url) if not results: # We're done early as we couldn't load the results return results # store any additional payload extra's defined - results['payload'] = {NotifyXML.unquote(x): NotifyXML.unquote(y) - for x, y in results['qsd:'].items()} + results["payload"] = { + NotifyXML.unquote(x): NotifyXML.unquote(y) + for x, y in results["qsd:"].items() + } # Add our headers that the user can potentially over-ride if they wish # to to our returned result set and tidy entries by unquoting them - results['headers'] = {NotifyXML.unquote(x): NotifyXML.unquote(y) - for x, y in results['qsd+'].items()} + results["headers"] = { + NotifyXML.unquote(x): NotifyXML.unquote(y) + for x, y in results["qsd+"].items() + } # Add our GET paramters in the event the user wants to pass these along - results['params'] = {NotifyXML.unquote(x): NotifyXML.unquote(y) - for x, y in results['qsd-'].items()} + results["params"] = { + NotifyXML.unquote(x): NotifyXML.unquote(y) + for x, y in results["qsd-"].items() + } # Set method if not otherwise set - if 'method' in results['qsd'] and len(results['qsd']['method']): - results['method'] = NotifyXML.unquote(results['qsd']['method']) + if "method" in results["qsd"] and len(results["qsd"]["method"]): + results["method"] = NotifyXML.unquote(results["qsd"]["method"]) return results diff --git a/libs/apprise/plugins/d7networks.py b/libs/apprise/plugins/d7networks.py index 
cdc7e815e5..3d4b6a4d54 100644 --- a/libs/apprise/plugins/d7networks.py +++ b/libs/apprise/plugins/d7networks.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. -# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -35,48 +34,50 @@ # # API Reference: https://d7networks.com/docs/Messages/Send_Message/ +from json import dumps, loads + import requests -from json import dumps -from json import loads -from .base import NotifyBase from ..common import NotifyType -from ..utils.parse import ( - is_phone_no, parse_phone_no, validate_regex, parse_bool) from ..locale import gettext_lazy as _ +from ..utils.parse import ( + is_phone_no, + parse_bool, + parse_phone_no, + validate_regex, +) +from .base import NotifyBase # Extend HTTP Error Messages D7NETWORKS_HTTP_ERROR_MAP = { - 401: 'Invalid Argument(s) Specified.', - 403: 'Unauthorized - Authentication Failure.', - 412: 'A Routing Error Occured', - 500: 'A Serverside Error Occured Handling the Request.', + 401: "Invalid Argument(s) Specified.", + 403: "Unauthorized - Authentication Failure.", + 412: "A Routing Error Occured", + 500: "A Serverside Error Occured Handling the Request.", } class NotifyD7Networks(NotifyBase): - """ - A wrapper for D7 Networks Notifications - """ + """A wrapper for D7 Networks Notifications.""" # The default descriptive name associated with the Notification - service_name = 'D7 Networks' + service_name = "D7 Networks" # The services URL - service_url = 'https://d7networks.com/' + service_url = "https://d7networks.com/" # All notification requests are secure - secure_protocol = 'd7sms' + secure_protocol = "d7sms" # Allow 300 requests per minute. 
# 60/300 = 0.2 request_rate_per_sec = 0.20 # A URL that takes you to the setup/help of the specific protocol - setup_url = 'https://github.com/caronc/apprise/wiki/Notify_d7networks' + setup_url = "https://appriseit.com/services/d7networks/" # D7 Networks single notification URL - notify_url = 'https://api.d7networks.com/messages/v1/send' + notify_url = "https://api.d7networks.com/messages/v1/send" # The maximum length of the body body_maxlen = 160 @@ -86,115 +87,122 @@ class NotifyD7Networks(NotifyBase): title_maxlen = 0 # Define object templates - templates = ( - '{schema}://{token}@{targets}', - ) + templates = ("{schema}://{token}@{targets}",) # Define our template tokens - template_tokens = dict(NotifyBase.template_tokens, **{ - 'token': { - 'name': _('API Access Token'), - 'type': 'string', - 'required': True, - 'private': True, - }, - 'target_phone': { - 'name': _('Target Phone No'), - 'type': 'string', - 'prefix': '+', - 'regex': (r'^[0-9\s)(+-]+$', 'i'), - 'map_to': 'targets', - }, - 'targets': { - 'name': _('Targets'), - 'type': 'list:string', - 'required': True, + template_tokens = dict( + NotifyBase.template_tokens, + **{ + "token": { + "name": _("API Access Token"), + "type": "string", + "required": True, + "private": True, + }, + "target_phone": { + "name": _("Target Phone No"), + "type": "string", + "prefix": "+", + "regex": (r"^[0-9\s)(+-]+$", "i"), + "map_to": "targets", + }, + "targets": { + "name": _("Targets"), + "type": "list:string", + "required": True, + }, }, - }) + ) # Define our template arguments - template_args = dict(NotifyBase.template_args, **{ - 'unicode': { - # Unicode characters (default is 'auto') - 'name': _('Unicode Characters'), - 'type': 'bool', - 'default': False, - }, - 'batch': { - 'name': _('Batch Mode'), - 'type': 'bool', - 'default': False, - }, - 'to': { - 'alias_of': 'targets', - }, - 'source': { - # Originating address,In cases where the rewriting of the sender's - # address is supported or permitted by the SMS-C. 
This is used to - # transmit the message, this number is transmitted as the - # originating address and is completely optional. - 'name': _('Originating Address'), - 'type': 'string', - 'map_to': 'source', - - }, - 'from': { - 'alias_of': 'source', + template_args = dict( + NotifyBase.template_args, + **{ + "unicode": { + # Unicode characters (default is 'auto') + "name": _("Unicode Characters"), + "type": "bool", + "default": False, + }, + "batch": { + "name": _("Batch Mode"), + "type": "bool", + "default": False, + }, + "to": { + "alias_of": "targets", + }, + "source": { + # Originating address,In cases where the rewriting of the + # sender's address is supported or permitted by the SMS-C. + # This is used to transmit the message, this number is + # transmitted as the originating address and is completely + # optional. + "name": _("Originating Address"), + "type": "string", + "map_to": "source", + }, + "from": { + "alias_of": "source", + }, }, - }) + ) - def __init__(self, token=None, targets=None, source=None, - batch=False, unicode=None, **kwargs): - """ - Initialize D7 Networks Object - """ + def __init__( + self, + token=None, + targets=None, + source=None, + batch=False, + unicode=None, + **kwargs, + ): + """Initialize D7 Networks Object.""" super().__init__(**kwargs) # Prepare Batch Mode Flag self.batch = batch # Setup our source address (if defined) - self.source = None \ - if not isinstance(source, str) else source.strip() + self.source = None if not isinstance(source, str) else source.strip() # Define whether or not we should set the unicode flag - self.unicode = self.template_args['unicode']['default'] \ - if unicode is None else bool(unicode) + self.unicode = ( + self.template_args["unicode"]["default"] + if unicode is None + else bool(unicode) + ) # The token associated with the account self.token = validate_regex(token) if not self.token: - msg = 'The D7 Networks token specified ({}) is invalid.'\ - .format(token) + msg = f"The D7 Networks token 
specified ({token}) is invalid." self.logger.warning(msg) raise TypeError(msg) # Parse our targets - self.targets = list() + self.targets = [] for target in parse_phone_no(targets): # Validate targets and drop bad ones: result = result = is_phone_no(target) if not result: self.logger.warning( - 'Dropped invalid phone # ' - '({}) specified.'.format(target), + f"Dropped invalid phone # ({target}) specified.", ) continue # store valid phone number - self.targets.append(result['full']) + self.targets.append(result["full"]) return - def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): - """ - Depending on whether we are set to batch mode or single mode this - redirects to the appropriate handling - """ + def send(self, body, title="", notify_type=NotifyType.INFO, **kwargs): + """Depending on whether we are set to batch mode or single mode this + redirects to the appropriate handling.""" if len(self.targets) == 0: # There were no services to notify - self.logger.warning('There were no D7 Networks targets to notify.') + self.logger.warning("There were no D7 Networks targets to notify.") return False # error tracking (used for function return) @@ -202,24 +210,24 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): # Prepare our headers headers = { - 'User-Agent': self.app_id, - 'Content-Type': 'application/json', - 'Accept': 'application/json', - 'Authorization': f'Bearer {self.token}', + "User-Agent": self.app_id, + "Content-Type": "application/json", + "Accept": "application/json", + "Authorization": f"Bearer {self.token}", } payload = { - 'message_globals': { - 'channel': 'sms', + "message_globals": { + "channel": "sms", }, - 'messages': [{ + "messages": [{ # Populated later on - 'recipients': None, - 'content': body, - 'data_coding': + "recipients": None, + "content": body, + "data_coding": # auto is a better substitute over 'text' as text is easier to # detect from a post than `unicode` is. 
- 'auto' if not self.unicode else 'unicode', + "auto" if not self.unicode else "unicode", }], } @@ -227,14 +235,14 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): targets = list(self.targets) if self.source: - payload['message_globals']['originator'] = self.source + payload["message_globals"]["originator"] = self.source target = None while len(targets): if self.batch: # Prepare our payload - payload['messages'][0]['recipients'] = self.targets + payload["messages"][0]["recipients"] = self.targets # Reset our targets so we don't keep going. This is required # because we're in batch mode; we only need to loop once. @@ -246,13 +254,14 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): target = targets.pop(0) # Prepare our payload - payload['messages'][0]['recipients'] = [target] + payload["messages"][0]["recipients"] = [target] # Some Debug Logging self.logger.debug( - 'D7 Networks POST URL: {} (cert_verify={})'.format( - self.notify_url, self.verify_certificate)) - self.logger.debug('D7 Networks Payload: {}' .format(payload)) + "D7 Networks POST URL:" + f" {self.notify_url} (cert_verify={self.verify_certificate})" + ) + self.logger.debug(f"D7 Networks Payload: {payload}") # Always call throttle before any remote server i/o is made self.throttle() @@ -266,16 +275,18 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): ) if r.status_code not in ( - requests.codes.created, requests.codes.ok): + requests.codes.created, + requests.codes.ok, + ): # We had a problem - status_str = \ - NotifyBase.http_response_code_lookup( - r.status_code, D7NETWORKS_HTTP_ERROR_MAP) + status_str = NotifyBase.http_response_code_lookup( + r.status_code, D7NETWORKS_HTTP_ERROR_MAP + ) try: # Update our status response if we can json_response = loads(r.content) - status_str = json_response.get('message', status_str) + status_str = json_response.get("message", status_str) except (AttributeError, TypeError, ValueError): # 
ValueError = r.content is Unparsable @@ -287,15 +298,17 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): pass self.logger.warning( - 'Failed to send D7 Networks SMS notification to {}: ' - '{}{}error={}.'.format( - ', '.join(target) if self.batch else target, + "Failed to send D7 Networks SMS notification to {}: " + "{}{}error={}.".format( + ", ".join(target) if self.batch else target, status_str, - ', ' if status_str else '', - r.status_code)) + ", " if status_str else "", + r.status_code, + ) + ) self.logger.debug( - 'Response Details:\r\n{}'.format(r.content)) + "Response Details:\r\n%r", (r.content or b"")[:2000]) # Mark our failure has_error = True @@ -305,23 +318,24 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): if self.batch: self.logger.info( - 'Sent D7 Networks batch SMS notification to ' - '{} target(s).'.format(len(self.targets))) + "Sent D7 Networks batch SMS notification to " + f"{len(self.targets)} target(s)." + ) else: self.logger.info( - 'Sent D7 Networks SMS notification to {}.'.format( - target)) + f"Sent D7 Networks SMS notification to {target}." + ) - self.logger.debug( - 'Response Details:\r\n{}'.format(r.content)) + self.logger.debug(f"Response Details:\r\n{r.content}") except requests.RequestException as e: self.logger.warning( - 'A Connection error occurred sending D7 Networks:%s ' % ( - ', '.join(self.targets)) + 'notification.' + "A Connection error occurred sending D7 Networks:{} " + .format(", ".join(self.targets)) + + "notification." ) - self.logger.debug('Socket Exception: %s' % str(e)) + self.logger.debug(f"Socket Exception: {e!s}") # Mark our failure has_error = True continue @@ -329,42 +343,40 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): return not has_error def url(self, privacy=False, *args, **kwargs): - """ - Returns the URL built dynamically based on specified arguments. 
- """ + """Returns the URL built dynamically based on specified arguments.""" # Define any URL parameters params = { - 'batch': 'yes' if self.batch else 'no', - 'unicode': 'yes' if self.unicode else 'no', + "batch": "yes" if self.batch else "no", + "unicode": "yes" if self.unicode else "no", } if self.source: - params['from'] = self.source + params["from"] = self.source # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) - return '{schema}://{token}@{targets}/?{params}'.format( + return "{schema}://{token}@{targets}/?{params}".format( schema=self.secure_protocol, - token=self.pprint(self.token, privacy, safe=''), - targets='/'.join( - [NotifyD7Networks.quote(x, safe='') for x in self.targets]), - params=NotifyD7Networks.urlencode(params)) + token=self.pprint(self.token, privacy, safe=""), + targets="/".join( + [NotifyD7Networks.quote(x, safe="") for x in self.targets] + ), + params=NotifyD7Networks.urlencode(params), + ) @property def url_identifier(self): - """ - Returns all of the identifiers that make this URL unique from - another simliar one. Targets or end points should never be identified - here. + """Returns all of the identifiers that make this URL unique from + another simliar one. + + Targets or end points should never be identified here. """ return (self.secure_protocol, self.token) def __len__(self): - """ - Returns the number of targets associated with this notification - """ + """Returns the number of targets associated with this notification.""" # # Factor batch into calculation # @@ -372,65 +384,67 @@ def __len__(self): @staticmethod def parse_url(url): - """ - Parses the URL and returns enough arguments that can allow - us to re-instantiate this object. 
- - """ + """Parses the URL and returns enough arguments that can allow us to re- + instantiate this object.""" results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results - if 'token' in results['qsd'] and len(results['qsd']['token']): - results['token'] = \ - NotifyD7Networks.unquote(results['qsd']['token']) + if "token" in results["qsd"] and len(results["qsd"]["token"]): + results["token"] = NotifyD7Networks.unquote( + results["qsd"]["token"] + ) - elif results['user']: - results['token'] = NotifyD7Networks.unquote(results['user']) + elif results["user"]: + results["token"] = NotifyD7Networks.unquote(results["user"]) - if results['password']: + if results["password"]: # Support token containing a colon (:) - results['token'] += \ - ':' + NotifyD7Networks.unquote(results['password']) + results["token"] += ":" + NotifyD7Networks.unquote( + results["password"] + ) - elif results['password']: + elif results["password"]: # Support token starting with a colon (:) - results['token'] = \ - ':' + NotifyD7Networks.unquote(results['password']) + results["token"] = ":" + NotifyD7Networks.unquote( + results["password"] + ) # Initialize our targets - results['targets'] = list() + results["targets"] = [] # The store our first target stored in the hostname - results['targets'].append(NotifyD7Networks.unquote(results['host'])) + results["targets"].append(NotifyD7Networks.unquote(results["host"])) # Get our entries; split_path() looks after unquoting content for us # by default - results['targets'].extend( - NotifyD7Networks.split_path(results['fullpath'])) + results["targets"].extend( + NotifyD7Networks.split_path(results["fullpath"]) + ) # Get Batch Mode Flag - results['batch'] = \ - parse_bool(results['qsd'].get('batch', False)) + results["batch"] = parse_bool(results["qsd"].get("batch", False)) # Get Unicode Flag - results['unicode'] = \ - parse_bool(results['qsd'].get('unicode', False)) + 
results["unicode"] = parse_bool(results["qsd"].get("unicode", False)) # Support the 'to' variable so that we can support targets this way too # The 'to' makes it easier to use yaml configuration - if 'to' in results['qsd'] and len(results['qsd']['to']): - results['targets'] += \ - NotifyD7Networks.parse_phone_no(results['qsd']['to']) + if "to" in results["qsd"] and len(results["qsd"]["to"]): + results["targets"] += NotifyD7Networks.parse_phone_no( + results["qsd"]["to"] + ) # Support the 'from' and source variable - if 'from' in results['qsd'] and len(results['qsd']['from']): - results['source'] = \ - NotifyD7Networks.unquote(results['qsd']['from']) - - elif 'source' in results['qsd'] and len(results['qsd']['source']): - results['source'] = \ - NotifyD7Networks.unquote(results['qsd']['source']) + if "from" in results["qsd"] and len(results["qsd"]["from"]): + results["source"] = NotifyD7Networks.unquote( + results["qsd"]["from"] + ) + + elif "source" in results["qsd"] and len(results["qsd"]["source"]): + results["source"] = NotifyD7Networks.unquote( + results["qsd"]["source"] + ) return results diff --git a/libs/apprise/plugins/dapnet.py b/libs/apprise/plugins/dapnet.py index f117095c80..3325bc19e5 100644 --- a/libs/apprise/plugins/dapnet.py +++ b/libs/apprise/plugins/dapnet.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. 
-# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -51,12 +50,11 @@ import requests from requests.auth import HTTPBasicAuth -from .base import NotifyBase +from ..common import NotifyType from ..locale import gettext_lazy as _ from ..url import PrivacyMode -from ..common import NotifyType -from ..utils.parse import ( - is_call_sign, parse_call_sign, parse_list, parse_bool) +from ..utils.parse import is_call_sign, parse_bool, parse_call_sign, parse_list +from .base import NotifyBase class DapnetPriority: @@ -65,42 +63,39 @@ class DapnetPriority: DAPNET_PRIORITIES = { - DapnetPriority.NORMAL: 'normal', - DapnetPriority.EMERGENCY: 'emergency', + DapnetPriority.NORMAL: "normal", + DapnetPriority.EMERGENCY: "emergency", } DAPNET_PRIORITY_MAP = { # Maps against string 'normal' - 'n': DapnetPriority.NORMAL, + "n": DapnetPriority.NORMAL, # Maps against string 'emergency' - 'e': DapnetPriority.EMERGENCY, - + "e": DapnetPriority.EMERGENCY, # Entries to additionally support (so more like Dapnet's API) - '0': DapnetPriority.NORMAL, - '1': DapnetPriority.EMERGENCY, + "0": DapnetPriority.NORMAL, + "1": DapnetPriority.EMERGENCY, } class NotifyDapnet(NotifyBase): - """ - A wrapper for DAPNET / Hampager Notifications - """ + """A wrapper for DAPNET / Hampager Notifications.""" # The default descriptive name associated with the Notification - service_name = 'Dapnet' + service_name = "Dapnet" # The services URL - service_url = 'https://hampager.de/' + service_url = "https://hampager.de/" # The default secure protocol - secure_protocol = 'dapnet' + secure_protocol = "dapnet" # A URL that takes you to the setup/help of the specific protocol - setup_url = 'https://github.com/caronc/apprise/wiki/Notify_dapnet' + setup_url = "https://appriseit.com/services/dapnet/" # Dapnet uses the http protocol with JSON requests - 
notify_url = 'http://www.hampager.de:8080/calls' + notify_url = "http://www.hampager.de:8080/calls" # The maximum length of the body body_maxlen = 80 @@ -113,96 +108,103 @@ class NotifyDapnet(NotifyBase): default_batch_size = 50 # Define object templates - templates = ('{schema}://{user}:{password}@{targets}',) + templates = ("{schema}://{user}:{password}@{targets}",) # Define our template tokens template_tokens = dict( NotifyBase.template_tokens, **{ - 'user': { - 'name': _('User Name'), - 'type': 'string', - 'required': True, + "user": { + "name": _("User Name"), + "type": "string", + "required": True, }, - 'password': { - 'name': _('Password'), - 'type': 'string', - 'private': True, - 'required': True, + "password": { + "name": _("Password"), + "type": "string", + "private": True, + "required": True, }, - 'target_callsign': { - 'name': _('Target Callsign'), - 'type': 'string', - 'regex': ( - r'^[a-z0-9]{2,5}(-[a-z0-9]{1,2})?$', 'i', + "target_callsign": { + "name": _("Target Callsign"), + "type": "string", + "regex": ( + r"^[a-z0-9]{2,5}(-[a-z0-9]{1,2})?$", + "i", ), - 'map_to': 'targets', + "map_to": "targets", }, - 'targets': { - 'name': _('Targets'), - 'type': 'list:string', - 'required': True, + "targets": { + "name": _("Targets"), + "type": "list:string", + "required": True, }, - } + }, ) # Define our template arguments template_args = dict( NotifyBase.template_args, **{ - 'to': { - 'name': _('Target Callsign'), - 'type': 'string', - 'map_to': 'targets', + "to": { + "name": _("Target Callsign"), + "type": "string", + "map_to": "targets", }, - 'priority': { - 'name': _('Priority'), - 'type': 'choice:int', - 'values': DAPNET_PRIORITIES, - 'default': DapnetPriority.NORMAL, + "priority": { + "name": _("Priority"), + "type": "choice:int", + "values": DAPNET_PRIORITIES, + "default": DapnetPriority.NORMAL, }, - 'txgroups': { - 'name': _('Transmitter Groups'), - 'type': 'string', - 'default': 'dl-all', - 'private': True, + "txgroups": { + "name": _("Transmitter 
Groups"), + "type": "string", + "default": "dl-all", + "private": True, }, - 'batch': { - 'name': _('Batch Mode'), - 'type': 'bool', - 'default': False, + "batch": { + "name": _("Batch Mode"), + "type": "bool", + "default": False, }, - } + }, ) - def __init__(self, targets=None, priority=None, txgroups=None, - batch=False, **kwargs): - """ - Initialize Dapnet Object - """ + def __init__( + self, targets=None, priority=None, txgroups=None, batch=False, **kwargs + ): + """Initialize Dapnet Object.""" super().__init__(**kwargs) # Parse our targets - self.targets = list() + self.targets = [] # The Priority of the message self.priority = int( - NotifyDapnet.template_args['priority']['default'] - if priority is None else - next(( - v for k, v in DAPNET_PRIORITY_MAP.items() - if str(priority).lower().startswith(k)), - NotifyDapnet.template_args['priority']['default'])) + NotifyDapnet.template_args["priority"]["default"] + if priority is None + else next( + ( + v + for k, v in DAPNET_PRIORITY_MAP.items() + if str(priority).lower().startswith(k) + ), + NotifyDapnet.template_args["priority"]["default"], + ) + ) if not (self.user and self.password): - msg = 'A Dapnet user/pass was not provided.' + msg = "A Dapnet user/pass was not provided." 
self.logger.warning(msg) raise TypeError(msg) # Get the transmitter group self.txgroups = parse_list( - NotifyDapnet.template_args['txgroups']['default'] - if not txgroups else txgroups) + txgroups + if txgroups + else NotifyDapnet.template_args["txgroups"]["default"] + ) # Prepare Batch Mode Flag self.batch = batch @@ -212,34 +214,32 @@ def __init__(self, targets=None, priority=None, txgroups=None, result = is_call_sign(target) if not result: self.logger.warning( - 'Dropping invalid Amateur radio call sign ({}).'.format( - target), + f"Dropping invalid Amateur radio call sign ({target}).", ) continue # Store callsign without SSID and ignore duplicates - if result['callsign'] not in self.targets: - self.targets.append(result['callsign']) + if result["callsign"] not in self.targets: + self.targets.append(result["callsign"]) return - def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): - """ - Perform Dapnet Notification - """ + def send(self, body, title="", notify_type=NotifyType.INFO, **kwargs): + """Perform Dapnet Notification.""" if not self.targets: # There is no one to email; we're done self.logger.warning( - 'There are no Amateur radio callsigns to notify') + "There are no Amateur radio callsigns to notify" + ) return False # Send in batches if identified to do so batch_size = 1 if not self.batch else self.default_batch_size headers = { - 'User-Agent': self.app_id, - 'Content-Type': 'application/json; charset=utf-8', + "User-Agent": self.app_id, + "Content-Type": "application/json; charset=utf-8", } # error tracking (used for function return) @@ -252,14 +252,14 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): # prepare JSON payload payload = { - 'text': body, - 'callSignNames': targets[index:index + batch_size], - 'transmitterGroupNames': self.txgroups, - 'emergency': (self.priority == DapnetPriority.EMERGENCY), + "text": body, + "callSignNames": targets[index : index + batch_size], + "transmitterGroupNames": 
self.txgroups, + "emergency": self.priority == DapnetPriority.EMERGENCY, } - self.logger.debug('DAPNET POST URL: %s' % self.notify_url) - self.logger.debug('DAPNET Payload: %s' % dumps(payload)) + self.logger.debug(f"DAPNET POST URL: {self.notify_url}") + self.logger.debug(f"DAPNET Payload: {dumps(payload)}") # Always call throttle before any remote server i/o is made self.throttle() @@ -269,7 +269,8 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): data=dumps(payload), headers=headers, auth=HTTPBasicAuth( - username=self.user, password=self.password), + username=self.user, password=self.password + ), verify=self.verify_certificate, timeout=self.request_timeout, ) @@ -277,33 +278,33 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): # We had a problem self.logger.warning( - 'Failed to send DAPNET notification {} to {}: ' - 'error={}.'.format( - payload['text'], - ' to {}'.format(self.targets), - r.status_code + "Failed to send DAPNET notification {} to {}: " + "error={}.".format( + payload["text"], + f" to {self.targets}", + r.status_code, ) ) self.logger.debug( - 'Response Details:\r\n{}'.format(r.content)) + "Response Details:\r\n%r", (r.content or b"")[:2000]) # Mark our failure has_error = True else: self.logger.info( - 'Sent \'{}\' DAPNET notification {}'.format( - payload['text'], 'to {}'.format(self.targets) + "Sent '{}' DAPNET notification {}".format( + payload["text"], f"to {self.targets}" ) ) except requests.RequestException as e: self.logger.warning( - 'A Connection error occurred sending DAPNET ' - 'notification to {}'.format(self.targets) + "A Connection error occurred sending DAPNET " + f"notification to {self.targets}" ) - self.logger.debug('Socket Exception: %s' % str(e)) + self.logger.debug(f"Socket Exception: {e!s}") # Mark our failure has_error = True @@ -311,102 +312,101 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): return not has_error def url(self, privacy=False, *args, 
**kwargs): - """ - Returns the URL built dynamically based on specified arguments. - """ + """Returns the URL built dynamically based on specified arguments.""" # Define any URL parameters params = { - 'priority': - DAPNET_PRIORITIES[self.template_args['priority']['default']] + "priority": ( + DAPNET_PRIORITIES[self.template_args["priority"]["default"]] if self.priority not in DAPNET_PRIORITIES - else DAPNET_PRIORITIES[self.priority], - 'batch': 'yes' if self.batch else 'no', - 'txgroups': ','.join(self.txgroups), + else DAPNET_PRIORITIES[self.priority] + ), + "batch": "yes" if self.batch else "no", + "txgroups": ",".join(self.txgroups), } # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) # Setup Authentication - auth = '{user}:{password}@'.format( + auth = "{user}:{password}@".format( user=NotifyDapnet.quote(self.user, safe=""), password=self.pprint( - self.password, privacy, mode=PrivacyMode.Secret, safe='' + self.password, privacy, mode=PrivacyMode.Secret, safe="" ), ) - return '{schema}://{auth}{targets}?{params}'.format( + return "{schema}://{auth}{targets}?{params}".format( schema=self.secure_protocol, auth=auth, - targets='/'.join([self.pprint(x, privacy, safe='') - for x in self.targets]), + targets="/".join( + [self.pprint(x, privacy, safe="") for x in self.targets] + ), params=NotifyDapnet.urlencode(params), ) @property def url_identifier(self): - """ - Returns all of the identifiers that make this URL unique from - another simliar one. Targets or end points should never be identified - here. + """Returns all of the identifiers that make this URL unique from + another simliar one. + + Targets or end points should never be identified here. 
""" return (self.secure_protocol, self.user, self.password) def __len__(self): - """ - Returns the number of targets associated with this notification - """ + """Returns the number of targets associated with this notification.""" # # Factor batch into calculation # batch_size = 1 if not self.batch else self.default_batch_size targets = len(self.targets) if batch_size > 1: - targets = int(targets / batch_size) + \ - (1 if targets % batch_size else 0) + targets = int(targets / batch_size) + ( + 1 if targets % batch_size else 0 + ) return targets @staticmethod def parse_url(url): - """ - Parses the URL and returns enough arguments that can allow - us to re-instantiate this object. - - """ + """Parses the URL and returns enough arguments that can allow us to re- + instantiate this object.""" results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results # All elements are targets - results['targets'] = [NotifyDapnet.unquote(results['host'])] + results["targets"] = [NotifyDapnet.unquote(results["host"])] # All entries after the hostname are additional targets - results['targets'].extend(NotifyDapnet.split_path(results['fullpath'])) + results["targets"].extend(NotifyDapnet.split_path(results["fullpath"])) # Support the 'to' variable so that we can support rooms this way too # The 'to' makes it easier to use yaml configuration - if 'to' in results['qsd'] and len(results['qsd']['to']): - results['targets'] += \ - NotifyDapnet.parse_list(results['qsd']['to']) + if "to" in results["qsd"] and len(results["qsd"]["to"]): + results["targets"] += NotifyDapnet.parse_list(results["qsd"]["to"]) # Set our priority - if 'priority' in results['qsd'] and len(results['qsd']['priority']): - results['priority'] = \ - NotifyDapnet.unquote(results['qsd']['priority']) + if "priority" in results["qsd"] and len(results["qsd"]["priority"]): + results["priority"] = NotifyDapnet.unquote( + results["qsd"]["priority"] + ) # 
Check for one or multiple transmitter groups (comma separated) # and split them up, when necessary - if 'txgroups' in results['qsd']: - results['txgroups'] = \ - [x.lower() for x in - NotifyDapnet.parse_list(results['qsd']['txgroups'])] + if "txgroups" in results["qsd"]: + results["txgroups"] = [ + x.lower() + for x in NotifyDapnet.parse_list(results["qsd"]["txgroups"]) + ] # Get Batch Mode Flag - results['batch'] = \ - parse_bool(results['qsd'].get( - 'batch', NotifyDapnet.template_args['batch']['default'])) + results["batch"] = parse_bool( + results["qsd"].get( + "batch", NotifyDapnet.template_args["batch"]["default"] + ) + ) return results diff --git a/libs/apprise/plugins/dbus.py b/libs/apprise/plugins/dbus.py index 31aa9210fc..94c11248f6 100644 --- a/libs/apprise/plugins/dbus.py +++ b/libs/apprise/plugins/dbus.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. -# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -27,11 +26,11 @@ # POSSIBILITY OF SUCH DAMAGE. 
import sys -from .base import NotifyBase -from ..common import NotifyImageSize -from ..common import NotifyType -from ..utils.parse import parse_bool + +from ..common import NotifyImageSize, NotifyType from ..locale import gettext_lazy as _ +from ..utils.parse import parse_bool +from .base import NotifyBase # Default our global support flag NOTIFY_DBUS_SUPPORT_ENABLED = False @@ -45,18 +44,14 @@ try: - # dbus essentials - from dbus import SessionBus - from dbus import Interface - from dbus import Byte - from dbus import ByteArray - from dbus import DBusException + # D-Bus Message Bus Daemon 1.12.XX Essentials + from dbus import Byte, ByteArray, DBusException, Interface, SessionBus # # now we try to determine which mainloop(s) we can access # - # glib + # glib/dbus try: from dbus.mainloop.glib import DBusGMainLoop LOOP_GLIB = DBusGMainLoop() @@ -75,8 +70,7 @@ pass # We're good as long as at least one - NOTIFY_DBUS_SUPPORT_ENABLED = ( - LOOP_GLIB is not None or LOOP_QT is not None) + NOTIFY_DBUS_SUPPORT_ENABLED = LOOP_GLIB is not None or LOOP_QT is not None # ImportError: When using gi.repository you must not import static modules # like "gobject". Please change all occurrences of "import gobject" to @@ -88,7 +82,7 @@ try: # The following is required for Image/Icon loading only import gi - gi.require_version('GdkPixbuf', '2.0') + gi.require_version("GdkPixbuf", "2.0") from gi.repository import GdkPixbuf NOTIFY_DBUS_IMAGE_SUPPORT = True @@ -109,10 +103,9 @@ # The key to value pairs are the actual supported schema's matched # up with the Main Loop they should reference when accessed. 
MAINLOOP_MAP = { - 'qt': LOOP_QT, - 'kde': LOOP_QT, - 'glib': LOOP_GLIB, - 'dbus': LOOP_QT if LOOP_QT else LOOP_GLIB, + "qt": LOOP_QT, + "kde": LOOP_QT, + "dbus": LOOP_QT if LOOP_QT else LOOP_GLIB, } @@ -125,48 +118,46 @@ class DBusUrgency: DBUS_URGENCIES = { # Note: This also acts as a reverse lookup mapping - DBusUrgency.LOW: 'low', - DBusUrgency.NORMAL: 'normal', - DBusUrgency.HIGH: 'high', + DBusUrgency.LOW: "low", + DBusUrgency.NORMAL: "normal", + DBusUrgency.HIGH: "high", } DBUS_URGENCY_MAP = { # Maps against string 'low' - 'l': DBusUrgency.LOW, + "l": DBusUrgency.LOW, # Maps against string 'moderate' - 'm': DBusUrgency.LOW, + "m": DBusUrgency.LOW, # Maps against string 'normal' - 'n': DBusUrgency.NORMAL, + "n": DBusUrgency.NORMAL, # Maps against string 'high' - 'h': DBusUrgency.HIGH, + "h": DBusUrgency.HIGH, # Maps against string 'emergency' - 'e': DBusUrgency.HIGH, + "e": DBusUrgency.HIGH, # Entries to additionally support (so more like DBus's API) - '0': DBusUrgency.LOW, - '1': DBusUrgency.NORMAL, - '2': DBusUrgency.HIGH, + "0": DBusUrgency.LOW, + "1": DBusUrgency.NORMAL, + "2": DBusUrgency.HIGH, } class NotifyDBus(NotifyBase): - """ - A wrapper for local DBus/Qt Notifications - """ + """A wrapper for local DBus/Qt Notifications.""" # Set our global enabled flag enabled = NOTIFY_DBUS_SUPPORT_ENABLED requirements = { # Define our required packaging in order to work - 'details': _('libdbus-1.so.x must be installed.') + "details": _("libdbus-1.so.x must be installed.") } # The default descriptive name associated with the Notification - service_name = _('DBus Notification') + service_name = _("DBus Notification") # The services URL - service_url = 'http://www.freedesktop.org/Software/dbus/' + service_url = "http://www.freedesktop.org/Software/dbus/" # The default protocols # Python 3 keys() does not return a list object, it is its own dict_keys() @@ -176,7 +167,7 @@ class NotifyDBus(NotifyBase): protocol = list(MAINLOOP_MAP.keys()) # A URL that takes you to 
the setup/help of the specific protocol - setup_url = 'https://github.com/caronc/apprise/wiki/Notify_dbus' + setup_url = "https://appriseit.com/services/dbus/" # No throttling required for DBus queries request_rate_per_sec = 0 @@ -192,57 +183,62 @@ class NotifyDBus(NotifyBase): body_max_line_count = 10 # The following are required to hook into the notifications: - dbus_interface = 'org.freedesktop.Notifications' - dbus_setting_location = '/org/freedesktop/Notifications' + dbus_interface = "org.freedesktop.Notifications" + dbus_setting_location = "/org/freedesktop/Notifications" # No URL Identifier will be defined for this service as there simply isn't # enough details to uniquely identify one dbus:// from another. url_identifier = False # Define object templates - templates = ( - '{schema}://', - ) + templates = ("{schema}://",) # Define our template arguments - template_args = dict(NotifyBase.template_args, **{ - 'urgency': { - 'name': _('Urgency'), - 'type': 'choice:int', - 'values': DBUS_URGENCIES, - 'default': DBusUrgency.NORMAL, - }, - 'priority': { - # Apprise uses 'priority' everywhere; it's just a nice consistent - # feel to be able to use it here as well. Just map the - # value back to 'priority' - 'alias_of': 'urgency', + template_args = dict( + NotifyBase.template_args, + **{ + "urgency": { + "name": _("Urgency"), + "type": "choice:int", + "values": DBUS_URGENCIES, + "default": DBusUrgency.NORMAL, + }, + "priority": { + # Apprise uses 'priority' everywhere; it's just a nice + # consistent feel to be able to use it here as well. 
Just map + # the value back to 'priority' + "alias_of": "urgency", + }, + "x": { + "name": _("X-Axis"), + "type": "int", + "min": 0, + "map_to": "x_axis", + }, + "y": { + "name": _("Y-Axis"), + "type": "int", + "min": 0, + "map_to": "y_axis", + }, + "image": { + "name": _("Include Image"), + "type": "bool", + "default": True, + "map_to": "include_image", + }, }, - 'x': { - 'name': _('X-Axis'), - 'type': 'int', - 'min': 0, - 'map_to': 'x_axis', - }, - 'y': { - 'name': _('Y-Axis'), - 'type': 'int', - 'min': 0, - 'map_to': 'y_axis', - }, - 'image': { - 'name': _('Include Image'), - 'type': 'bool', - 'default': True, - 'map_to': 'include_image', - }, - }) + ) - def __init__(self, urgency=None, x_axis=None, y_axis=None, - include_image=True, **kwargs): - """ - Initialize DBus Object - """ + def __init__( + self, + urgency=None, + x_axis=None, + y_axis=None, + include_image=True, + **kwargs, + ): + """Initialize DBus Object.""" super().__init__(**kwargs) @@ -250,22 +246,26 @@ def __init__(self, urgency=None, x_axis=None, y_axis=None, self.registry = {} # Store our schema; default to dbus - self.schema = kwargs.get('schema', 'dbus') + self.schema = kwargs.get("schema", "dbus") if self.schema not in MAINLOOP_MAP: - msg = 'The schema specified ({}) is not supported.' \ - .format(self.schema) + msg = f"The schema specified ({self.schema}) is not supported." 
self.logger.warning(msg) raise TypeError(msg) # The urgency of the message self.urgency = int( - NotifyDBus.template_args['urgency']['default'] - if urgency is None else - next(( - v for k, v in DBUS_URGENCY_MAP.items() - if str(urgency).lower().startswith(k)), - NotifyDBus.template_args['urgency']['default'])) + NotifyDBus.template_args["urgency"]["default"] + if urgency is None + else next( + ( + v + for k, v in DBUS_URGENCY_MAP.items() + if str(urgency).lower().startswith(k) + ), + NotifyDBus.template_args["urgency"]["default"], + ) + ) # Our x/y axis settings if x_axis or y_axis: @@ -275,10 +275,12 @@ def __init__(self, urgency=None, x_axis=None, y_axis=None, except (TypeError, ValueError): # Invalid x/y values specified - msg = 'The x,y coordinates specified ({},{}) are invalid.'\ - .format(x_axis, y_axis) + msg = ( + f"The x,y coordinates specified ({x_axis},{y_axis}) are" + " invalid." + ) self.logger.warning(msg) - raise TypeError(msg) + raise TypeError(msg) from None else: self.x_axis = None self.y_axis = None @@ -286,25 +288,23 @@ def __init__(self, urgency=None, x_axis=None, y_axis=None, # Track whether we want to add an image to the notification. 
self.include_image = include_image - def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): - """ - Perform DBus Notification - """ + def send(self, body, title="", notify_type=NotifyType.INFO, **kwargs): + """Perform DBus Notification.""" # Acquire our session try: session = SessionBus(mainloop=MAINLOOP_MAP[self.schema]) except DBusException as e: # Handle exception - self.logger.warning('Failed to send DBus notification.') - self.logger.debug(f'DBus Exception: {e}') + self.logger.warning("Failed to send DBus notification.") + self.logger.debug(f"DBus Exception: {e}") return False # If there is no title, but there is a body, swap the two to get rid # of the weird whitespace if not title: title = body - body = '' + body = "" # acquire our dbus object dbus_obj = session.get_object( @@ -319,18 +319,19 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): ) # image path - icon_path = None if not self.include_image \ - else self.image_path(notify_type, extension='.ico') + icon_path = ( + None + if not self.include_image + else self.image_path(notify_type, extension=".ico") + ) # Our meta payload - meta_payload = { - "urgency": Byte(self.urgency) - } + meta_payload = {"urgency": Byte(self.urgency)} if not (self.x_axis is None and self.y_axis is None): # Set x/y access if these were set - meta_payload['x'] = self.x_axis - meta_payload['y'] = self.y_axis + meta_payload["x"] = self.x_axis + meta_payload["y"] = self.y_axis if NOTIFY_DBUS_IMAGE_SUPPORT and icon_path: try: @@ -338,20 +339,21 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): image = GdkPixbuf.Pixbuf.new_from_file(icon_path) # Associate our image to our notification - meta_payload['icon_data'] = ( + meta_payload["icon_data"] = ( image.get_width(), image.get_height(), image.get_rowstride(), image.get_has_alpha(), image.get_bits_per_sample(), image.get_n_channels(), - ByteArray(image.get_pixels()) + ByteArray(image.get_pixels()), ) except Exception as e: 
self.logger.warning( - "Could not load notification icon (%s).", icon_path) - self.logger.debug(f'DBus Exception: {e}') + "Could not load notification icon (%s).", icon_path + ) + self.logger.debug(f"DBus Exception: {e}") try: # Always call throttle() before any remote execution is made @@ -363,40 +365,39 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): # Message ID (0 = New Message) 0, # Icon (str) - not used - '', + "", # Title str(title), # Body str(body), # Actions - list(), + [], # Meta meta_payload, # Message Timeout self.message_timeout_ms, ) - self.logger.info('Sent DBus notification.') + self.logger.info("Sent DBus notification.") except Exception as e: - self.logger.warning('Failed to send DBus notification.') - self.logger.debug(f'DBus Exception: {e}') + self.logger.warning("Failed to send DBus notification.") + self.logger.debug(f"DBus Exception: {e}") return False return True def url(self, privacy=False, *args, **kwargs): - """ - Returns the URL built dynamically based on specified arguments. 
- """ + """Returns the URL built dynamically based on specified arguments.""" # Define any URL parameters params = { - 'image': 'yes' if self.include_image else 'no', - 'urgency': - DBUS_URGENCIES[self.template_args['urgency']['default']] + "image": "yes" if self.include_image else "no", + "urgency": ( + DBUS_URGENCIES[self.template_args["urgency"]["default"]] if self.urgency not in DBUS_URGENCIES - else DBUS_URGENCIES[self.urgency], + else DBUS_URGENCIES[self.urgency] + ), } # Extend our parameters @@ -404,48 +405,43 @@ def url(self, privacy=False, *args, **kwargs): # x in (x,y) screen coordinates if self.x_axis: - params['x'] = str(self.x_axis) + params["x"] = str(self.x_axis) # y in (x,y) screen coordinates if self.y_axis: - params['y'] = str(self.y_axis) + params["y"] = str(self.y_axis) - return '{schema}://_/?{params}'.format( - schema=self.schema, - params=NotifyDBus.urlencode(params), - ) + return f"{self.schema}://_/?{NotifyDBus.urlencode(params)}" @staticmethod def parse_url(url): - """ - There are no parameters nessisary for this protocol; simply having - gnome:// is all you need. This function just makes sure that - is in place. + """There are no parameters nessisary for this protocol; simply having + gnome:// is all you need. + This function just makes sure that is in place. 
""" results = NotifyBase.parse_url(url, verify_host=False) # Include images with our message - results['include_image'] = \ - parse_bool(results['qsd'].get('image', True)) + results["include_image"] = parse_bool( + results["qsd"].get("image", True) + ) # DBus supports urgency, but we we also support the keyword priority # so that it is consistent with some of the other plugins - if 'priority' in results['qsd'] and len(results['qsd']['priority']): + if "priority" in results["qsd"] and len(results["qsd"]["priority"]): # We intentionally store the priority in the urgency section - results['urgency'] = \ - NotifyDBus.unquote(results['qsd']['priority']) + results["urgency"] = NotifyDBus.unquote(results["qsd"]["priority"]) - if 'urgency' in results['qsd'] and len(results['qsd']['urgency']): - results['urgency'] = \ - NotifyDBus.unquote(results['qsd']['urgency']) + if "urgency" in results["qsd"] and len(results["qsd"]["urgency"]): + results["urgency"] = NotifyDBus.unquote(results["qsd"]["urgency"]) # handle x,y coordinates - if 'x' in results['qsd'] and len(results['qsd']['x']): - results['x_axis'] = NotifyDBus.unquote(results['qsd'].get('x')) + if "x" in results["qsd"] and len(results["qsd"]["x"]): + results["x_axis"] = NotifyDBus.unquote(results["qsd"].get("x")) - if 'y' in results['qsd'] and len(results['qsd']['y']): - results['y_axis'] = NotifyDBus.unquote(results['qsd'].get('y')) + if "y" in results["qsd"] and len(results["qsd"]["y"]): + results["y_axis"] = NotifyDBus.unquote(results["qsd"].get("y")) return results diff --git a/libs/apprise/plugins/dingtalk.py b/libs/apprise/plugins/dingtalk.py index 84de85db07..0c4bdfb744 100644 --- a/libs/apprise/plugins/dingtalk.py +++ b/libs/apprise/plugins/dingtalk.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. 
-# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -26,20 +25,20 @@ # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. +import base64 +import hashlib +import hmac +from json import dumps import re import time -import hmac -import hashlib -import base64 + import requests -from json import dumps -from .base import NotifyBase +from ..common import NotifyFormat, NotifyType +from ..locale import gettext_lazy as _ from ..url import PrivacyMode -from ..common import NotifyFormat -from ..common import NotifyType from ..utils.parse import parse_list, validate_regex -from ..locale import gettext_lazy as _ +from .base import NotifyBase # Register at https://dingtalk.com # - Download their PC based software as it is the only way you can create @@ -52,28 +51,26 @@ # dingtalk://{access_token}/{phone_no_1}/{phone_no_2}/{phone_no_N/ # Some Phone Number Detection -IS_PHONE_NO = re.compile(r'^\+?(?P[0-9\s)(+-]+)\s*$') +IS_PHONE_NO = re.compile(r"^\+?(?P[0-9\s)(+-]+)\s*$") class NotifyDingTalk(NotifyBase): - """ - A wrapper for DingTalk Notifications - """ + """A wrapper for DingTalk Notifications.""" # The default descriptive name associated with the Notification - service_name = 'DingTalk' + service_name = "DingTalk" # The services URL - service_url = 'https://www.dingtalk.com/' + service_url = "https://www.dingtalk.com/" # All notification requests are secure - secure_protocol = 'dingtalk' + secure_protocol = "dingtalk" # A URL that takes you to the setup/help of the specific protocol - setup_url = 'https://github.com/caronc/apprise/wiki/Notify_dingtalk' + setup_url = "https://appriseit.com/services/dingtalk/" # DingTalk API - notify_url = 'https://oapi.dingtalk.com/robot/send?access_token={token}' + notify_url = 
"https://oapi.dingtalk.com/robot/send?access_token={token}" # Do not set title_maxlen as it is set in a property value below # since the length varies depending if we are doing a markdown @@ -82,89 +79,92 @@ class NotifyDingTalk(NotifyBase): # Define object templates templates = ( - '{schema}://{token}/', - '{schema}://{token}/{targets}/', - '{schema}://{secret}@{token}/', - '{schema}://{secret}@{token}/{targets}/', + "{schema}://{token}/", + "{schema}://{token}/{targets}/", + "{schema}://{secret}@{token}/", + "{schema}://{secret}@{token}/{targets}/", ) # Define our template tokens - template_tokens = dict(NotifyBase.template_tokens, **{ - 'token': { - 'name': _('Token'), - 'type': 'string', - 'private': True, - 'required': True, - 'regex': (r'^[a-z0-9]+$', 'i'), - }, - 'secret': { - 'name': _('Secret'), - 'type': 'string', - 'private': True, - 'regex': (r'^[a-z0-9]+$', 'i'), + template_tokens = dict( + NotifyBase.template_tokens, + **{ + "token": { + "name": _("Token"), + "type": "string", + "private": True, + "required": True, + "regex": (r"^[a-z0-9]+$", "i"), + }, + "secret": { + "name": _("Secret"), + "type": "string", + "private": True, + "regex": (r"^[a-z0-9]+$", "i"), + }, + "target_phone_no": { + "name": _("Target Phone No"), + "type": "string", + "map_to": "targets", + }, + "targets": { + "name": _("Targets"), + "type": "list:string", + }, }, - 'target_phone_no': { - 'name': _('Target Phone No'), - 'type': 'string', - 'map_to': 'targets', - }, - 'targets': { - 'name': _('Targets'), - 'type': 'list:string', - }, - }) + ) # Define our template arguments - template_args = dict(NotifyBase.template_args, **{ - 'to': { - 'alias_of': 'targets', + template_args = dict( + NotifyBase.template_args, + **{ + "to": { + "alias_of": "targets", + }, + "token": { + "alias_of": "token", + }, + "secret": { + "alias_of": "secret", + }, }, - 'token': { - 'alias_of': 'token', - }, - 'secret': { - 'alias_of': 'secret', - }, - }) + ) def __init__(self, token, targets=None, 
secret=None, **kwargs): - """ - Initialize DingTalk Object - """ + """Initialize DingTalk Object.""" super().__init__(**kwargs) # Secret Key (associated with project) self.token = validate_regex( - token, *self.template_tokens['token']['regex']) + token, *self.template_tokens["token"]["regex"] + ) if not self.token: - msg = 'An invalid DingTalk API Token ' \ - '({}) was specified.'.format(token) + msg = f"An invalid DingTalk API Token ({token}) was specified." self.logger.warning(msg) raise TypeError(msg) self.secret = None if secret: self.secret = validate_regex( - secret, *self.template_tokens['secret']['regex']) + secret, *self.template_tokens["secret"]["regex"] + ) if not self.secret: - msg = 'An invalid DingTalk Secret ' \ - '({}) was specified.'.format(token) + msg = f"An invalid DingTalk Secret ({token}) was specified." self.logger.warning(msg) raise TypeError(msg) # Parse our targets - self.targets = list() + self.targets = [] for target in parse_list(targets): # Validate targets and drop bad ones: result = IS_PHONE_NO.match(target) if result: # Further check our phone # for it's digit count - result = ''.join(re.findall(r'\d+', result.group('phone'))) + result = "".join(re.findall(r"\d+", result.group("phone"))) if len(result) < 11 or len(result) > 14: self.logger.warning( - 'Dropped invalid phone # ' - '({}) specified.'.format(target), + f"Dropped invalid phone # ({target}) specified.", ) continue @@ -173,47 +173,43 @@ def __init__(self, token, targets=None, secret=None, **kwargs): continue self.logger.warning( - 'Dropped invalid phone # ' - '({}) specified.'.format(target), + f"Dropped invalid phone # ({target}) specified.", ) return def get_signature(self): - """ - Calculates time-based signature so that we can send arbitrary messages. 
- """ + """Calculates time-based signature so that we can send arbitrary + messages.""" timestamp = str(round(time.time() * 1000)) - secret_enc = self.secret.encode('utf-8') - str_to_sign_enc = \ - "{}\n{}".format(timestamp, self.secret).encode('utf-8') + secret_enc = self.secret.encode("utf-8") + str_to_sign_enc = f"{timestamp}\n{self.secret}".encode() hmac_code = hmac.new( - secret_enc, str_to_sign_enc, digestmod=hashlib.sha256).digest() - signature = NotifyDingTalk.quote(base64.b64encode(hmac_code), safe='') + secret_enc, str_to_sign_enc, digestmod=hashlib.sha256 + ).digest() + signature = NotifyDingTalk.quote(base64.b64encode(hmac_code), safe="") return timestamp, signature - def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): - """ - Perform DingTalk Notification - """ + def send(self, body, title="", notify_type=NotifyType.INFO, **kwargs): + """Perform DingTalk Notification.""" payload = { - 'msgtype': 'text', - 'at': { - 'atMobiles': self.targets, - 'isAtAll': False, - } + "msgtype": "text", + "at": { + "atMobiles": self.targets, + "isAtAll": False, + }, } if self.notify_format == NotifyFormat.MARKDOWN: - payload['markdown'] = { - 'title': title, - 'text': body, + payload["markdown"] = { + "title": title, + "text": body, } else: - payload['text'] = { - 'content': body, + payload["text"] = { + "content": body, } # Our Notification URL @@ -223,20 +219,22 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): if self.secret: timestamp, signature = self.get_signature() params = { - 'timestamp': timestamp, - 'sign': signature, + "timestamp": timestamp, + "sign": signature, } # Prepare our headers headers = { - 'User-Agent': self.app_id, - 'Content-Type': 'application/json' + "User-Agent": self.app_id, + "Content-Type": "application/json", } # Some Debug Logging - self.logger.debug('DingTalk URL: {} (cert_verify={})'.format( - notify_url, self.verify_certificate)) - self.logger.debug('DingTalk Payload: {}' .format(payload)) + 
self.logger.debug( + "DingTalk URL:" + f" {notify_url} (cert_verify={self.verify_certificate})" + ) + self.logger.debug(f"DingTalk Payload: {payload}") # Always call throttle before any remote server i/o is made self.throttle() @@ -248,119 +246,124 @@ def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs): headers=headers, params=params, verify=self.verify_certificate, + timeout=self.request_timeout, ) if r.status_code != requests.codes.ok: # We had a problem - status_str = \ - NotifyDingTalk.http_response_code_lookup( - r.status_code) + status_str = NotifyDingTalk.http_response_code_lookup( + r.status_code + ) self.logger.warning( - 'Failed to send DingTalk notification: ' - '{}{}error={}.'.format( - status_str, - ', ' if status_str else '', - r.status_code)) + "Failed to send DingTalk notification: " + "{}{}error={}.".format( + status_str, ", " if status_str else "", r.status_code + ) + ) self.logger.debug( - 'Response Details:\r\n{}'.format(r.content)) + "Response Details:\r\n%r", (r.content or b"")[:2000]) + return False else: - self.logger.info('Sent DingTalk notification.') + self.logger.info("Sent DingTalk notification.") except requests.RequestException as e: self.logger.warning( - 'A Connection error occured sending DingTalk ' - 'notification.' + "A Connection error occured sending DingTalk notification." ) - self.logger.debug('Socket Exception: %s' % str(e)) + self.logger.debug(f"Socket Exception: {e!s}") return False return True @property def title_maxlen(self): - """ - The title isn't used when not in markdown mode. - """ - return NotifyBase.title_maxlen \ - if self.notify_format == NotifyFormat.MARKDOWN else 0 + """The title isn't used when not in markdown mode.""" + return ( + NotifyBase.title_maxlen + if self.notify_format == NotifyFormat.MARKDOWN + else 0 + ) def url(self, privacy=False, *args, **kwargs): - """ - Returns the URL built dynamically based on specified arguments. 
- """ + """Returns the URL built dynamically based on specified arguments.""" # Define any arguments set args = { - 'format': self.notify_format, - 'overflow': self.overflow_mode, - 'verify': 'yes' if self.verify_certificate else 'no', + "format": self.notify_format, + "overflow": self.overflow_mode, + "verify": "yes" if self.verify_certificate else "no", } - return '{schema}://{secret}{token}/{targets}/?{args}'.format( + return "{schema}://{secret}{token}/{targets}/?{args}".format( schema=self.secure_protocol, - secret='' if not self.secret else '{}@'.format(self.pprint( - self.secret, privacy, mode=PrivacyMode.Secret, safe='')), - token=self.pprint(self.token, privacy, safe=''), - targets='/'.join( - [NotifyDingTalk.quote(x, safe='') for x in self.targets]), - args=NotifyDingTalk.urlencode(args)) + secret=( + "" + if not self.secret + else "{}@".format( + self.pprint( + self.secret, privacy, mode=PrivacyMode.Secret, safe="" + ) + ) + ), + token=self.pprint(self.token, privacy, safe=""), + targets="/".join( + [NotifyDingTalk.quote(x, safe="") for x in self.targets] + ), + args=NotifyDingTalk.urlencode(args), + ) @property def url_identifier(self): - """ - Returns all of the identifiers that make this URL unique from - another simliar one. Targets or end points should never be identified - here. + """Returns all of the identifiers that make this URL unique from + another simliar one. + + Targets or end points should never be identified here. """ return (self.secure_protocol, self.secret, self.token) def __len__(self): - """ - Returns the number of targets associated with this notification - """ + """Returns the number of targets associated with this notification.""" targets = len(self.targets) return targets if targets > 0 else 1 @staticmethod def parse_url(url): - """ - Parses the URL and returns enough arguments that can allow - us to substantiate this object. 
- - """ + """Parses the URL and returns enough arguments that can allow us to + substantiate this object.""" results = NotifyBase.parse_url(url, verify_host=False) if not results: # We're done early as we couldn't load the results return results - results['token'] = NotifyDingTalk.unquote(results['host']) + results["token"] = NotifyDingTalk.unquote(results["host"]) # if a user has been defined, use it's value as the secret - if results.get('user'): - results['secret'] = results.get('user') + if results.get("user"): + results["secret"] = results.get("user") # Get our entries; split_path() looks after unquoting content for us # by default - results['targets'] = NotifyDingTalk.split_path(results['fullpath']) + results["targets"] = NotifyDingTalk.split_path(results["fullpath"]) # Support the use of the `token` keyword argument - if 'token' in results['qsd'] and len(results['qsd']['token']): - results['token'] = \ - NotifyDingTalk.unquote(results['qsd']['token']) + if "token" in results["qsd"] and len(results["qsd"]["token"]): + results["token"] = NotifyDingTalk.unquote(results["qsd"]["token"]) # Support the use of the `secret` keyword argument - if 'secret' in results['qsd'] and len(results['qsd']['secret']): - results['secret'] = \ - NotifyDingTalk.unquote(results['qsd']['secret']) + if "secret" in results["qsd"] and len(results["qsd"]["secret"]): + results["secret"] = NotifyDingTalk.unquote( + results["qsd"]["secret"] + ) # Support the 'to' variable so that we can support targets this way too # The 'to' makes it easier to use yaml configuration - if 'to' in results['qsd'] and len(results['qsd']['to']): - results['targets'] += \ - NotifyDingTalk.parse_list(results['qsd']['to']) + if "to" in results["qsd"] and len(results["qsd"]["to"]): + results["targets"] += NotifyDingTalk.parse_list( + results["qsd"]["to"] + ) return results diff --git a/libs/apprise/plugins/discord.py b/libs/apprise/plugins/discord.py index 14974cb875..3b05f1232f 100644 --- 
a/libs/apprise/plugins/discord.py +++ b/libs/apprise/plugins/discord.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. -# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -43,46 +42,45 @@ # API Documentation on Webhooks: # - https://discord.com/developers/docs/resources/webhook # +from __future__ import annotations + +from datetime import datetime, timedelta, timezone +from itertools import chain +from json import dumps import re +from typing import Any + import requests -from json import dumps -from datetime import timedelta -from datetime import datetime -from datetime import timezone -from .base import NotifyBase -from ..common import NotifyImageSize -from ..common import NotifyFormat -from ..common import NotifyType -from ..utils.parse import parse_bool, validate_regex -from ..locale import gettext_lazy as _ from ..attachment.base import AttachBase +from ..common import NotifyFormat, NotifyImageSize, NotifyType +from ..locale import gettext_lazy as _ +from ..utils.parse import parse_bool, parse_list, validate_regex +from .base import NotifyBase - -# Used to detect user/role IDs +# Used to detect user/role IDs and @here/@everyone tokens. 
USER_ROLE_DETECTION_RE = re.compile( - r'\s*(?:<@(?P&?)(?P[0-9]+)>|@(?P[a-z0-9]+))', re.I) + r"\s*(?:&?)(?P[0-9]+)>?|@(?P[a-z0-9]+))", re.I +) class NotifyDiscord(NotifyBase): - """ - A wrapper to Discord Notifications + """A wrapper to Discord Notifications.""" - """ # The default descriptive name associated with the Notification - service_name = 'Discord' + service_name = "Discord" # The services URL - service_url = 'https://discord.com/' + service_url = "https://discord.com/" # The default secure protocol - secure_protocol = 'discord' + secure_protocol = "discord" # A URL that takes you to the setup/help of the specific protocol - setup_url = 'https://github.com/caronc/apprise/wiki/Notify_discord' + setup_url = "https://appriseit.com/services/discord/" # Discord Webhook - notify_url = 'https://discord.com/api/webhooks' + notify_url = "https://discord.com/api/webhooks" # Support attachments attachment_support = True @@ -115,106 +113,137 @@ class NotifyDiscord(NotifyBase): # Define object templates templates = ( - '{schema}://{webhook_id}/{webhook_token}', - '{schema}://{botname}@{webhook_id}/{webhook_token}', + "{schema}://{webhook_id}/{webhook_token}", + "{schema}://{botname}@{webhook_id}/{webhook_token}", ) # Define our template tokens - template_tokens = dict(NotifyBase.template_tokens, **{ - 'botname': { - 'name': _('Bot Name'), - 'type': 'string', - 'map_to': 'user', - }, - 'webhook_id': { - 'name': _('Webhook ID'), - 'type': 'string', - 'private': True, - 'required': True, + template_tokens = dict( + NotifyBase.template_tokens, + **{ + "botname": { + "name": _("Bot Name"), + "type": "string", + "map_to": "user", + }, + "webhook_id": { + "name": _("Webhook ID"), + "type": "string", + "private": True, + "required": True, + }, + "webhook_token": { + "name": _("Webhook Token"), + "type": "string", + "private": True, + "required": True, + }, }, - 'webhook_token': { - 'name': _('Webhook Token'), - 'type': 'string', - 'private': True, - 'required': True, - }, - }) 
+ ) # Define our template arguments - template_args = dict(NotifyBase.template_args, **{ - 'tts': { - 'name': _('Text To Speech'), - 'type': 'bool', - 'default': False, - }, - 'avatar': { - 'name': _('Avatar Image'), - 'type': 'bool', - 'default': True, + template_args = dict( + NotifyBase.template_args, + **{ + "tts": { + "name": _("Text To Speech"), + "type": "bool", + "default": False, + }, + "avatar": { + "name": _("Avatar Image"), + "type": "bool", + "default": True, + }, + "avatar_url": { + "name": _("Avatar URL"), + "type": "string", + }, + "href": { + "name": _("URL"), + "type": "string", + }, + "url": { + "alias_of": "href", + }, + # Send a message to the specified thread within a webhook's + # channel. The thread will automatically be unarchived. + "thread": { + "name": _("Thread ID"), + "type": "string", + }, + "footer": { + "name": _("Display Footer"), + "type": "bool", + "default": False, + }, + "footer_logo": { + "name": _("Footer Logo"), + "type": "bool", + "default": True, + }, + "fields": { + "name": _("Use Fields"), + "type": "bool", + "default": True, + }, + "flags": { + "name": _("Discord Flags"), + "type": "int", + "min": 0, + }, + "image": { + "name": _("Include Image"), + "type": "bool", + "default": False, + "map_to": "include_image", + }, + # Explicit ping targets. Examples: + # - ping=12345,67890 + # - ping=<@12345>,<@&67890>,@here + "ping": { + "name": _("Ping Users/Roles"), + "type": "list:string", + }, }, - 'avatar_url': { - 'name': _('Avatar URL'), - 'type': 'string', - }, - 'href': { - 'name': _('URL'), - 'type': 'string', - }, - 'url': { - 'alias_of': 'href', - }, - # Send a message to the specified thread within a webhook's channel. - # The thread will automatically be unarchived. 
- 'thread': { - 'name': _('Thread ID'), - 'type': 'string', - }, - 'footer': { - 'name': _('Display Footer'), - 'type': 'bool', - 'default': False, - }, - 'footer_logo': { - 'name': _('Footer Logo'), - 'type': 'bool', - 'default': True, - }, - 'fields': { - 'name': _('Use Fields'), - 'type': 'bool', - 'default': True, - }, - 'image': { - 'name': _('Include Image'), - 'type': 'bool', - 'default': False, - 'map_to': 'include_image', - }, - }) - - def __init__(self, webhook_id, webhook_token, tts=False, avatar=True, - footer=False, footer_logo=True, include_image=False, - fields=True, avatar_url=None, href=None, thread=None, - **kwargs): - """ - Initialize Discord Object + ) - """ + def __init__( + self, + webhook_id: str, + webhook_token: str, + tts: bool = False, + avatar: bool = True, + footer: bool = False, + footer_logo: bool = True, + include_image: bool = False, + fields: bool = True, + avatar_url: str | None = None, + href: str | None = None, + thread: str | None = None, + flags: int | None = None, + ping: list[str] | None = None, + **kwargs: Any, + ) -> None: + """Initialize Discord Object.""" super().__init__(**kwargs) # Webhook ID (associated with project) self.webhook_id = validate_regex(webhook_id) if not self.webhook_id: - msg = 'An invalid Discord Webhook ID ' \ - '({}) was specified.'.format(webhook_id) + msg = ( + f"An invalid Discord Webhook ID ({webhook_id}) was " + "specified.") self.logger.warning(msg) raise TypeError(msg) # Webhook Token (associated with project) self.webhook_token = validate_regex(webhook_token) if not self.webhook_token: - msg = 'An invalid Discord Webhook Token ' \ - '({}) was specified.'.format(webhook_token) + msg = ( + "An invalid Discord Webhook Token " + f"({webhook_token}) was specified." 
+ ) self.logger.warning(msg) raise TypeError(msg) @@ -247,7 +276,25 @@ def __init__(self, webhook_id, webhook_token, tts=False, avatar=True, # A URL to have the title link to self.href = href - # For Tracking Purposes + # A URL to have the title link to + if flags: + try: + self.flags = int(flags) + if self.flags < NotifyDiscord.template_args["flags"]["min"]: + raise ValueError() + + except (TypeError, ValueError): + msg = ( + f"An invalid Discord flags setting ({flags}) was " + "specified.") + self.logger.warning(msg) + raise TypeError(msg) from None + else: + self.flags = None + + # Ping targets (tokens from URL, already split by parse_list) + self.ping: list[str] = parse_list(ping) + self.ratelimit_reset = datetime.now(timezone.utc).replace(tzinfo=None) # Default to 1.0 @@ -255,73 +302,93 @@ def __init__(self, webhook_id, webhook_token, tts=False, avatar=True, return - def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, - **kwargs): - """ - Perform Discord Notification - """ - - payload = { - # Text-To-Speech - 'tts': self.tts, - + def send( + self, + body: str, + title: str = "", + notify_type: NotifyType = NotifyType.INFO, + attach: list[AttachBase] | None = None, + **kwargs: Any, + ) -> bool: + """Perform Discord Notification.""" + + payload: dict[str, Any] = { + "tts": self.tts, # If Text-To-Speech is set to True, then we do not want to wait # for the whole message before continuing. 
Otherwise, we wait - 'wait': self.tts is False, + "wait": self.tts is False, } + if self.flags: + # Set our flag if defined: + payload["flags"] = self.flags + # Acquire image_url image_url = self.image_url(notify_type) if self.avatar and (image_url or self.avatar_url): - payload['avatar_url'] = \ + payload["avatar_url"] = ( self.avatar_url if self.avatar_url else image_url + ) if self.user: # Optionally override the default username of the webhook - payload['username'] = self.user + payload["username"] = self.user # Associate our thread_id with our message - params = {'thread_id': self.thread_id} if self.thread_id else None + params = {"thread_id": self.thread_id} if self.thread_id else None + + # Ping handling rules: + # - If ping= is set, it is an additive if in MARKDOWN mode otherwise + # it is explicit for TEXT/HTML formats. + # - Otherwise, ping detection only happens in MARKDOWN mode + if self.notify_format == NotifyFormat.MARKDOWN: + if self.ping: + payload.update(self.ping_payload(body, " ".join(self.ping))) + else: + payload.update(self.ping_payload(body)) + + # TEXT/HTML: no body parsing, ping= is exclusive + elif self.ping: + payload.update(self.ping_payload(" ".join(self.ping))) if body: - # our fields variable - fields = [] + # Track extra embed fields (if used) + fields: list[dict[str, str]] = [] if self.notify_format == NotifyFormat.MARKDOWN: # Use embeds for payload - payload['embeds'] = [{ - 'author': { - 'name': self.app_id, - 'url': self.app_url, + payload["embeds"] = [{ + "author": { + "name": self.app_id, + "url": self.app_url, }, - 'title': title, - 'description': body, - + "title": title, + "description": body, # Our color associated with our notification - 'color': self.color(notify_type, int), + "color": self.color(notify_type, int), }] if self.href: - payload['embeds'][0]['url'] = self.href + payload["embeds"][0]["url"] = self.href if self.footer: # Acquire logo URL logo_url = self.image_url(notify_type, logo=True) # Set Footer text to our 
app description - payload['embeds'][0]['footer'] = { - 'text': self.app_desc, + payload["embeds"][0]["footer"] = { + "text": self.app_desc, } if self.footer_logo and logo_url: - payload['embeds'][0]['footer']['icon_url'] = logo_url + payload["embeds"][0]["footer"]["icon_url"] = logo_url if self.include_image and image_url: - payload['embeds'][0]['thumbnail'] = { - 'url': image_url, - 'height': 256, - 'width': 256, + payload["embeds"][0]["thumbnail"] = { + "url": image_url, + "height": 256, + "width": 256, } if self.fields: @@ -329,58 +396,35 @@ def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, description, fields = self.extract_markdown_sections(body) # Swap first entry for description - payload['embeds'][0]['description'] = description + payload["embeds"][0]["description"] = description if fields: # Apply our additional parsing for a better # presentation - payload['embeds'][0]['fields'] = \ - fields[:self.discord_max_fields] - - # Remove entry from head of fields - fields = fields[self.discord_max_fields:] - + payload["embeds"][0]["fields"] = fields[ + : self.discord_max_fields + ] + fields = fields[self.discord_max_fields :] else: - # not markdown - payload['content'] = \ - body if not title else "{}\r\n{}".format(title, body) - - # parse for user id's <@123> and role IDs <@&456> - results = USER_ROLE_DETECTION_RE.findall(body) - if results: - payload['allow_mentions'] = { - 'parse': [], - 'users': [], - 'roles': [], - } - - _content = [] - for (is_role, no, value) in results: - if value: - payload['allow_mentions']['parse'].append(value) - _content.append(f'@{value}') - - elif is_role: - payload['allow_mentions']['roles'].append(no) - _content.append(f'<@&{no}>') - - else: # is_user - payload['allow_mentions']['users'].append(no) - _content.append(f'<@{no}>') - - if self.notify_format == NotifyFormat.MARKDOWN: - # Add pingable elements to content field - payload['content'] = '👉 ' + ' '.join(_content) + # TEXT or HTML: + # - No ping 
detection unless ping= was provided. + # - If ping= was provided, ping_payload() already generated + # payload["content"] starting with "👉 ...", and we append + # it. + payload["content"] = ( + body if not title else f"{title}\r\n{body}" + ) + payload.get("content", "") if not self._send(payload, params=params): # We failed to post our message return False - # Process any remaining fields IF set + # Send remaining fields (if any) if fields: - payload['embeds'][0]['description'] = '' + payload["embeds"][0]["description"] = "" for i in range(0, len(fields), self.discord_max_fields): - payload['embeds'][0]['fields'] = \ - fields[i:i + self.discord_max_fields] + payload["embeds"][0]["fields"] = fields[ + i : i + self.discord_max_fields + ] if not self._send(payload): # We failed to post our message return False @@ -390,29 +434,25 @@ def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, # and assigned values for re-use here too payload.update({ # Text-To-Speech - 'tts': False, + "tts": False, # Wait until the upload has posted itself before continuing - 'wait': True, + "wait": True, }) # # Remove our text/title based content for attachment use # - if 'embeds' in payload: - del payload['embeds'] - - if 'content' in payload: - del payload['content'] - - if 'allow_mentions' in payload: - del payload['allow_mentions'] + payload.pop("embeds", None) + payload.pop("content", None) + payload.pop("allow_mentions", None) # # Send our attachments # for attachment in attach: self.logger.info( - 'Posting Discord Attachment {}'.format(attachment.name)) + f"Posting Discord Attachment {attachment.name}" + ) if not self._send(payload, params=params, attach=attachment): # We failed to post our message return False @@ -420,31 +460,33 @@ def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, # Otherwise return return True - def _send(self, payload, attach=None, params=None, rate_limit=1, - **kwargs): - """ - Wrapper to the requests (post) object - """ + 
def _send( + self, + payload: dict[str, Any], + attach: AttachBase | None = None, + params: dict[str, str] | None = None, + rate_limit: int = 1, + **kwargs: Any, + ) -> bool: + """Wrapper to the requests (post) object.""" # Our headers headers = { - 'User-Agent': self.app_id, + "User-Agent": self.app_id, } # Construct Notify URL - notify_url = '{0}/{1}/{2}'.format( - self.notify_url, - self.webhook_id, - self.webhook_token, + notify_url = ( + f"{self.notify_url}/{self.webhook_id}/{self.webhook_token}" ) - self.logger.debug('Discord POST URL: %s (cert_verify=%r)' % ( - notify_url, self.verify_certificate, - )) - self.logger.debug('Discord Payload: %s' % str(payload)) + self.logger.debug( + "Discord POST URL:" + f" {notify_url} (cert_verify={self.verify_certificate!r})" + ) + self.logger.debug(f"Discord Payload: {payload!s}") - # By default set wait to None - wait = None + wait: float | None = None if self.ratelimit_remaining <= 0.0: # Determine how long we should wait for or if we should wait at @@ -457,8 +499,10 @@ def _send(self, payload, attach=None, params=None, rate_limit=1, if now < self.ratelimit_reset: # We need to throttle for the difference in seconds wait = abs( - (self.ratelimit_reset - now + self.clock_skew) - .total_seconds()) + ( + self.ratelimit_reset - now + self.clock_skew + ).total_seconds() + ) # Always call throttle before any remote server i/o is made; self.throttle(wait=wait) @@ -468,13 +512,13 @@ def _send(self, payload, attach=None, params=None, rate_limit=1, if not attach: # We could not access the attachment self.logger.error( - 'Could not access attachment {}.'.format( - attach.url(privacy=True))) + f"Could not access attachment {attach.url(privacy=True)}." 
+ ) return False self.logger.debug( - 'Posting Discord attachment {}'.format( - attach.url(privacy=True))) + f"Posting Discord attachment {attach.url(privacy=True)}" + ) # Our attachment path (if specified) files = None @@ -482,10 +526,16 @@ def _send(self, payload, attach=None, params=None, rate_limit=1, # Open our attachment path if required: if attach: - files = {'file': (attach.name, open(attach.path, 'rb'))} - + files = { + "file": ( + attach.name, + # file handle is safely closed in `finally`; inline + # open is intentional + open(attach.path, "rb"), # noqa: SIM115 + ) + } else: - headers['Content-Type'] = 'application/json; charset=utf-8' + headers["Content-Type"] = "application/json; charset=utf-8" r = requests.post( notify_url, @@ -500,12 +550,12 @@ def _send(self, payload, attach=None, params=None, rate_limit=1, # Handle rate limiting (if specified) try: # Store our rate limiting (if provided) - self.ratelimit_remaining = \ - float(r.headers.get( - 'X-RateLimit-Remaining')) + self.ratelimit_remaining = float( + r.headers.get("X-RateLimit-Remaining") + ) self.ratelimit_reset = datetime.fromtimestamp( - int(r.headers.get('X-RateLimit-Reset')), - timezone.utc).replace(tzinfo=None) + int(r.headers.get("X-RateLimit-Reset")), timezone.utc + ).replace(tzinfo=None) except (TypeError, ValueError): # This is returned if we could not retrieve this @@ -513,123 +563,141 @@ def _send(self, payload, attach=None, params=None, rate_limit=1, pass if r.status_code not in ( - requests.codes.ok, requests.codes.no_content): + requests.codes.ok, + requests.codes.no_content, + ): # We had a problem - status_str = \ - NotifyBase.http_response_code_lookup(r.status_code) + status_str = NotifyBase.http_response_code_lookup( + r.status_code + ) - if r.status_code == requests.codes.too_many_requests \ - and rate_limit > 0: + if ( + r.status_code == requests.codes.too_many_requests + and rate_limit > 0 + ): # handle rate limiting self.logger.warning( - 'Discord rate limiting in effect; 
' - 'blocking for %.2f second(s)', - self.ratelimit_remaining) + "Discord rate limiting in effect; " + "blocking for %.2f second(s)", + self.ratelimit_remaining, + ) # Try one more time before failing return self._send( - payload=payload, attach=attach, params=params, - rate_limit=rate_limit - 1, **kwargs) + payload=payload, + attach=attach, + params=params, + rate_limit=rate_limit - 1, + **kwargs, + ) self.logger.warning( - 'Failed to send {}to Discord notification: ' - '{}{}error={}.'.format( - attach.name if attach else '', + "Failed to send {}to Discord notification: " + "{}{}error={}.".format( + attach.name if attach else "", status_str, - ', ' if status_str else '', - r.status_code)) + ", " if status_str else "", + r.status_code, + ) + ) - self.logger.debug('Response Details:\r\n{}'.format(r.content)) + self.logger.debug( + "Response Details:\r\n%r", (r.content or b"")[:2000]) # Return; we're done return False else: - self.logger.info('Sent Discord {}.'.format( - 'attachment' if attach else 'notification')) + self.logger.info( + "Sent Discord {}.".format( + "attachment" if attach else "notification" + ) + ) except requests.RequestException as e: self.logger.warning( - 'A Connection error occurred posting {}to Discord.'.format( - attach.name if attach else '')) - self.logger.debug('Socket Exception: %s' % str(e)) + "A Connection error occurred posting {}to Discord.".format( + attach.name if attach else "" + ) + ) + self.logger.debug(f"Socket Exception: {e!s}") return False - except (OSError, IOError) as e: + except OSError as e: self.logger.warning( - 'An I/O error occurred while reading {}.'.format( - attach.name if attach else 'attachment')) - self.logger.debug('I/O Exception: %s' % str(e)) + "An I/O error occurred while reading {}.".format( + attach.name if attach else "attachment" + ) + ) + self.logger.debug(f"I/O Exception: {e!s}") return False finally: # Close our file (if it's open) stored in the second element # of our files tuple (index 1) if files: - 
files['file'][1].close() + files["file"][1].close() return True - def url(self, privacy=False, *args, **kwargs): - """ - Returns the URL built dynamically based on specified arguments. - """ + def url(self, privacy: bool = False, *args: Any, **kwargs: Any) -> str: + """Returns the URL built dynamically based on specified arguments.""" - # Define any URL parameters - params = { - 'tts': 'yes' if self.tts else 'no', - 'avatar': 'yes' if self.avatar else 'no', - 'footer': 'yes' if self.footer else 'no', - 'footer_logo': 'yes' if self.footer_logo else 'no', - 'image': 'yes' if self.include_image else 'no', - 'fields': 'yes' if self.fields else 'no', + params: dict[str, str] = { + "tts": "yes" if self.tts else "no", + "avatar": "yes" if self.avatar else "no", + "footer": "yes" if self.footer else "no", + "footer_logo": "yes" if self.footer_logo else "no", + "image": "yes" if self.include_image else "no", + "fields": "yes" if self.fields else "no", } if self.avatar_url: - params['avatar_url'] = self.avatar_url + params["avatar_url"] = self.avatar_url + + if self.flags: + params["flags"] = str(self.flags) if self.href: - params['href'] = self.href + params["href"] = self.href if self.thread_id: - params['thread'] = self.thread_id + params["thread"] = self.thread_id + + if self.ping: + # Let Apprise urlencode handle list formatting + params["ping"] = ",".join(self.ping) # Ensure our botname is set - botname = f'{self.user}@' if self.user else '' + botname = f"{self.user}@" if self.user else "" # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) - return '{schema}://{botname}{webhook_id}/{webhook_token}/?{params}' \ - .format( + return ( + "{schema}://{bname}{webhook_id}/{webhook_token}/?{params}".format( schema=self.secure_protocol, - botname=botname, - webhook_id=self.pprint(self.webhook_id, privacy, safe=''), + bname=botname, + webhook_id=self.pprint(self.webhook_id, privacy, safe=""), webhook_token=self.pprint( - 
self.webhook_token, privacy, safe=''), + self.webhook_token, privacy, safe=""), params=NotifyDiscord.urlencode(params), ) + ) @property - def url_identifier(self): - """ - Returns all of the identifiers that make this URL unique from - another simliar one. Targets or end points should never be identified - here. - """ + def url_identifier(self) -> tuple[str, str, str]: + """Returns all of the identifiers that make this URL unique.""" return (self.secure_protocol, self.webhook_id, self.webhook_token) @staticmethod - def parse_url(url): - """ - Parses the URL and returns enough arguments that can allow - us to re-instantiate this object. + def parse_url(url: str) -> dict[str, Any] | None: + """Parses the URL and returns arguments for instantiating this object. Syntax: discord://webhook_id/webhook_token - """ results = NotifyBase.parse_url(url, verify_host=False) if not results: @@ -637,75 +705,83 @@ def parse_url(url): return results # Store our webhook ID - webhook_id = NotifyDiscord.unquote(results['host']) + webhook_id = NotifyDiscord.unquote(results["host"]) # Now fetch our tokens try: - webhook_token = \ - NotifyDiscord.split_path(results['fullpath'])[0] + webhook_token = NotifyDiscord.split_path(results["fullpath"])[0] except IndexError: # Force some bad values that will get caught # in parsing later webhook_token = None - results['webhook_id'] = webhook_id - results['webhook_token'] = webhook_token + results["webhook_id"] = webhook_id + results["webhook_token"] = webhook_token # Text To Speech - results['tts'] = parse_bool(results['qsd'].get('tts', False)) + results["tts"] = parse_bool(results["qsd"].get("tts", False)) # Use sections # effectively detect multiple fields and break them off # into sections - results['fields'] = parse_bool(results['qsd'].get('fields', True)) + results["fields"] = parse_bool(results["qsd"].get("fields", True)) # Use Footer - results['footer'] = parse_bool(results['qsd'].get('footer', False)) + results["footer"] = 
parse_bool(results["qsd"].get("footer", False)) # Use Footer Logo - results['footer_logo'] = \ - parse_bool(results['qsd'].get('footer_logo', True)) + results["footer_logo"] = parse_bool( + results["qsd"].get("footer_logo", True) + ) # Update Avatar Icon - results['avatar'] = parse_bool(results['qsd'].get('avatar', True)) + results["avatar"] = parse_bool(results["qsd"].get("avatar", True)) # Boolean to include an image or not - results['include_image'] = parse_bool(results['qsd'].get( - 'image', NotifyDiscord.template_args['image']['default'])) + results["include_image"] = parse_bool( + results["qsd"].get( + "image", NotifyDiscord.template_args["image"]["default"] + ) + ) + + if "botname" in results["qsd"]: + # Alias to User + results["user"] = NotifyDiscord.unquote(results["qsd"]["botname"]) - if 'botname' in results['qsd']: + if "flags" in results["qsd"]: # Alias to User - results['user'] = \ - NotifyDiscord.unquote(results['qsd']['botname']) + results["flags"] = NotifyDiscord.unquote(results["qsd"]["flags"]) # Extract avatar url if it was specified - if 'avatar_url' in results['qsd']: - results['avatar_url'] = \ - NotifyDiscord.unquote(results['qsd']['avatar_url']) + if "avatar_url" in results["qsd"]: + results["avatar_url"] = NotifyDiscord.unquote( + results["qsd"]["avatar_url"] + ) # Extract url if it was specified - if 'href' in results['qsd']: - results['href'] = \ - NotifyDiscord.unquote(results['qsd']['href']) + if "href" in results["qsd"]: + results["href"] = NotifyDiscord.unquote(results["qsd"]["href"]) - elif 'url' in results['qsd']: - results['href'] = \ - NotifyDiscord.unquote(results['qsd']['url']) + elif "url" in results["qsd"]: + results["href"] = NotifyDiscord.unquote(results["qsd"]["url"]) # Markdown is implied - results['format'] = NotifyFormat.MARKDOWN + results["format"] = NotifyFormat.MARKDOWN # Extract thread id if it was specified - if 'thread' in results['qsd']: - results['thread'] = \ - NotifyDiscord.unquote(results['qsd']['thread']) + if 
"thread" in results["qsd"]: + results["thread"] = NotifyDiscord.unquote(results["qsd"]["thread"]) # Markdown is implied - results['format'] = NotifyFormat.MARKDOWN + results["format"] = NotifyFormat.MARKDOWN + + # Extract ping targets, comma/space separated + if "ping" in results["qsd"]: + results["ping"] = NotifyDiscord.unquote(results["qsd"]["ping"]) return results @staticmethod - def parse_native_url(url): + def parse_native_url(url: str) -> dict[str, Any] | None: """ Support https://discord.com/api/webhooks/WEBHOOK_ID/WEBHOOK_TOKEN Support Legacy URL as well: @@ -713,56 +789,118 @@ def parse_native_url(url): """ result = re.match( - r'^https?://discord(app)?\.com/api/webhooks/' - r'(?P[0-9]+)/' - r'(?P[A-Z0-9_-]+)/?' - r'(?P\?.+)?$', url, re.I) + r"^https?://discord(app)?\.com/api/webhooks/" + r"(?P[0-9]+)/" + r"(?P[A-Z0-9_-]+)/?" + r"(?P\?.+)?$", + url, + re.I, + ) if result: return NotifyDiscord.parse_url( - '{schema}://{webhook_id}/{webhook_token}/{params}'.format( + "{schema}://{webhook_id}/{webhook_token}/{params}".format( schema=NotifyDiscord.secure_protocol, - webhook_id=result.group('webhook_id'), - webhook_token=result.group('webhook_token'), - params='' if not result.group('params') - else result.group('params'))) + webhook_id=result.group("webhook_id"), + webhook_token=result.group("webhook_token"), + params=( + "" + if not result.group("params") + else result.group("params") + ), + ) + ) return None - @staticmethod - def extract_markdown_sections(markdown): + def ping_payload(self, *args: str) -> dict[str, Any]: """ - Takes a string in a markdown type format and extracts - the headers and their corresponding sections into individual - fields that get passed as an embed entry to Discord. + Takes one or more strings and applies the payload associated with + pinging the users detected within. 
+ This returns a dict that may contain: + - allow_mentions + - content (starting with "👉 " and containing mention tokens) """ + + payload: dict[str, Any] = {} + + roles: set[str] = set() + users: set[str] = set() + parse: set[str] = set() + + for arg in args: + # parse for user id's <@123> and role IDs <@&456> + results = USER_ROLE_DETECTION_RE.findall(arg) + if not results: + continue + + for is_role, no, value in results: + if value: + parse.add(value) + + elif is_role: + roles.add(no) + + else: # is_user + users.add(no) + + if not (roles or users or parse): + # Nothing to add + return payload + + payload["allow_mentions"] = { + "parse": list(parse), + "users": list(users), + "roles": list(roles), + } + + payload["content"] = "👉 " + " ".join( + chain( + [f"@{value}" for value in parse], + [f"<@&{value}>" for value in roles], + [f"<@{value}>" for value in users], + ) + ) + + return payload + + @staticmethod + def extract_markdown_sections( + markdown: str) -> tuple[str, list[dict[str, str]]]: + """Extract headers and their corresponding sections into embed + fields.""" + # Search for any header information found without it's own section # identifier match = re.match( - r'^\s*(?P[^\s#]+.*?)(?=\s*$|[\r\n]+\s*#)', - markdown, flags=re.S) + r"^\s*(?P[^\s#]+.*?)(?=\s*$|[\r\n]+\s*#)", + markdown, + flags=re.S, + ) - description = match.group('desc').strip() if match else '' + description = match.group("desc").strip() if match else "" if description: # Strip description from our string since it has been handled # now. 
- markdown = re.sub(re.escape(description), '', markdown, count=1) + markdown = re.sub(re.escape(description), "", markdown, count=1) regex = re.compile( - r'\s*#[# \t\v]*(?P[^\n]+)(\n|\s*$)' - r'\s*((?P[^#].+?)(?=\s*$|[\r\n]+\s*#))?', flags=re.S) + r"\s*#[# \t\v]*(?P[^\n]+)(\n|\s*$)" + r"\s*((?P[^#].+?)(?=\s*$|[\r\n]+\s*#))?", + flags=re.S, + ) common = regex.finditer(markdown) - fields = list() + fields: list[dict[str, str]] = [] for el in common: d = el.groupdict() fields.append({ - 'name': d.get('name', '').strip('#`* \r\n\t\v'), - 'value': '```{}\n{}```'.format( - 'md' if d.get('value') else '', - d.get('value').strip() + '\n' if d.get('value') else '', + "name": d.get("name", "").strip("#`* \r\n\t\v"), + "value": "```{}\n{}```".format( + "md" if d.get("value") else "", + (d.get("value").strip() + "\n" if d.get("value") else ""), ), }) diff --git a/libs/apprise/plugins/dot.py b/libs/apprise/plugins/dot.py new file mode 100644 index 0000000000..5b386059f0 --- /dev/null +++ b/libs/apprise/plugins/dot.py @@ -0,0 +1,618 @@ +# BSD 2-Clause License +# +# Apprise - Push Notification Library. +# Copyright (c) 2026, Chris Caron +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +# +# API: https://dot.mindreset.tech/docs/service/studio/api/text_api +# https://dot.mindreset.tech/docs/service/studio/api/image_api +# +# Text API Fields: +# - refreshNow (bool, optional, default true): controls display timing. +# - deviceId (string, required): unique device serial. +# - title (string, optional): title text shown on screen. +# - message (string, optional): body text shown on screen. +# - signature (string, optional): footer/signature text. +# - icon (string, optional): base64 PNG icon (40px x 40px). +# - link (string, optional): tap-to-interact target URL. +# +# Image API Fields: +# - refreshNow (bool, optional, default true): controls display timing. +# - deviceId (string, required): unique device serial. +# - image (string, required): base64 PNG image (296px x 152px). +# - link (string, optional): tap-to-interact target URL. +# - border (number, optional, default 0): 0=white, 1=black frame. +# - ditherType (string, optional, default DIFFUSION): dithering mode. +# - ditherKernel (string, optional, default FLOYD_STEINBERG): +# dithering kernel. 
+ +from contextlib import suppress +import json +import logging + +import requests + +from ..common import NotifyImageSize, NotifyType +from ..locale import gettext_lazy as _ +from ..url import PrivacyMode +from ..utils.parse import parse_bool +from ..utils.sanitize import sanitize_payload +from .base import NotifyBase + +# Supported Dither Types +DOT_DITHER_TYPES = ( + "DIFFUSION", + "ORDERED", + "NONE", +) + +# Supported Dither Kernels +DOT_DITHER_KERNELS = ( + "THRESHOLD", + "ATKINSON", + "BURKES", + "FLOYD_STEINBERG", + "SIERRA2", + "STUCKI", + "JARVIS_JUDICE_NINKE", + "DIFFUSION_ROW", + "DIFFUSION_COLUMN", + "DIFFUSION_2D", +) + + +class NotifyDot(NotifyBase): + """A wrapper for Dot. Notifications.""" + + # The default descriptive name associated with the Notification + service_name = "Dot." + # Alias: devices marketed as "Quote/0" remain discoverable. + + # The services URL + service_url = "https://dot.mindreset.tech" + + # All notification requests are secure + secure_protocol = "dot" + + # A URL that takes you to the setup/help of the specific protocol + setup_url = "https://appriseit.com/services/dot/" + + # Allows the user to specify the NotifyImageSize object + image_size = NotifyImageSize.XY_128 + + # Support Attachments + attachment_support = True + + # Supported API modes + SUPPORTED_MODES = ("text", "image") + + DEFAULT_MODE = "text" + + # Define object templates + templates = ("{schema}://{apikey}@{device_id}/{mode}/",) + + # Define our template arguments + template_tokens = dict( + NotifyBase.template_tokens, + **{ + "apikey": { + "name": _("API Key"), + "type": "string", + "required": True, + "private": True, + }, + "device_id": { + "name": _("Device Serial Number"), + "type": "string", + "required": True, + "map_to": "device_id", + }, + "mode": { + "name": _("API Mode"), + "type": "choice:string", + "values": SUPPORTED_MODES, + "default": DEFAULT_MODE, + "map_to": "mode", + }, + }, + ) + + # Define our template arguments + template_args = dict( + 
NotifyBase.template_args, + **{ + "refresh": { + "name": _("Refresh Now"), + "type": "bool", + "default": True, + "map_to": "refresh_now", + }, + "signature": { + "name": _("Text Signature"), + "type": "string", + }, + "icon": { + "name": _("Icon Base64 (Text API)"), + "type": "string", + }, + "image": { + "name": _("Image Base64 (Image API)"), + "type": "string", + "map_to": "image_data", + }, + "link": { + "name": _("Link"), + "type": "string", + }, + "border": { + "name": _("Border"), + "type": "int", + "min": 0, + "max": 1, + "default": 0, + }, + "dither_type": { + "name": _("Dither Type"), + "type": "choice:string", + "values": DOT_DITHER_TYPES, + "default": "DIFFUSION", + }, + "dither_kernel": { + "name": _("Dither Kernel"), + "type": "choice:string", + "values": DOT_DITHER_KERNELS, + "default": "FLOYD_STEINBERG", + }, + }, + ) + # Note: + # - icon (Text API): base64 PNG icon (40px x 40px) in lower-left corner. + # Can be provided via `icon` parameter or first attachment. + # - image (Image API): base64 PNG image (296px x 152px) supplied via + # configuration `image` parameter or first attachment. + # - Only the first attachment is used; multiple attachments trigger a + # warning. + + def __init__( + self, + apikey=None, + device_id=None, + mode=DEFAULT_MODE, + refresh_now=True, + signature=None, + icon=None, + link=None, + border=None, + dither_type=None, + dither_kernel=None, + image_data=None, + **kwargs, + ): + """Initialize Notify Dot Object.""" + super().__init__(**kwargs) + + # API Key (from user) + self.apikey = apikey + + # Device ID tracks the Dot hardware serial. + self.device_id = device_id + + # Refresh Now flag: True shows content immediately (default). 
+ self.refresh_now = parse_bool(refresh_now, default=True) + + # API mode ("text" or "image") + self.mode = ( + mode.lower() + if isinstance(mode, str) and mode.lower() in self.SUPPORTED_MODES + else self.DEFAULT_MODE + ) + if ( + not isinstance(mode, str) + or mode.lower() not in self.SUPPORTED_MODES + ): + self.logger.warning( + "Unsupported Dot mode (%s) specified; defaulting to '%s'.", + mode, + self.mode, + ) + + # Signature text used by the Text API footer. + self.signature = signature if isinstance(signature, str) else None + + # Icon for the Text API (base64 PNG 40x40, lower-left corner). + # Note: distinct from the Image API "image" field. + self.icon = icon if isinstance(icon, str) else None + + # Image payload for the Image API (base64 PNG 296x152). + self.image_data = image_data if isinstance(image_data, str) else None + if self.mode == "text" and self.image_data: + self.logger.warning( + "Image data provided in text mode; ignoring configurable" + " image payload." + ) + self.image_data = None + + # Link for tap-to-interact navigation. 
+ self.link = link if isinstance(link, str) else None + + # Border for the Image API + self.border = border + + # Dither type for Image API + self.dither_type = dither_type + + # Dither kernel for the Image API + self.dither_kernel = dither_kernel + + # Text API endpoint + self.text_api_url = "https://dot.mindreset.tech/api/open/text" + + # Image API endpoint + self.image_api_url = "https://dot.mindreset.tech/api/open/image" + + return + + def send( + self, + body, + title="", + notify_type=NotifyType.INFO, + attach=None, + **kwargs, + ): + """Perform Dot Notification.""" + + if not self.apikey: + self.logger.warning("No API key was specified") + return False + + if not self.device_id: + self.logger.warning("No device ID was specified") + return False + + # Prepare our headers + headers = { + "Authorization": f"Bearer {self.apikey}", + "Content-Type": "application/json", + "User-Agent": self.app_id, + } + + if self.mode == "image": + if title or body: + self.logger.warning( + "Title and body are not supported in image mode " + "and will be ignored." + ) + + image_data = ( + self.image_data if isinstance(self.image_data, str) else None + ) + + # Use first attachment as image if no image_data provided + # attachment.base64() returns base64-encoded string for API + if not image_data and attach and self.attachment_support: + if len(attach) > 1: + self.logger.warning( + "Multiple attachments provided; only the first " + "one will be used as image." + ) + try: + attachment = attach[0] + if attachment: + # Convert attachment to base64-encoded string + image_data = attachment.base64() + except Exception as e: + self.logger.warning(f"Failed to process attachment: {e!s}") + + if not image_data: + self.logger.warning( + "Image API mode selected but no image data was provided." + ) + return False + + # Use Image API + # Image API payload: + # refreshNow: display timing control. + # deviceId: Dot device serial (required). + # image: base64 PNG 296x152 (required). 
+ # link: optional tap target. + # border: optional frame color. + # ditherType: optional dithering mode. + # ditherKernel: optional dithering kernel. + payload = { + "refreshNow": self.refresh_now, + "deviceId": self.device_id, + "image": image_data, # Image payload shown on screen + } + + if self.link: + payload["link"] = self.link + + if self.border is not None: + payload["border"] = self.border + + if self.dither_type is not None: + payload["ditherType"] = self.dither_type + + if self.dither_kernel is not None: + payload["ditherKernel"] = self.dither_kernel + + api_url = self.image_api_url + + else: + # Use Text API + # Text API payload: + # refreshNow: display timing control. + # deviceId: Dot device serial (required). + # title: optional title on screen. + # message: optional body on screen. + # signature: optional footer text. + # icon: optional base64 PNG icon (40x40). + # link: optional tap target. + payload = { + "refreshNow": self.refresh_now, + "deviceId": self.device_id, + } + + if title: + payload["title"] = title + + if body: + payload["message"] = body + + if self.signature: + payload["signature"] = ( + self.signature + ) # Footer/signature displayed on screen + + # Use first attachment as icon if no icon provided + # attachment.base64() returns base64-encoded string for API + icon_data = self.icon + if not icon_data and attach and self.attachment_support: + if len(attach) > 1: + self.logger.warning( + "Multiple attachments provided; only the first " + "one will be used as icon." 
+ ) + try: + attachment = attach[0] + if attachment: + # Convert attachment to base64-encoded string + icon_data = attachment.base64() + except Exception as e: + self.logger.warning(f"Failed to process attachment: {e!s}") + + if icon_data: + # Text API icon payload + payload["icon"] = icon_data + + if self.link: + payload["link"] = self.link + + api_url = self.text_api_url + + # Some Debug Logging + if self.logger.isEnabledFor(logging.DEBUG): + # Due to attachments; output can be quite heavy and io intensive + # To accommodate this, we only show our debug payload information + # if required. + self.logger.debug( + "Dot POST URL:" + f" {api_url} (cert_verify={self.verify_certificate!r})" + ) + self.logger.debug("Dot Payload %s", sanitize_payload(payload)) + + # Always call throttle before any remote server i/o is made + self.throttle() + + try: + r = requests.post( + api_url, + data=json.dumps(payload), + headers=headers, + verify=self.verify_certificate, + timeout=self.request_timeout, + ) + + if r.status_code == requests.codes.ok: + self.logger.info(f"Sent Dot notification to {self.device_id}.") + return True + + # We had a problem + status_str = NotifyDot.http_response_code_lookup(r.status_code) + + self.logger.warning( + "Failed to send Dot notification to {}: " + "{}{}error={}.".format( + self.device_id, + status_str, + ", " if status_str else "", + r.status_code, + ) + ) + + self.logger.debug( + "Response Details:\r\n%r", (r.content or b"")[:2000]) + + return False + + except requests.RequestException as e: + self.logger.warning( + "A Connection error occurred sending Dot " + f"notification to {self.device_id}." + ) + self.logger.debug(f"Socket Exception: {e!s}") + + return False + + @property + def url_identifier(self): + """Returns all of the identifiers that make this URL unique from + another similar one. 
+ """ + return ( + self.secure_protocol, + self.apikey, + self.device_id, + self.mode, + ) + + def url(self, privacy=False, *args, **kwargs): + """Returns the URL built dynamically based on specified arguments.""" + + # Define any URL parameters + params = { + "refresh": "yes" if self.refresh_now else "no", + } + + if self.mode == "text": + if self.signature: + params["signature"] = self.signature + + if self.icon: + params["icon"] = self.icon + + if self.link: + params["link"] = self.link + + else: # image mode + if self.image_data: + params["image"] = self.image_data + + if self.link: + params["link"] = self.link + + if self.border is not None: + params["border"] = str(self.border) + + if self.dither_type and self.dither_type != "DIFFUSION": + params["dither_type"] = self.dither_type + + if self.dither_kernel and self.dither_kernel != "FLOYD_STEINBERG": + params["dither_kernel"] = self.dither_kernel + + # Extend our parameters + params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) + + mode_segment = f"/{self.mode}/" + + return "{schema}://{apikey}@{device_id}{mode}?{params}".format( + schema=self.secure_protocol, + apikey=self.pprint( + self.apikey, privacy, mode=PrivacyMode.Secret, safe="" + ), + device_id=NotifyDot.quote(self.device_id, safe=""), + mode=mode_segment, + params=NotifyDot.urlencode(params), + ) + + def __len__(self): + """Returns the number of targets associated with this notification.""" + return 1 if self.device_id else 0 + + @staticmethod + def parse_url(url): + """Parses the URL and returns enough arguments that can allow us to re- + instantiate this object.""" + + results = NotifyBase.parse_url(url) + if not results: + # We're done early as we couldn't load the results + return results + + # Determine API mode from path (default to text) + mode = NotifyDot.DEFAULT_MODE + path_tokens = NotifyDot.split_path(results.get("fullpath")) + if path_tokens: + candidate = path_tokens.pop(0).lower() + if candidate in 
NotifyDot.SUPPORTED_MODES: + mode = candidate + else: + NotifyDot.logger.warning( + "Unsupported Dot mode (%s) detected; defaulting to '%s'.", + candidate, + NotifyDot.DEFAULT_MODE, + ) + results["mode"] = mode + remaining_path = "/".join(path_tokens) + results["fullpath"] = "/" + remaining_path if remaining_path else "/" + results["path"] = remaining_path + + # Extract API key from user + user = results.get("user") + if user: + results["apikey"] = NotifyDot.unquote(user) + + # Extract device ID from hostname + host = results.get("host") + if host: + results["device_id"] = NotifyDot.unquote(host) + + # Refresh Now + refresh_value = results["qsd"].get("refresh") + if refresh_value: + results["refresh_now"] = parse_bool(refresh_value.strip()) + + # Signature + signature_value = results["qsd"].get("signature") + if signature_value: + results["signature"] = NotifyDot.unquote(signature_value.strip()) + + # Icon + icon_value = results["qsd"].get("icon") + if icon_value: + results["icon"] = NotifyDot.unquote(icon_value.strip()) + + # Link + link_value = results["qsd"].get("link") + if link_value: + results["link"] = NotifyDot.unquote(link_value.strip()) + + # Border + border_value = results["qsd"].get("border") + if border_value: + with suppress(TypeError, ValueError): + results["border"] = int(border_value.strip()) + + # Dither Type + dither_type_value = results["qsd"].get("dither_type") + if dither_type_value: + results["dither_type"] = NotifyDot.unquote( + dither_type_value.strip() + ) + + # Dither Kernel + dither_kernel_value = results["qsd"].get("dither_kernel") + if dither_kernel_value: + results["dither_kernel"] = NotifyDot.unquote( + dither_kernel_value.strip() + ) + + # Image (Image API) + image_value = results["qsd"].get("image") + if image_value: + results["image_data"] = NotifyDot.unquote(image_value.strip()) + + return results diff --git a/libs/apprise/plugins/email/__init__.py b/libs/apprise/plugins/email/__init__.py index d2d7e6be75..835b52898c 100644 --- 
a/libs/apprise/plugins/email/__init__.py +++ b/libs/apprise/plugins/email/__init__.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. -# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -30,24 +29,23 @@ from .base import NotifyEmail from .common import ( - AppriseEmailException, EmailMessage, SecureMailMode, SECURE_MODES, - WebBaseLogin) + SECURE_MODES, + AppriseEmailException, + EmailMessage, + SecureMailMode, + WebBaseLogin, +) from .templates import EMAIL_TEMPLATES # Globally Default encoding mode set to Quoted Printable. -charset.add_charset('utf-8', charset.QP, charset.QP, 'utf-8') +charset.add_charset("utf-8", charset.QP, charset.QP, "utf-8") __all__ = [ - # Reference - 'NotifyEmail', - - # Pretty Good Privacy - 'ApprisePGPController', 'ApprisePGPException', - - # Other - 'AppriseEmailException', 'EmailMessage', 'SecureMailMode', 'SECURE_MODES', - 'WebBaseLogin', - - # Additional entries that may be useful to some developers - 'EMAIL_TEMPLATES', 'PGP_SUPPORT', + "EMAIL_TEMPLATES", + "SECURE_MODES", + "AppriseEmailException", + "EmailMessage", + "NotifyEmail", + "SecureMailMode", + "WebBaseLogin", ] diff --git a/libs/apprise/plugins/email/base.py b/libs/apprise/plugins/email/base.py index ce89cb3d7d..2aa00d3c5a 100644 --- a/libs/apprise/plugins/email/base.py +++ b/libs/apprise/plugins/email/base.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. 
-# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -26,32 +25,39 @@ # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. -import re -import smtplib -from email.mime.text import MIMEText +from datetime import datetime +from email.header import Header from email.mime.application import MIMEApplication -from email.mime.multipart import MIMEMultipart from email.mime.base import MIMEBase -from email.utils import formataddr, make_msgid -from email.header import Header - -from socket import error as SocketError -from datetime import datetime -from datetime import timezone +from email.mime.multipart import MIMEMultipart +from email.mime.text import MIMEText +from email.utils import format_datetime, formataddr, make_msgid +import re +import smtplib +from typing import Optional -from ..base import NotifyBase -from ...url import PrivacyMode -from ...common import NotifyFormat, NotifyType +from ...common import NotifyFormat, NotifyType, PersistentStoreMode from ...conversion import convert_between -from ...utils import pgp as _pgp -from ...utils.parse import ( - is_ipaddr, is_email, parse_emails, is_hostname, parse_bool) from ...locale import gettext_lazy as _ from ...logger import logger -from .common import ( - AppriseEmailException, EmailMessage, SecureMailMode, SECURE_MODES, - WebBaseLogin) +from ...url import PrivacyMode +from ...utils import pgp as _pgp +from ...utils.parse import ( + is_email, + is_hostname, + is_ipaddr, + parse_bool, + parse_emails, +) +from ..base import NotifyBase from . 
import templates +from .common import ( + SECURE_MODES, + AppriseEmailException, + EmailMessage, + SecureMailMode, + WebBaseLogin, +) class NotifyEmail(NotifyBase): @@ -61,20 +67,24 @@ class NotifyEmail(NotifyBase): """ # The default descriptive name associated with the Notification - service_name = 'E-Mail' + service_name = "E-Mail" # The default simple (insecure) protocol - protocol = 'mailto' + protocol = "mailto" # The default secure protocol - secure_protocol = 'mailtos' + secure_protocol = "mailtos" # A URL that takes you to the setup/help of the specific protocol - setup_url = 'https://github.com/caronc/apprise/wiki/Notify_email' + setup_url = "https://appriseit.com/services/email/" # Support attachments attachment_support = True + # Our default is to no not use persistent storage beyond in-memory + # reference; this allows us to auto-generate our config if needed + storage_mode = PersistentStoreMode.AUTO + # Default Notify Format notify_format = NotifyFormat.HTML @@ -83,121 +93,138 @@ class NotifyEmail(NotifyBase): # Define object templates templates = ( - '{schema}://{host}', - '{schema}://{host}:{port}', - '{schema}://{host}/{targets}', - '{schema}://{host}:{port}/{targets}', - '{schema}://{user}@{host}', - '{schema}://{user}@{host}:{port}', - '{schema}://{user}@{host}/{targets}', - '{schema}://{user}@{host}:{port}/{targets}', - '{schema}://{user}:{password}@{host}', - '{schema}://{user}:{password}@{host}:{port}', - '{schema}://{user}:{password}@{host}/{targets}', - '{schema}://{user}:{password}@{host}:{port}/{targets}', + "{schema}://{host}", + "{schema}://{host}:{port}", + "{schema}://{host}/{targets}", + "{schema}://{host}:{port}/{targets}", + "{schema}://{user}@{host}", + "{schema}://{user}@{host}:{port}", + "{schema}://{user}@{host}/{targets}", + "{schema}://{user}@{host}:{port}/{targets}", + "{schema}://{user}:{password}@{host}", + "{schema}://{user}:{password}@{host}:{port}", + "{schema}://{user}:{password}@{host}/{targets}", + 
"{schema}://{user}:{password}@{host}:{port}/{targets}", ) # Define our template tokens - template_tokens = dict(NotifyBase.template_tokens, **{ - 'user': { - 'name': _('User Name'), - 'type': 'string', - }, - 'password': { - 'name': _('Password'), - 'type': 'string', - 'private': True, - }, - 'host': { - 'name': _('Domain'), - 'type': 'string', - 'required': True, - }, - 'port': { - 'name': _('Port'), - 'type': 'int', - 'min': 1, - 'max': 65535, - }, - 'target_email': { - 'name': _('Target Email'), - 'type': 'string', - 'map_to': 'targets', - }, - 'targets': { - 'name': _('Targets'), - 'type': 'list:string', + template_tokens = dict( + NotifyBase.template_tokens, + **{ + "user": { + "name": _("User Name"), + "type": "string", + }, + "password": { + "name": _("Password"), + "type": "string", + "private": True, + }, + "host": { + "name": _("Domain"), + "type": "string", + "required": True, + }, + "port": { + "name": _("Port"), + "type": "int", + "min": 1, + "max": 65535, + }, + "target_email": { + "name": _("Target Email"), + "type": "string", + "map_to": "targets", + }, + "targets": { + "name": _("Targets"), + "type": "list:string", + }, }, - }) + ) - template_args = dict(NotifyBase.template_args, **{ - 'to': { - 'name': _('To Email'), - 'type': 'string', - 'map_to': 'targets', - }, - 'from': { - 'name': _('From Email'), - 'type': 'string', - 'map_to': 'from_addr', - }, - 'name': { - 'name': _('From Name'), - 'type': 'string', - 'map_to': 'from_addr', - }, - 'cc': { - 'name': _('Carbon Copy'), - 'type': 'list:string', - }, - 'bcc': { - 'name': _('Blind Carbon Copy'), - 'type': 'list:string', - }, - 'smtp': { - 'name': _('SMTP Server'), - 'type': 'string', - 'map_to': 'smtp_host', + template_args = dict( + NotifyBase.template_args, + **{ + "to": { + "name": _("To Email"), + "type": "string", + "map_to": "targets", + }, + "from": { + "name": _("From Email"), + "type": "string", + "map_to": "from_addr", + }, + "name": { + "name": _("From Name"), + "type": "string", + 
"map_to": "from_addr", + }, + "cc": { + "name": _("Carbon Copy"), + "type": "list:string", + }, + "bcc": { + "name": _("Blind Carbon Copy"), + "type": "list:string", + }, + "smtp": { + "name": _("SMTP Server"), + "type": "string", + "map_to": "smtp_host", + }, + "mode": { + "name": _("Secure Mode"), + "type": "choice:string", + "values": SECURE_MODES, + "default": SecureMailMode.STARTTLS, + "map_to": "secure_mode", + }, + "reply": { + "name": _("Reply To"), + "type": "list:string", + "map_to": "reply_to", + }, + "pgp": { + "name": _("PGP Encryption"), + "type": "bool", + "map_to": "use_pgp", + "default": False, + }, + "pgpkey": { + "name": _("PGP Public Key Path"), + "type": "string", + "private": True, + # By default persistent storage is referenced + "default": "", + "map_to": "pgp_key", + }, }, - 'mode': { - 'name': _('Secure Mode'), - 'type': 'choice:string', - 'values': SECURE_MODES, - 'default': SecureMailMode.STARTTLS, - 'map_to': 'secure_mode', - }, - 'reply': { - 'name': _('Reply To'), - 'type': 'list:string', - 'map_to': 'reply_to', - }, - 'pgp': { - 'name': _('PGP Encryption'), - 'type': 'bool', - 'map_to': 'use_pgp', - 'default': False, - }, - 'pgpkey': { - 'name': _('PGP Public Key Path'), - 'type': 'string', - 'private': True, - # By default persistent storage is referenced - 'default': '', - 'map_to': 'pgp_key', - }, - }) + ) # Define any kwargs we're using template_kwargs = { - 'headers': { - 'name': _('Email Header'), - 'prefix': '+', + "headers": { + "name": _("Email Header"), + "prefix": "+", }, } - def __init__(self, smtp_host=None, from_addr=None, secure_mode=None, - targets=None, cc=None, bcc=None, reply_to=None, headers=None, - use_pgp=None, pgp_key=None, **kwargs): + def __init__( + self, + smtp_host=None, + from_addr=None, + secure_mode=None, + targets=None, + cc=None, + bcc=None, + reply_to=None, + headers=None, + use_pgp=None, + pgp_key=None, + **kwargs, + ): """ Initialize Email Object @@ -207,7 +234,7 @@ def __init__(self, 
smtp_host=None, from_addr=None, secure_mode=None, super().__init__(**kwargs) # Acquire Email 'To' - self.targets = list() + self.targets = [] # Acquire Carbon Copies self.cc = set() @@ -228,24 +255,29 @@ def __init__(self, smtp_host=None, from_addr=None, secure_mode=None, # Now we want to construct the To and From email # addresses from the URL provided - self.from_addr = [False, ''] + self.from_addr = [False, ""] # Now detect the SMTP Server - self.smtp_host = \ - smtp_host if isinstance(smtp_host, str) else '' + self.smtp_host = smtp_host if isinstance(smtp_host, str) else "" # Now detect secure mode if secure_mode: - self.secure_mode = None \ - if not isinstance(secure_mode, str) \ + self.secure_mode = ( + None + if not isinstance(secure_mode, str) else secure_mode.lower() + ) else: - self.secure_mode = SecureMailMode.INSECURE \ - if not self.secure else self.template_args['mode']['default'] + self.secure_mode = ( + SecureMailMode.INSECURE + if not self.secure + else self.template_args["mode"]["default"] + ) if self.secure_mode not in SECURE_MODES: - msg = 'The secure mode specified ({}) is invalid.'\ - .format(secure_mode) + msg = "The secure mode specified ({}) is invalid.".format( + secure_mode + ) self.logger.warning(msg) raise TypeError(msg) @@ -253,48 +285,53 @@ def __init__(self, smtp_host=None, from_addr=None, secure_mode=None, for recipient in parse_emails(cc): email = is_email(recipient) if email: - self.cc.add(email['full_email']) + self.cc.add(email["full_email"]) # Index our name (if one exists) - self.names[email['full_email']] = \ - email['name'] if email['name'] else False + self.names[email["full_email"]] = ( + email["name"] if email["name"] else False + ) continue self.logger.warning( - 'Dropped invalid Carbon Copy email ' - '({}) specified.'.format(recipient), + "Dropped invalid Carbon Copy email ({}) specified.".format( + recipient + ), ) # Validate recipients (bcc:) and drop bad ones: for recipient in parse_emails(bcc): email = 
is_email(recipient) if email: - self.bcc.add(email['full_email']) + self.bcc.add(email["full_email"]) # Index our name (if one exists) - self.names[email['full_email']] = \ - email['name'] if email['name'] else False + self.names[email["full_email"]] = ( + email["name"] if email["name"] else False + ) continue self.logger.warning( - 'Dropped invalid Blind Carbon Copy email ' - '({}) specified.'.format(recipient), + "Dropped invalid Blind Carbon Copy email " + "({}) specified.".format(recipient), ) # Validate recipients (reply-to:) and drop bad ones: for recipient in parse_emails(reply_to): email = is_email(recipient) if email: - self.reply_to.add(email['full_email']) + self.reply_to.add(email["full_email"]) # Index our name (if one exists) - self.names[email['full_email']] = \ - email['name'] if email['name'] else False + self.names[email["full_email"]] = ( + email["name"] if email["name"] else False + ) continue self.logger.warning( - 'Dropped invalid Reply To email ' - '({}) specified.'.format(recipient), + "Dropped invalid Reply To email ({}) specified.".format( + recipient + ), ) # Apply any defaults based on certain known configurations @@ -303,24 +340,28 @@ def __init__(self, smtp_host=None, from_addr=None, secure_mode=None, if self.user: if self.host: # Prepare the bases of our email - self.from_addr = [self.app_id, '{}@{}'.format( - re.split(r'[\s@]+', self.user)[0], - self.host, - )] + self.from_addr = [ + self.app_id, + "{}@{}".format( + re.split(r"[\s@]+", self.user)[0], + self.host, + ), + ] else: result = is_email(self.user) if result: # Prepare the bases of our email and include domain - self.host = result['domain'] + self.host = result["domain"] self.from_addr = [self.app_id, self.user] if from_addr: result = is_email(from_addr) if result: self.from_addr = ( - result['name'] if result['name'] else False, - result['full_email']) + result["name"] if result["name"] else False, + result["full_email"], + ) else: # Only update the string but use the 
already detected info self.from_addr[0] = from_addr @@ -328,9 +369,11 @@ def __init__(self, smtp_host=None, from_addr=None, secure_mode=None, result = is_email(self.from_addr[1]) if not result: # Parse Source domain based on from_addr - msg = 'Invalid ~From~ email specified: {}'.format( - '{} <{}>'.format(self.from_addr[0], self.from_addr[1]) - if self.from_addr[0] else '{}'.format(self.from_addr[1])) + msg = "Invalid ~From~ email specified: {}".format( + "{} <{}>".format(self.from_addr[0], self.from_addr[1]) + if self.from_addr[0] + else "{}".format(self.from_addr[1]) + ) self.logger.warning(msg) raise TypeError(msg) @@ -342,14 +385,16 @@ def __init__(self, smtp_host=None, from_addr=None, secure_mode=None, for recipient in parse_emails(targets): result = is_email(recipient) if result: - self.targets.append( - (result['name'] if result['name'] else False, - result['full_email'])) + self.targets.append(( + result["name"] if result["name"] else False, + result["full_email"], + )) continue self.logger.warning( - 'Dropped invalid To email ' - '({}) specified.'.format(recipient), + "Dropped invalid To email ({}) specified.".format( + recipient + ), ) else: @@ -363,7 +408,7 @@ def __init__(self, smtp_host=None, from_addr=None, secure_mode=None, if not self.port: # Assign our port based on our secure_mode if not otherwise # detected - self.port = SECURE_MODES[self.secure_mode]['default_port'] + self.port = SECURE_MODES[self.secure_mode]["default_port"] # if there is still no smtp_host then we fall back to the hostname if not self.smtp_host: @@ -371,19 +416,24 @@ def __init__(self, smtp_host=None, from_addr=None, secure_mode=None, # Prepare our Pretty Good Privacy Object self.pgp = _pgp.ApprisePGPController( - path=self.store.path, pub_keyfile=pgp_key, - email=self.from_addr[1], asset=self.asset) + path=self.store.path, + pub_keyfile=pgp_key, + email=self.from_addr[1], + asset=self.asset, + ) # We store so we can generate a URL later on self.pgp_key = pgp_key - 
self.use_pgp = use_pgp if not None \ - else self.template_args['pgp']['default'] + self.use_pgp = ( + use_pgp if not None else self.template_args["pgp"]["default"] + ) if self.use_pgp and not _pgp.PGP_SUPPORT: self.logger.warning( - 'PGP Support is not available on this installation; ' - 'ask admin to install PGPy') + "PGP Support is not available on this installation; " + "ask admin to install PGPy" + ) return @@ -393,7 +443,7 @@ def apply_email_defaults(self, secure_mode=None, port=None, **kwargs): it was provided. """ - if self.smtp_host or not self.user: + if self.smtp_host: # SMTP Server was explicitly specified, therefore it is assumed # the caller knows what he's doing and is intentionally # over-riding any smarts to be applied. We also can not apply @@ -401,45 +451,54 @@ def apply_email_defaults(self, secure_mode=None, port=None, **kwargs): return # detect our email address using our user/host combo - from_addr = '{}@{}'.format( - re.split(r'[\s@]+', self.user)[0], - self.host, + from_addr = ( + "{}@{}".format( + re.split(r"[\s@]+", self.user)[0], + self.host, + ) + if self.user + else self.host ) for i in range(len(templates.EMAIL_TEMPLATES)): # pragma: no branch - self.logger.trace('Scanning %s against %s' % ( - from_addr, templates.EMAIL_TEMPLATES[i][0] - )) + self.logger.trace( + "Scanning %s against %s", + from_addr, templates.EMAIL_TEMPLATES[i][0]) + match = templates.EMAIL_TEMPLATES[i][1].match(from_addr) if match: self.logger.info( - 'Applying %s Defaults' % - templates.EMAIL_TEMPLATES[i][0], - ) + f"Applying {templates.EMAIL_TEMPLATES[i][0]} Defaults") + # the secure flag can not be altered if defined in the template - self.secure = templates.EMAIL_TEMPLATES[i][2]\ - .get('secure', self.secure) + self.secure = templates.EMAIL_TEMPLATES[i][2].get( + "secure", self.secure + ) # The SMTP Host check is already done above; if it was # specified we wouldn't even reach this part of the code. 
- self.smtp_host = templates.EMAIL_TEMPLATES[i][2]\ - .get('smtp_host', self.smtp_host) + self.smtp_host = templates.EMAIL_TEMPLATES[i][2].get( + "smtp_host", self.smtp_host + ) # The following can be over-ridden if defined manually in the # Apprise URL. Otherwise they take on the template value if not port: - self.port = templates.EMAIL_TEMPLATES[i][2]\ - .get('port', self.port) + self.port = templates.EMAIL_TEMPLATES[i][2].get( + "port", self.port + ) if not secure_mode: - self.secure_mode = templates.EMAIL_TEMPLATES[i][2]\ - .get('secure_mode', self.secure_mode) + self.secure_mode = templates.EMAIL_TEMPLATES[i][2].get( + "secure_mode", self.secure_mode + ) # Adjust email login based on the defined usertype. If no entry # was specified, then we default to having them all set (which # basically implies that there are no restrictions and use use # whatever was specified) - login_type = \ - templates.EMAIL_TEMPLATES[i][2].get('login_type', []) + login_type = templates.EMAIL_TEMPLATES[i][2].get( + "login_type", [] + ) if login_type: # only apply additional logic to our user if a login_type # was specified. 
@@ -447,25 +506,41 @@ def apply_email_defaults(self, secure_mode=None, port=None, **kwargs): if WebBaseLogin.EMAIL not in login_type: # Email specified but login type # not supported; switch it to user id - self.user = match.group('id') + self.user = match.group("id") else: # Enforce our host information - self.host = self.user.split('@')[1] + self.host = self.user.split("@")[1] elif WebBaseLogin.USERID not in login_type: # user specified but login type # not supported; switch it to email - self.user = '{}@{}'.format(self.user, self.host) + self.user = "{}@{}".format(self.user, self.host) + + if ( + "from_user" in templates.EMAIL_TEMPLATES[i][2] + and not self.from_addr[1] + ): + + # Update our from address if defined + self.from_addr[1] = "{}@{}".format( + templates.EMAIL_TEMPLATES[i][2]["from_user"], self.host + ) break - def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, - **kwargs): + def send( + self, + body, + title="", + notify_type=NotifyType.INFO, + attach=None, + **kwargs, + ): if not self.targets: # There is no one to email; we're done - logger.warning('There are no Email recipients to notify') + logger.warning("There are no Email recipients to notify") return False # error tracking (used for function return) @@ -478,10 +553,10 @@ def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, self.throttle() try: - self.logger.debug('Connecting to remote SMTP server...') + self.logger.debug("Connecting to remote SMTP server...") socket_func = smtplib.SMTP if self.secure_mode == SecureMailMode.SSL: - self.logger.debug('Securing connection with SSL...') + self.logger.debug("Securing connection with SSL...") socket_func = smtplib.SMTP_SSL socket = socket_func( @@ -493,57 +568,67 @@ def send(self, body, title='', notify_type=NotifyType.INFO, attach=None, if self.secure_mode == SecureMailMode.STARTTLS: # Handle Secure Connections - self.logger.debug('Securing connection with STARTTLS...') + self.logger.debug("Securing 
connection with STARTTLS...") socket.starttls() - self.logger.trace('Login ID: {}'.format(self.user)) + self.logger.trace("Login ID: {}".format(self.user)) if self.user and self.password: - # Apply Login credetials - self.logger.debug('Applying user credentials...') + # Apply Login credentials + self.logger.debug("Applying user credentials...") socket.login(self.user, self.password) # Prepare our headers headers = { - 'X-Application': self.app_id, + "X-Application": self.app_id, } headers.update(self.headers) # Iterate over our email messages we can generate and then # send them off. for message in NotifyEmail.prepare_emails( - subject=title, body=body, notify_format=self.notify_format, - from_addr=self.from_addr, to=self.targets, - cc=self.cc, bcc=self.bcc, reply_to=self.reply_to, - smtp_host=self.smtp_host, - attach=attach, headers=headers, names=self.names, - pgp=self.pgp if self.use_pgp else None): + subject=title, + body=body, + notify_format=self.notify_format, + from_addr=self.from_addr, + to=self.targets, + cc=self.cc, + bcc=self.bcc, + reply_to=self.reply_to, + smtp_host=self.smtp_host, + attach=attach, + headers=headers, + names=self.names, + pgp=self.pgp if self.use_pgp else None, + tzinfo=self.tzinfo, + ): try: socket.sendmail( - self.from_addr[1], - message.to_addrs, - message.body) + self.from_addr[1], message.to_addrs, message.body + ) - self.logger.info('Sent Email to %s', message.recipient) + self.logger.info("Sent Email to %s", message.recipient) - except (SocketError, smtplib.SMTPException, RuntimeError) as e: + except (OSError, smtplib.SMTPException, RuntimeError) as e: self.logger.warning( - 'Sending email to "%s" failed.', message.recipient) - self.logger.debug(f'Socket Exception: {e}') + 'Sending email to "%s" failed.', message.recipient + ) + self.logger.debug(f"Socket Exception: {e}") # Mark as failure has_error = True - except (SocketError, smtplib.SMTPException, RuntimeError) as e: + except (OSError, smtplib.SMTPException, RuntimeError) 
as e: self.logger.warning( 'Connection error while submitting email to "%s"', - self.smtp_host) - self.logger.debug(f'Socket Exception: {e}') + self.smtp_host, + ) + self.logger.debug(f"Socket Exception: {e}") # Mark as failure has_error = True except AppriseEmailException as e: - self.logger.debug(f'Socket Exception: {e}') + self.logger.debug(f"Socket Exception: {e}") # Mark as failure has_error = True @@ -565,15 +650,15 @@ def url(self, privacy=False, *args, **kwargs): # Define an URL parameters params = { - 'pgp': 'yes' if self.use_pgp else 'no', + "pgp": "yes" if self.use_pgp else "no", } # Store our public key back into your URL if self.pgp_key is not None: - params['pgp_key'] = NotifyEmail.quote(self.pgp_key, safe=':\\/') + params["pgp_key"] = NotifyEmail.quote(self.pgp_key, safe=":\\/") # Append our headers into our parameters - params.update({'+{}'.format(k): v for k, v in self.headers.items()}) + params.update({"+{}".format(k): v for k, v in self.headers.items()}) # Extend our parameters params.update(self.url_parameters(privacy=privacy, *args, **kwargs)) @@ -585,91 +670,116 @@ def url(self, privacy=False, *args, **kwargs): if self.smtp_host != self.host: # Apply our SMTP Host only if it differs from the provided hostname - params['smtp'] = self.smtp_host + params["smtp"] = self.smtp_host if self.secure: - # Mode is only requried if we're dealing with a secure connection - params['mode'] = self.secure_mode + # Mode is only required if we're dealing with a secure connection + params["mode"] = self.secure_mode if self.from_addr[0] and self.from_addr[0] != self.app_id: # A custom name was provided - params['from'] = self.from_addr[0] if not from_addr else \ - formataddr((self.from_addr[0], from_addr), charset='utf-8') + params["from"] = ( + self.from_addr[0] + if not from_addr + else formataddr( + (self.from_addr[0], from_addr), charset="utf-8" + ) + ) elif from_addr: - params['from'] = formataddr((False, from_addr), charset='utf-8') + params["from"] = 
formataddr((False, from_addr), charset="utf-8") elif not self.user: - params['from'] = \ - formataddr((False, self.from_addr[1]), charset='utf-8') + params["from"] = formataddr( + (False, self.from_addr[1]), charset="utf-8" + ) if self.cc: # Handle our Carbon Copy Addresses - params['cc'] = ','.join([ + params["cc"] = ",".join([ formataddr( - (self.names[e] if e in self.names else False, e), + (self.names.get(e, False), e), # Swap comma for it's escaped url code (if detected) since # we're using that as a delimiter - charset='utf-8').replace(',', '%2C') - for e in self.cc]) + charset="utf-8", + ).replace(",", "%2C") + for e in self.cc + ]) if self.bcc: # Handle our Blind Carbon Copy Addresses - params['bcc'] = ','.join([ + params["bcc"] = ",".join([ formataddr( - (self.names[e] if e in self.names else False, e), + (self.names.get(e, False), e), # Swap comma for it's escaped url code (if detected) since # we're using that as a delimiter - charset='utf-8').replace(',', '%2C') - for e in self.bcc]) + charset="utf-8", + ).replace(",", "%2C") + for e in self.bcc + ]) if self.reply_to: # Handle our Reply-To Addresses - params['reply'] = ','.join([ + params["reply"] = ",".join([ formataddr( - (self.names[e] if e in self.names else False, e), - # Swap comma for it's escaped url code (if detected) since + (self.names.get(e, False), e), + # Swap comma for its escaped url code (if detected) since # we're using that as a delimiter - charset='utf-8').replace(',', '%2C') - for e in self.reply_to]) + charset="utf-8", + ).replace(",", "%2C") + for e in self.reply_to + ]) # pull email suffix from username (if present) - user = None if not self.user else self.user.split('@')[0] + user = None if not self.user else self.user.split("@")[0] # Determine Authentication - auth = '' + auth = "" if self.user and self.password: - auth = '{user}:{password}@'.format( - user=NotifyEmail.quote(user, safe=''), + auth = "{user}:{password}@".format( + user=NotifyEmail.quote(user, safe=""), 
password=self.pprint( - self.password, privacy, mode=PrivacyMode.Secret, safe=''), + self.password, privacy, mode=PrivacyMode.Secret, safe="" + ), ) elif user: # user url - auth = '{user}@'.format( - user=NotifyEmail.quote(user, safe=''), + auth = "{user}@".format( + user=NotifyEmail.quote(user, safe=""), ) # Default Port setup - default_port = SECURE_MODES[self.secure_mode]['default_port'] + default_port = SECURE_MODES[self.secure_mode]["default_port"] # a simple boolean check as to whether we display our target emails # or not - has_targets = \ - not (len(self.targets) == 1 - and self.targets[0][1] == self.from_addr[1]) + has_targets = not ( + len(self.targets) == 1 and self.targets[0][1] == self.from_addr[1] + ) - return '{schema}://{auth}{hostname}{port}/{targets}?{params}'.format( + return "{schema}://{auth}{hostname}{port}/{targets}?{params}".format( schema=self.secure_protocol if self.secure else self.protocol, auth=auth, # never encode hostname since we're expecting it to be a valid one hostname=self.host, - port='' if self.port is None or self.port == default_port - else ':{}'.format(self.port), - targets='' if not has_targets else '/'.join( - [NotifyEmail.quote('{}{}'.format( - '' if not e[0] else '{}:'.format(e[0]), e[1]), - safe='') for e in self.targets]), + port=( + "" + if self.port is None or self.port == default_port + else ":{}".format(self.port) + ), + targets=( + "" + if not has_targets + else "/".join([ + NotifyEmail.quote( + "{}{}".format( + "" if not e[0] else "{}:".format(e[0]), e[1] + ), + safe="", + ) + for e in self.targets + ]) + ), params=NotifyEmail.urlencode(params), ) @@ -677,14 +787,20 @@ def url(self, privacy=False, *args, **kwargs): def url_identifier(self): """ Returns all of the identifiers that make this URL unique from - another simliar one. Targets or end points should never be identified + another similar one. Targets or end points should never be identified here. 
""" return ( self.secure_protocol if self.secure else self.protocol, - self.user, self.password, self.host, self.smtp_host, - self.port if self.port - else SECURE_MODES[self.secure_mode]['default_port'], + self.user, + self.password, + self.host, + self.smtp_host, + ( + self.port + if self.port + else SECURE_MODES[self.secure_mode]["default_port"] + ), ) def __len__(self): @@ -706,90 +822,95 @@ def parse_url(url): return results # Prepare our target lists - results['targets'] = [] + results["targets"] = [] - if is_ipaddr(results['host']): + if is_ipaddr(results["host"]): # Silently move on and do not disrupt any configuration pass - elif not is_hostname(results['host'], ipv4=False, ipv6=False, - underscore=False): + elif not is_hostname( + results["host"], ipv4=False, ipv6=False, underscore=False + ): - if is_email(NotifyEmail.unquote(results['host'])): + if is_email(NotifyEmail.unquote(results["host"])): # Don't lose defined email addresses - results['targets'].append(NotifyEmail.unquote(results['host'])) + results["targets"].append(NotifyEmail.unquote(results["host"])) # Detect if we have a valid hostname or not; be sure to reset it's # value if invalid; we'll attempt to figure this out later on - results['host'] = '' + results["host"] = "" # Get PGP Flag - results['use_pgp'] = \ - parse_bool(results['qsd'].get( - 'pgp', NotifyEmail.template_args['pgp']['default'])) + results["use_pgp"] = parse_bool( + results["qsd"].get( + "pgp", NotifyEmail.template_args["pgp"]["default"] + ) + ) # Get PGP Public Key Override - if 'pgpkey' in results['qsd'] and results['qsd']['pgpkey']: - results['pgp_key'] = \ - NotifyEmail.unquote(results['qsd']['pgpkey']) + if "pgpkey" in results["qsd"] and results["qsd"]["pgpkey"]: + results["pgp_key"] = NotifyEmail.unquote(results["qsd"]["pgpkey"]) # The From address is a must; either through the use of templates # from= entry and/or merging the user and hostname together, this # must be calculated or parse_url will fail. 
- from_addr = '' + from_addr = "" # The server we connect to to send our mail to - smtp_host = '' + smtp_host = "" # Get our potential email targets; if none our found we'll just # add one to ourselves - results['targets'] += NotifyEmail.split_path(results['fullpath']) + results["targets"] += NotifyEmail.split_path(results["fullpath"]) # Attempt to detect 'to' email address - if 'to' in results['qsd'] and len(results['qsd']['to']): - results['targets'].append(results['qsd']['to']) + if "to" in results["qsd"] and len(results["qsd"]["to"]): + results["targets"].append(results["qsd"]["to"]) # Attempt to detect 'from' email address - if 'from' in results['qsd'] and len(results['qsd']['from']): - from_addr = NotifyEmail.unquote(results['qsd']['from']) + if "from" in results["qsd"] and len(results["qsd"]["from"]): + from_addr = NotifyEmail.unquote(results["qsd"]["from"]) - if 'name' in results['qsd'] and len(results['qsd']['name']): + if "name" in results["qsd"] and len(results["qsd"]["name"]): from_addr = formataddr( - (NotifyEmail.unquote(results['qsd']['name']), from_addr), - charset='utf-8') + (NotifyEmail.unquote(results["qsd"]["name"]), from_addr), + charset="utf-8", + ) - elif 'name' in results['qsd'] and len(results['qsd']['name']): + elif "name" in results["qsd"] and len(results["qsd"]["name"]): # Extract from name to associate with from address - from_addr = NotifyEmail.unquote(results['qsd']['name']) + from_addr = NotifyEmail.unquote(results["qsd"]["name"]) # Store SMTP Host if specified - if 'smtp' in results['qsd'] and len(results['qsd']['smtp']): + if "smtp" in results["qsd"] and len(results["qsd"]["smtp"]): # Extract the smtp server - smtp_host = NotifyEmail.unquote(results['qsd']['smtp']) + smtp_host = NotifyEmail.unquote(results["qsd"]["smtp"]) - if 'mode' in results['qsd'] and len(results['qsd']['mode']): + if "mode" in results["qsd"] and len(results["qsd"]["mode"]): # Extract the secure mode to over-ride the default - results['secure_mode'] = 
results['qsd']['mode'].lower() + results["secure_mode"] = results["qsd"]["mode"].lower() # Handle Carbon Copy Addresses - if 'cc' in results['qsd'] and len(results['qsd']['cc']): - results['cc'] = results['qsd']['cc'] + if "cc" in results["qsd"] and len(results["qsd"]["cc"]): + results["cc"] = results["qsd"]["cc"] # Handle Blind Carbon Copy Addresses - if 'bcc' in results['qsd'] and len(results['qsd']['bcc']): - results['bcc'] = results['qsd']['bcc'] + if "bcc" in results["qsd"] and len(results["qsd"]["bcc"]): + results["bcc"] = results["qsd"]["bcc"] # Handle Reply To Addresses - if 'reply' in results['qsd'] and len(results['qsd']['reply']): - results['reply_to'] = results['qsd']['reply'] + if "reply" in results["qsd"] and len(results["qsd"]["reply"]): + results["reply_to"] = results["qsd"]["reply"] - results['from_addr'] = from_addr - results['smtp_host'] = smtp_host + results["from_addr"] = from_addr + results["smtp_host"] = smtp_host # Add our Meta Headers that the user can provide with their outbound # emails - results['headers'] = {NotifyBase.unquote(x): NotifyBase.unquote(y) - for x, y in results['qsd+'].items()} + results["headers"] = { + NotifyBase.unquote(x): NotifyBase.unquote(y) + for x, y in results["qsd+"].items() + } return results @@ -804,22 +925,33 @@ def _get_charset(input_string): """ if not input_string: return None - return 'utf-8' if not all(ord(c) < 128 for c in input_string) else None + return "utf-8" if not all(ord(c) < 128 for c in input_string) else None @staticmethod - def prepare_emails(subject, body, from_addr, to, - cc=set(), bcc=set(), reply_to=set(), - # Providing an SMTP Host helps improve Email Message-ID - # and avoids getting flagged as spam - smtp_host=None, - # Can be either 'html' or 'text' - notify_format=NotifyFormat.HTML, - attach=None, headers=dict(), - # Names can be a dictionary - names=None, - # Pretty Good Privacy Support; Pass in an - # ApprisePGPController if you wish to use it - pgp=None): + def prepare_emails( + 
subject, + body, + from_addr, + to, + cc: Optional[set] = None, + bcc: Optional[set] = None, + reply_to: Optional[set] = None, + # Providing an SMTP Host helps improve Email Message-ID + # and avoids getting flagged as spam + smtp_host=None, + # Can be either 'html' or 'text' + notify_format=NotifyFormat.HTML, + attach=None, + headers: Optional[dict] = None, + # Names can be a dictionary + names=None, + # Pretty Good Privacy Support; Pass in an + # ApprisePGPController if you wish to use it + pgp=None, + # Define our timezone; if one isn't provided, then we use + # the system time instead + tzinfo=None, + ): """ Generator for emails from_addr: must be in format: (from_name, from_addr) @@ -847,17 +979,28 @@ def prepare_emails(subject, body, from_addr, to, Pass in an ApprisePGPController() if you wish to use this """ - if not to: # There is no one to email; we're done - msg = 'There are no Email recipients to notify' + msg = "There are no Email recipients to notify" logger.warning(msg) - raise AppriseEmailException(msg) + raise AppriseEmailException(msg) from None elif pgp and not _pgp.PGP_SUPPORT: - msg = 'PGP Support unavailable; install PGPy library' + msg = "PGP Support unavailable; install PGPy library" logger.warning(msg) - raise AppriseEmailException(msg) + raise AppriseEmailException(msg) from None + + if headers is None: + headers = {} + + if cc is None: + cc = set() + + if bcc is None: + bcc = set() + + if reply_to is None: + reply_to = set() if not names: # Prepare a empty dictionary to prevent errors/warnings @@ -865,9 +1008,13 @@ def prepare_emails(subject, body, from_addr, to, if not smtp_host: # Generate a host identifier (used for Message-ID Creation) - smtp_host = from_addr[1].split('@')[1] + smtp_host = from_addr[1].split("@")[1] - logger.debug('SMTP Host: {smtp_host}') + if not tzinfo: + # use server time + tzinfo = datetime.now().astimezone().tzinfo + + logger.debug(f"SMTP Host: {smtp_host}") # Create a copy of the targets list emails = list(to) 
@@ -876,55 +1023,60 @@ def prepare_emails(subject, body, from_addr, to, to_name, to_addr = emails.pop(0) # Strip target out of cc list if in To or Bcc - _cc = (cc - bcc - set([to_addr])) + cc_ = cc - bcc - {to_addr} # Strip target out of bcc list if in To - _bcc = (bcc - set([to_addr])) + bcc_ = bcc - {to_addr} # Strip target out of reply_to list if in To - _reply_to = (reply_to - set([to_addr])) + reply_to_ = reply_to - {to_addr} # Format our cc addresses to support the Name field - _cc = [formataddr( - (names.get(addr, False), addr), charset='utf-8') - for addr in _cc] + cc_ = [ + formataddr((names.get(addr, False), addr), charset="utf-8") + for addr in cc_ + ] # Format our bcc addresses to support the Name field - _bcc = [formataddr( - (names.get(addr, False), addr), charset='utf-8') - for addr in _bcc] + bcc_ = [ + formataddr((names.get(addr, False), addr), charset="utf-8") + for addr in bcc_ + ] - if reply_to: + if reply_to_: # Format our reply-to addresses to support the Name field - reply_to = [formataddr( - (names.get(addr, False), addr), charset='utf-8') - for addr in reply_to] + reply_to = [ + formataddr((names.get(addr, False), addr), charset="utf-8") + for addr in reply_to_ + ] logger.debug( - 'Email From: {}'.format( - formataddr(from_addr, charset='utf-8'))) - - logger.debug('Email To: {}'.format(to_addr)) - if _cc: - logger.debug('Email Cc: {}'.format(', '.join(_cc))) - if _bcc: - logger.debug('Email Bcc: {}'.format(', '.join(_bcc))) - if _reply_to: - logger.debug( - 'Email Reply-To: {}'.format(', '.join(_reply_to)) - ) + "Email From: {}".format(formataddr(from_addr, charset="utf-8")) + ) + + logger.debug("Email To: {}".format(to_addr)) + if cc_: + logger.debug("Email Cc: {}".format(", ".join(cc_))) + if bcc_: + logger.debug("Email Bcc: {}".format(", ".join(bcc_))) + if reply_to_: + logger.debug("Email Reply-To: {}".format(", ".join(reply_to_))) # Prepare Email Message if notify_format == NotifyFormat.HTML: base = MIMEMultipart("alternative") - 
base.attach(MIMEText( - convert_between( - NotifyFormat.HTML, NotifyFormat.TEXT, body), - 'plain', 'utf-8') + base.attach( + MIMEText( + convert_between( + NotifyFormat.HTML, NotifyFormat.TEXT, body + ), + "plain", + "utf-8", + ) ) - base.attach(MIMEText(body, 'html', 'utf-8')) + base.attach(MIMEText(body, "html", "utf-8")) else: - base = MIMEText(body, 'plain', 'utf-8') + base = MIMEText(body, "plain", "utf-8") if attach: mixed = MIMEMultipart("mixed") @@ -936,27 +1088,34 @@ def prepare_emails(subject, body, from_addr, to, # exit since this isn't what the end user wanted # We could not access the attachment - msg = 'Could not access attachment {}.'.format( - attachment.url(privacy=True)) + msg = "Could not access attachment {}.".format( + attachment.url(privacy=True) + ) logger.warning(msg) raise AppriseEmailException(msg) logger.debug( - 'Preparing Email attachment {}'.format( - attachment.url(privacy=True))) + "Preparing Email attachment {}".format( + attachment.url(privacy=True) + ) + ) with open(attachment.path, "rb") as abody: app = MIMEApplication(abody.read()) app.set_type(attachment.mimetype) # Prepare our attachment name - filename = attachment.name \ - if attachment.name else f'file{no:03}.dat' + filename = ( + attachment.name + if attachment.name + else f"file{no:03}.dat" + ) app.add_header( - 'Content-Disposition', + "Content-Disposition", 'attachment; filename="{}"'.format( - Header(filename, 'utf-8')), + Header(filename, "utf-8") + ), ) mixed.attach(app) base = mixed @@ -964,31 +1123,34 @@ def prepare_emails(subject, body, from_addr, to, if pgp: logger.debug("Securing Email with PGP Encryption") # Set our header information to include in the encryption - base['From'] = formataddr( - (None, from_addr[1]), charset='utf-8') - base['To'] = formataddr((None, to_addr), charset='utf-8') - base['Subject'] = \ - Header(subject, NotifyEmail._get_charset(subject)) + base["From"] = formataddr( + (None, from_addr[1]), charset="utf-8" + ) + base["To"] = 
formataddr((None, to_addr), charset="utf-8") + base["Subject"] = Header( + subject, NotifyEmail._get_charset(subject) + ) # Apply our encryption - encrypted_content = \ - pgp.encrypt(base.as_string(), to_addr) + encrypted_content = pgp.encrypt(base.as_string(), to_addr) if not encrypted_content: # Unable to send notification - msg = 'Unable to encrypt email via PGP' + msg = "Unable to encrypt email via PGP" logger.warning(msg) raise AppriseEmailException(msg) - # prepare our messsage + # prepare our message base = MIMEMultipart( - "encrypted", protocol="application/pgp-encrypted") + "encrypted", protocol="application/pgp-encrypted" + ) # Store Autocrypt header (DeltaChat Support) base.add_header( "Autocrypt", - "addr=%s; prefer-encrypt=mutual" % formataddr( - (False, to_addr), charset='utf-8')) + f"addr={formataddr((False, to_addr), charset='utf-8')}; " + "prefer-encrypt=mutual" + ) # Set Encryption Info Part enc_payload = MIMEText("Version: 1", "plain") @@ -1003,22 +1165,22 @@ def prepare_emails(subject, body, from_addr, to, for k, v in headers.items(): base[k] = Header(v, NotifyEmail._get_charset(v)) - base['Subject'] = \ - Header(subject, NotifyEmail._get_charset(subject)) - base['From'] = formataddr(from_addr, charset='utf-8') - base['To'] = formataddr((to_name, to_addr), charset='utf-8') - base['Message-ID'] = make_msgid(domain=smtp_host) - base['Date'] = \ - datetime.now(timezone.utc)\ - .strftime("%a, %d %b %Y %H:%M:%S +0000") + base["Subject"] = Header( + subject, NotifyEmail._get_charset(subject) + ) + base["From"] = formataddr(from_addr, charset="utf-8") + base["To"] = formataddr((to_name, to_addr), charset="utf-8") + base["Message-ID"] = make_msgid(domain=smtp_host) + base["Date"] = format_datetime(datetime.now(tz=tzinfo)) if cc: - base['Cc'] = ','.join(_cc) + base["Cc"] = ",".join(cc_) - if reply_to: - base['Reply-To'] = ','.join(_reply_to) + if reply_to_: + base["Reply-To"] = ",".join(reply_to) yield EmailMessage( recipient=to_addr, - 
to_addrs=[to_addr] + list(_cc) + list(_bcc), - body=base.as_string()) + to_addrs=[to_addr, *list(cc_), *list(bcc_)], + body=base.as_string(), + ) diff --git a/libs/apprise/plugins/email/common.py b/libs/apprise/plugins/email/common.py index e6a29deb86..eed4709ca5 100644 --- a/libs/apprise/plugins/email/common.py +++ b/libs/apprise/plugins/email/common.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. -# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -28,7 +27,6 @@ import dataclasses -import typing as t from ...exception import ApprisePluginException @@ -37,6 +35,7 @@ class AppriseEmailException(ApprisePluginException): """ Thrown when there is an error with the Email Attachment """ + def __init__(self, message, error_code=601): super().__init__(message, error_code=error_code) @@ -46,11 +45,12 @@ class WebBaseLogin: This class is just used in conjunction of the default emailers to best formulate a login to it using the data detected """ + # User Login must be Email Based - EMAIL = 'Email' + EMAIL = "Email" # User Login must UserID Based - USERID = 'UserID' + USERID = "UserID" # Secure Email Modes @@ -63,13 +63,13 @@ class SecureMailMode: # Define all of the secure modes (used during validation) SECURE_MODES = { SecureMailMode.STARTTLS: { - 'default_port': 587, + "default_port": 587, }, SecureMailMode.SSL: { - 'default_port': 465, + "default_port": 465, }, SecureMailMode.INSECURE: { - 'default_port': 25, + "default_port": 25, }, } @@ -79,6 +79,7 @@ class EmailMessage: """ Our message structure """ + recipient: str - to_addrs: t.List[str] + to_addrs: list[str] body: str diff --git a/libs/apprise/plugins/email/templates.py b/libs/apprise/plugins/email/templates.py index ac31357365..777de3f784 100644 --- 
a/libs/apprise/plugins/email/templates.py +++ b/libs/apprise/plugins/email/templates.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- # BSD 2-Clause License # # Apprise - Push Notification Library. -# Copyright (c) 2025, Chris Caron +# Copyright (c) 2026, Chris Caron # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: @@ -28,7 +27,8 @@ import re -from .common import (SecureMailMode, WebBaseLogin) + +from .common import SecureMailMode, WebBaseLogin # To attempt to make this script stupid proof, if we detect an email address # that is part of the this table, we can pre-use a lot more defaults if they @@ -36,237 +36,275 @@ EMAIL_TEMPLATES = ( # Google GMail ( - 'Google Mail', + "Google Mail", re.compile( - r'^((?P