From 98fdabcb181035992c0e583cd6327593b3921d57 Mon Sep 17 00:00:00 2001 From: Jammy2211 Date: Wed, 21 May 2025 19:06:35 +0100 Subject: [PATCH 1/5] fix actions --- .github/workflows/main.yml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 75e0a78a2..2a13cf338 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -59,10 +59,8 @@ jobs: pip3 install setuptools pip3 install wheel pip3 install pytest coverage pytest-cov - pip3 install -r PyAutoConf/requirements.txt - pip3 install -r PyAutoFit/requirements.txt - pip3 install -r PyAutoFit/optional_requirements.txt - pip3 install -r PyAutoFit/build_requirements.txt + pip install ./PyAutoConf + pip install ./PyAutoFit - name: Run tests run: | export ROOT_DIR=`pwd` From 0c1f36d540d7d509810d8babf1a00db8dbacbbd0 Mon Sep 17 00:00:00 2001 From: Jammy2211 Date: Wed, 21 May 2025 19:11:27 +0100 Subject: [PATCH 2/5] github build --- .github/workflows/main.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 2a13cf338..27a26830b 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -61,6 +61,7 @@ jobs: pip3 install pytest coverage pytest-cov pip install ./PyAutoConf pip install ./PyAutoFit + pip install ./PyAutoFit[optional] - name: Run tests run: | export ROOT_DIR=`pwd` From 78133a29b1c780069492c744b5e775d5979cde0b Mon Sep 17 00:00:00 2001 From: Jammy2211 Date: Wed, 21 May 2025 19:33:06 +0100 Subject: [PATCH 3/5] readthedocs --- pyproject.toml | 9 +++++++++ readthedocs.yml | 7 +++++-- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 5900bd2eb..3d42be5cd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -74,6 +74,15 @@ optional=[ "ultranest==4.3.2", "zeus-mcmc==2.5.4", ] +docs=[ + "sphinx", + "furo", + "myst-parser", + "sphinx_copybutton", + "sphinx_design", + "sphinx_inline_tabs", + 
"sphinx_autodoc_typehints" +] test = ["pytest"] dev = ["pytest", "black"] diff --git a/readthedocs.yml b/readthedocs.yml index ca3049c2a..2c9c24e6d 100644 --- a/readthedocs.yml +++ b/readthedocs.yml @@ -1,13 +1,16 @@ version: 2 build: - os: ubuntu-20.04 + os: ubuntu-22.04 tools: python: "3.11" python: install: - - requirements: docs/requirements.txt + - method: pip + path: . + extra_requirements: + - docs sphinx: configuration: docs/conf.py \ No newline at end of file From 57ed16baf3a33d49659460e67a534db62a3e9b03 Mon Sep 17 00:00:00 2001 From: Jammy2211 Date: Wed, 21 May 2025 19:55:06 +0100 Subject: [PATCH 4/5] conf.py --- docs/conf.py | 14 +------------- 1 file changed, 1 insertion(+), 13 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index fcb18313a..cdd05935d 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -12,30 +12,18 @@ # documentation root, use os.path.abspath to make it absolute, like shown here. # -from pyprojroot import here - -workspace_path = str(here()) - import os import sys sys.path.insert(0, os.path.abspath(".")) -clone_path = str(here()) -clone_path = os.path.split(clone_path)[0] - -sys.path.insert( - 0, - os.path.abspath(clone_path), -) - import autofit # -- Project information ----------------------------------------------------- year = datetime.date.today().year project = "PyAutoFit" -copyright = "2022, James Nightingale, Richard Hayes" +copyright = "2025, James Nightingale, Richard Hayes" author = "James Nightingale, Richard Hayes" # The full version, including alpha/beta/rc tags From 3f8ce4d5e867d7b860ea8d11a31654fe366d8129 Mon Sep 17 00:00:00 2001 From: Jammy2211 Date: Tue, 27 May 2025 15:05:53 +0100 Subject: [PATCH 5/5] docs --- docs/overview/the_basics.rst | 64 ++++++++++++++++++++++++++++-------- 1 file changed, 51 insertions(+), 13 deletions(-) diff --git a/docs/overview/the_basics.rst b/docs/overview/the_basics.rst index da3d213d5..3bdd0fd66 100644 --- a/docs/overview/the_basics.rst +++ b/docs/overview/the_basics.rst @@ -573,31 
+573,69 @@ Multiple Datasets Many model-fitting problems require multiple datasets to be fitted simultaneously in order to provide the best constraints on the model. -In **PyAutoFit**, all you have to do to fit multiple datasets is sum your ``Analysis`` classes together: +In **PyAutoFit**, all you have to do to fit multiple datasets is combine them with the model via ``AnalysisFactor`` +objects. .. code-block:: python - analysis_0 = Analysis(data=data_0, noise_map=noise_map_0) - analysis_1 = Analysis(data=data_1, noise_map=noise_map_1) + analysis_0 = Analysis(data=data, noise_map=noise_map) + analysis_1 = Analysis(data=data, noise_map=noise_map) - # This means the model is fitted to both datasets simultaneously. + analysis_list = [analysis_0, analysis_1] - analysis = analysis_0 + analysis_1 + analysis_factor_list = [] - # summing a list of analysis objects is also a valid API: + for analysis in analysis_list: - analysis = sum([analysis_0, analysis_1]) + # The model can be customized here so that different model parameters are tied to each analysis. + model_analysis = model.copy() -By summing analysis objects the log likelihood values computed by the ``log_likelihood_function`` of each individual -analysis class are summed to give an overall log likelihood value that the non-linear search samples when model-fitting. + analysis_factor = af.AnalysisFactor(prior_model=model_analysis, analysis=analysis) + + analysis_factor_list.append(analysis_factor) + +All ``AnalysisFactor`` objects are combined into a ``FactorGraphModel``, which represents a global model fit to +multiple datasets using a graphical model structure. + +The key outcomes of this setup are: + + - The individual log likelihoods from each ``Analysis`` object are summed to form the total log likelihood + evaluated during the model-fitting process. 
+ + - Results from all datasets are output to a unified directory, with subdirectories for visualizations + from each analysis object, as defined by their ``visualize`` methods. + +This is a basic use of **PyAutoFit**'s graphical modeling capabilities, which support advanced hierarchical +and probabilistic modeling for large, multi-dataset analyses. + +To inspect the model, we print ``factor_graph.global_prior_model.info``. + +.. code-block:: python + + print(factor_graph.global_prior_model.info) + +To fit multiple datasets, we pass the ``FactorGraphModel`` to a non-linear search. + +Unlike single-dataset fitting, we now pass the ``factor_graph.global_prior_model`` as the model and +the ``factor_graph`` itself as the analysis object. + +This structure enables simultaneous fitting of multiple datasets in a consistent and scalable way. + +.. code-block:: python + + search = af.DynestyStatic( + nlive=100, + ) + + result_list = search.fit(model=factor_graph.global_prior_model, analysis=factor_graph) .. note:: In the simple example above, instances of the same ``Analysis`` class (``analysis_0`` and ``analysis_1``) were -summed. However, different ``Analysis`` classes can also be summed together. This is useful when fitting different -datasets that each require a unique ``log_likelihood_function`` to be fitted simultaneously. For more detailed -information and a dedicated API for customizing how the model changes across different datasets, refer to -the [multiple datasets cookbook](https://pyautofit.readthedocs.io/en/latest/cookbooks/multiple_datasets.html). + combined. However, different ``Analysis`` classes can also be combined. This is useful when fitting different + datasets that each require a unique ``log_likelihood_function`` to be fitted simultaneously. 
For more detailed + information and a dedicated API for customizing how the model changes across different datasets, refer to + the `multiple datasets cookbook <https://pyautofit.readthedocs.io/en/latest/cookbooks/multiple_datasets.html>`_. Wrap Up -------