diff --git a/scripts/imaging/features/pixelization/modeling.py b/scripts/imaging/features/pixelization/modeling.py index 6edbbce7..94159f60 100644 --- a/scripts/imaging/features/pixelization/modeling.py +++ b/scripts/imaging/features/pixelization/modeling.py @@ -288,8 +288,11 @@ This is why the `batch_size` above is 20, lower than other examples, because reducing the batch size ensures a more modest amount of VRAM is used. If you have a GPU with more VRAM, increasing the batch size will lead to faster run times. -Given VRAM use is an important consideration, we print out the estimated VRAM required for this +Given VRAM use is an important consideration, we print out the estimated VRAM required for this model-fit and advise you do this for your own pixelization model-fits. + +The method below prints the VRAM usage estimate for the analysis and model with the specified batch size; +it takes about 20-30 seconds to run, so you may want to comment it out once you are familiar with your GPU's VRAM limits. """ analysis.print_vram_use(model=model, batch_size=search.batch_size) diff --git a/scripts/interferometer/features/pixelization/modeling.py b/scripts/interferometer/features/pixelization/modeling.py index 1f45e177..1a426d94 100644 --- a/scripts/interferometer/features/pixelization/modeling.py +++ b/scripts/interferometer/features/pixelization/modeling.py @@ -337,6 +337,9 @@ VRAM does scale with batch size though, and for high resoluiton datasets may require you to reduce from the value of 20 set above if your GPU does not have too much VRAM (e.g. < 4GB). + +The method below prints the VRAM usage estimate for the analysis and model with the specified batch size; +it takes about 20-30 seconds to run, so you may want to comment it out once you are familiar with your GPU's VRAM limits. 
""" analysis.print_vram_use(model=model, batch_size=search.batch_size) diff --git a/scripts/multi/modeling.py b/scripts/multi/modeling.py index b2f8b9b1..657dc356 100644 --- a/scripts/multi/modeling.py +++ b/scripts/multi/modeling.py @@ -272,8 +272,11 @@ When multiple datasets are fitted simultaneously, as in this example, VRAM usage increases with each dataset, as their data structures must all be stored in VRAM. -Given VRAM use is an important consideration, we print out the estimated VRAM required for this +Given VRAM use is an important consideration, we print out the estimated VRAM required for this model-fit and advise you do this for your own pixelization model-fits. + +The method below prints the VRAM usage estimate for the analysis and model with the specified batch size; +it takes about 20-30 seconds to run, so you may want to comment it out once you are familiar with your GPU's VRAM limits. """ factor_graph.print_vram_use( model=factor_graph.global_prior_model, batch_size=search.batch_size