-
Notifications
You must be signed in to change notification settings - Fork 33
Reduce duplicated logging code, standardize logs #777
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Changes from all commits
69859a1
e68a1ed
e98f2b1
a3c0b6f
fa93fe7
41cbdce
4bcc46f
866f7e8
c83d8af
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -7,6 +7,7 @@ | |
| from typing import Any | ||
|
|
||
| from fme.core.cloud import is_local | ||
| from fme.core.device import get_device | ||
| from fme.core.distributed import Distributed | ||
| from fme.core.wandb import WandB | ||
|
|
||
|
|
@@ -54,7 +55,34 @@ class LoggingConfig: | |
| def __post_init__(self): | ||
| self._dist = Distributed.get_instance() | ||
|
|
||
| def configure_logging(self, experiment_dir: str, log_filename: str): | ||
| def configure_logging( | ||
| self, | ||
| experiment_dir: str, | ||
| log_filename: str, | ||
| config: Mapping[str, Any], | ||
| resumable: bool = False, | ||
|
Member
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. The default value for … |
||
| ): | ||
| """ | ||
| Configure global logging settings, including WandB, and output | ||
| initial logs of the runtime environment. | ||
|
|
||
| Args: | ||
| experiment_dir: Directory to save logs to. | ||
| log_filename: Name of the log file. | ||
| config: Configuration dictionary to log to WandB. | ||
| resumable: Whether this is a resumable run. | ||
| """ | ||
| self._configure_logging_module(experiment_dir, log_filename) | ||
| log_versions() | ||
| log_beaker_url() | ||
| self._configure_wandb( | ||
| experiment_dir=experiment_dir, | ||
| config=config, | ||
| resumable=resumable, | ||
| ) | ||
| logging.info(f"Current device is {get_device()}") | ||
|
|
||
| def _configure_logging_module(self, experiment_dir: str, log_filename: str): | ||
| """ | ||
| Configure the global `logging` module based on this LoggingConfig. | ||
| """ | ||
|
|
@@ -85,14 +113,15 @@ def configure_logging(self, experiment_dir: str, log_filename: str): | |
| fh.setFormatter(logging.Formatter(self.log_format)) | ||
| logger.addHandler(fh) | ||
|
|
||
| def configure_wandb( | ||
| def _configure_wandb( | ||
| self, | ||
| experiment_dir: str, | ||
| config: Mapping[str, Any], | ||
| env_vars: Mapping[str, Any] | None = None, | ||
| resumable: bool = True, | ||
| resume: Any = None, | ||
| **kwargs, | ||
| ): | ||
| env_vars = retrieve_env_vars() | ||
| if resume is not None: | ||
| raise ValueError( | ||
| "The 'resume' argument is no longer supported, " | ||
|
|
@@ -107,7 +136,6 @@ def configure_wandb( | |
| elif env_vars is not None: | ||
| config_copy["environment"] = env_vars | ||
|
|
||
| experiment_dir = config["experiment_dir"] | ||
| if self.wandb_dir_in_experiment_dir: | ||
| wandb_dir = experiment_dir | ||
| else: | ||
|
|
@@ -123,10 +151,24 @@ def configure_wandb( | |
| experiment_dir=experiment_dir, | ||
| resumable=resumable, | ||
| dir=wandb_dir, | ||
| **kwargs, | ||
|
Member
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more.
|
||
| notes=_get_beaker_url(_get_beaker_id()), | ||
| ) | ||
|
|
||
|
|
||
| def _get_beaker_id() -> str | None: | ||
| try: | ||
| return os.environ["BEAKER_EXPERIMENT_ID"] | ||
| except KeyError: | ||
| logging.warning("Beaker Experiment ID not found.") | ||
| return None | ||
|
|
||
|
|
||
| def _get_beaker_url(beaker_id: str | None) -> str: | ||
| if beaker_id is None: | ||
| return "No beaker URL." | ||
| return f"https://beaker.org/ex/{beaker_id}" | ||
|
|
||
|
|
||
| def log_versions(): | ||
| import torch | ||
|
|
||
|
|
@@ -158,13 +200,9 @@ def log_beaker_url(beaker_id=None): | |
| Returns the Beaker URL. | ||
| """ | ||
| if beaker_id is None: | ||
| try: | ||
| beaker_id = os.environ["BEAKER_EXPERIMENT_ID"] | ||
| except KeyError: | ||
| logging.warning("Beaker Experiment ID not found.") | ||
| return None | ||
| beaker_id = _get_beaker_id() | ||
|
|
||
| beaker_url = f"https://beaker.org/ex/{beaker_id}" | ||
| beaker_url = _get_beaker_url(beaker_id) | ||
| logging.info(f"Beaker ID: {beaker_id}") | ||
| logging.info(f"Beaker URL: {beaker_url}") | ||
| return beaker_url | ||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Should we remove
`to_flat_dict` here (and in the other inference entrypoints), as was done in the training scripts?