8 changes: 8 additions & 0 deletions src/openfe/storage/warehouse.py
@@ -166,6 +166,7 @@ def _get_store_for_key(self, key: GufeKey) -> ExternalStorage:
ValueError
If the key is not found in any store.
"""
print(key)
for name in self.stores:
if key in self.stores[name]:
return self.stores[name]
@@ -313,7 +314,14 @@ class FileSystemWarehouse(WarehouseBaseClass):
"""

def __init__(self, root_dir: str = "warehouse"):
self.root_dir = root_dir
setup_store = FileStorage(f"{root_dir}/setup")
result_store = FileStorage(f"{root_dir}/result")
stores = WarehouseStores(setup=setup_store, result=result_store)
super().__init__(stores)

def dump(self):
print(self.root_dir)
print(self.setup_store)
for item in self.setup_store:
print(item)
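
A minimal sketch (not part of this diff) of how the FileSystemWarehouse introduced above might be constructed and inspected; the store/load calls mirror the methods used elsewhere in this PR, and the network object is a placeholder:

# Sketch only: assumes FileSystemWarehouse behaves as shown in this diff,
# i.e. it creates a "setup" and a "result" FileStorage under root_dir.
from openfe.storage.warehouse import FileSystemWarehouse

warehouse = FileSystemWarehouse(root_dir="warehouse")

# `alchemical_network` stands in for a planned network object; the store/load
# round trip below mirrors the calls used by the CLI changes later in this PR.
# warehouse.store_setup_tokenizable(alchemical_network)
# network = warehouse.load_setup_tokenizable(str(alchemical_network.key))

# dump() prints the root directory, the setup store, and each stored item.
warehouse.dump()
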
10 changes: 9 additions & 1 deletion src/openfecli/commands/plan_rbfe_network.py
@@ -1,8 +1,8 @@
# This code is part of OpenFE and is licensed under the MIT license.
# For details, see https://github.com/OpenFreeEnergy/openfe

import click

from openfe.storage.warehouse import FileSystemWarehouse
from openfecli import OFECommandPlugin
from openfecli.parameters import (
COFACTORS,
@@ -12,6 +12,7 @@
OUTPUT_DIR,
OVERWRITE,
PROTEIN,
WAREHOUSE,
YAML_OPTIONS,
)
from openfecli.utils import print_duration, write
@@ -130,6 +131,7 @@ def plan_rbfe_network_main(
@N_PROTOCOL_REPEATS.parameter(multiple=False, required=False, default=3, help=N_PROTOCOL_REPEATS.kwargs["help"]) # fmt: skip
@NCORES.parameter(help=NCORES.kwargs["help"], default=1)
@OVERWRITE.parameter(help=OVERWRITE.kwargs["help"], default=OVERWRITE.kwargs["default"], is_flag=True) # fmt: skip
@WAREHOUSE.parameter(help=WAREHOUSE.kwargs["help"], is_flag=True)
@print_duration
def plan_rbfe_network(
molecules: list[str],
@@ -140,6 +142,7 @@ def plan_rbfe_network(
n_protocol_repeats: int,
n_cores: int,
overwrite_charges: bool,
warehouse: bool,
):
"""
Plan a relative binding free energy network, saved as JSON files for use by
@@ -243,10 +246,15 @@ def plan_rbfe_network(
# OUTPUT
write("Output:")
write("\tSaving to: " + str(output_dir))
warehouse_object = None
if warehouse:
warehouse_object = FileSystemWarehouse()

plan_alchemical_network_output(
alchemical_network=alchemical_network,
ligand_network=ligand_network,
folder_path=OUTPUT_DIR.get(output_dir),
warehouse=warehouse_object,
)


151 changes: 151 additions & 0 deletions src/openfecli/commands/quickrun-warehouse.py
@@ -0,0 +1,151 @@
import json
import pathlib
from typing import Optional

import click

from openfe.storage.warehouse import FileSystemWarehouse, WarehouseBaseClass
from openfecli.commands.quickrun import _format_exception
from openfecli.plugins import OFECommandPlugin
from openfecli.utils import configure_logger, print_duration, write


@click.command("warehouse-quickrun", short_help="Run a transformation stored in the warehouse")
@click.option(
"--work-dir", "-d", default=None,
type=click.Path(dir_okay=True, file_okay=False, writable=True, path_type=pathlib.Path),
help=(
"Directory in which to store files (defaults to the current directory). "
"If the directory does not exist, it will be created at runtime."
),
) # fmt: skip
@click.option(
"output", "-o", default=None,
type=click.Path(dir_okay=False, file_okay=False, path_type=pathlib.Path),
help="Filepath at which to create and write the JSON-formatted results.",
) # fmt: skip
@click.argument("transformation", type=click.STRING, required=True)
@print_duration
def quickrun(transformation, work_dir, output):
"""Run the transformation (edge) stored in the warehouse under the given key.

The TRANSFORMATION argument is the key of a :class:`.Transformation` held in
the warehouse's setup store; setup objects can be written there by
:ref:`cli_plan-rbfe-network` when it is invoked with the ``--warehouse`` flag.

Running this command will execute the simulation defined by that
transformation, creating a directory for each individual task (``Unit``) in
the workflow. For example, when running the OpenMM HREX Protocol, a directory
will be created for each repeat of the sampling process (by default 3).
"""
import logging
import os
import sys

from gufe.protocols.protocoldag import execute_DAG
from gufe.tokenization import JSON_HANDLER
from gufe.transformations.transformation import Transformation

from openfe.utils import logging_control

# avoid problems with output not showing if queueing system kills a job
sys.stdout.reconfigure(line_buffering=True)

stdout_handler = logging.StreamHandler(sys.stdout)

configure_logger("gufekey", handler=stdout_handler)
configure_logger("gufe", handler=stdout_handler)
configure_logger("openfe", handler=stdout_handler)

# silence the openmmtools.multistate API warning
logging_control._silence_message(
msg=[
"The openmmtools.multistate API is experimental and may change in future releases",
],
logger_names=[
"openmmtools.multistate.multistatereporter",
"openmmtools.multistate.multistateanalyzer",
"openmmtools.multistate.multistatesampler",
],
)
# turn warnings into log message (don't show stack trace)
logging.captureWarnings(True)

if work_dir is None:
work_dir = pathlib.Path(os.getcwd()) / "warehouse"
else:
work_dir.mkdir(exist_ok=True, parents=True)

# Set up the warehouse and load the transformation by its key
warehouse = FileSystemWarehouse()

write("Loading transformation from the warehouse...")
trans: Transformation = warehouse.load_setup_tokenizable(transformation)

output = work_dir / (str(trans.key) + "_results.json")

write("Planning simulations for this edge...")
dag = trans.create()
write("Starting the simulations for this edge...")
dagresult = execute_DAG(
dag,
shared_basedir=work_dir,
scratch_basedir=work_dir,
keep_shared=True,
raise_error=False,
n_retries=2,
)
# How this would change with new context
# dagresult = execute_DAG(
# dag,
# shared_storage=warehouse.shared,
# perm_storage=warehouse.perm
# scratch_basedir=work_dir,
# keep_shared=True,
# raise_error=False,
# n_retries=2,
# )
warehouse.store_result_tokenizable(dagresult)
write("Done with all simulations! Analyzing the results....")
prot_result = trans.protocol.gather([dagresult])

if dagresult.ok():
estimate = prot_result.get_estimate()
uncertainty = prot_result.get_uncertainty()
else:
estimate = uncertainty = None # for output file

out_dict = {
"estimate": estimate,
"uncertainty": uncertainty,
"protocol_result": prot_result.to_dict(),
"unit_results": {
unit.key: unit.to_keyed_dict() for unit in dagresult.protocol_unit_results
},
}

with open(output, mode="w") as outf:
json.dump(out_dict, outf, cls=JSON_HANDLER.encoder)

write(f"Here is the result:\n\tdG = {estimate} ± {uncertainty}\n")
write("")

if not dagresult.ok():
# there can be only one, MacLeod
failure = dagresult.protocol_unit_failures[-1]
raise click.ClickException(
f"The protocol unit '{failure.name}' failed with the error "
f"message:\n{_format_exception(failure.exception)}\n\n"
"Details provided in output."
)


PLUGIN = OFECommandPlugin(command=quickrun, section="Quickrun Executor", requires_ofe=(0, 3))

if __name__ == "__main__":
quickrun()
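
A sketch (not part of this diff) of how the planning and execution sides might connect through the warehouse; the key handling below is an assumption based on the methods used in this file, and the network key is a placeholder:

# Sketch only: plan-rbfe-network --warehouse stores the AlchemicalNetwork in
# the setup store; each edge key would be the TRANSFORMATION argument passed
# to the new warehouse-quickrun command.
from openfe.storage.warehouse import FileSystemWarehouse

warehouse = FileSystemWarehouse(root_dir="warehouse")

# "<alchemical-network-key>" is a placeholder; warehouse.dump() lists the
# keys currently held in the setup store.
network = warehouse.load_setup_tokenizable("<alchemical-network-key>")
for transformation in network.edges:
    print(transformation.key)  # feed each key to `openfe warehouse-quickrun`
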
1 change: 1 addition & 0 deletions src/openfecli/commands/quickrun.py
@@ -7,6 +7,7 @@
import click

from openfecli import OFECommandPlugin
from openfecli.parameters import WAREHOUSE
from openfecli.utils import configure_logger, print_duration, write


1 change: 1 addition & 0 deletions src/openfecli/parameters/__init__.py
@@ -9,3 +9,4 @@
from .output_dir import OUTPUT_DIR
from .plan_network_options import YAML_OPTIONS
from .protein import PROTEIN
from .warehouse import WAREHOUSE
4 changes: 4 additions & 0 deletions src/openfecli/parameters/warehouse.py
@@ -0,0 +1,4 @@
import click
from plugcli.params import Option

WAREHOUSE = Option("--warehouse", type=click.BOOL, help="Store outputs in a warehouse instead of writing JSON files to disk", default=False)
47 changes: 27 additions & 20 deletions src/openfecli/plan_alchemical_networks_utils.py
@@ -4,35 +4,42 @@

import json
import pathlib
from typing import Optional

from openfe import AlchemicalNetwork, LigandNetwork
from openfe.storage.warehouse import WarehouseBaseClass
from openfecli.utils import write


def plan_alchemical_network_output(
alchemical_network: AlchemicalNetwork,
ligand_network: LigandNetwork,
folder_path: pathlib.Path,
warehouse: Optional[WarehouseBaseClass],
):
"""Write the contents of an alchemical network into the output folder structure, or into the warehouse if one is given."""

base_name = folder_path.name
folder_path.mkdir(parents=True, exist_ok=True)

an_json = folder_path / f"{base_name}.json"
alchemical_network.to_json(an_json)
write("\t\t- " + base_name + ".json")

ln_fname = "ligand_network.graphml"
with open(folder_path / ln_fname, mode="w") as f:
f.write(ligand_network.to_graphml())
write(f"\t\t- {ln_fname}")

transformations_dir = folder_path / "transformations"
transformations_dir.mkdir(parents=True, exist_ok=True)

for transformation in alchemical_network.edges:
transformation_name = transformation.name or transformation.key
filename = f"{transformation_name}.json"
transformation.to_json(transformations_dir / filename)
write("\t\t\t\t- " + filename)
if warehouse:
warehouse.store_setup_tokenizable(alchemical_network)
warehouse.store_setup_tokenizable(ligand_network)
else:
base_name = folder_path.name
folder_path.mkdir(parents=True, exist_ok=True)

an_json = folder_path / f"{base_name}.json"
alchemical_network.to_json(an_json)
write("\t\t- " + base_name + ".json")

ln_fname = "ligand_network.graphml"
with open(folder_path / ln_fname, mode="w") as f:
f.write(ligand_network.to_graphml())
write(f"\t\t- {ln_fname}")

transformations_dir = folder_path / "transformations"
transformations_dir.mkdir(parents=True, exist_ok=True)

for transformation in alchemical_network.edges:
transformation_name = transformation.name or transformation.key
filename = f"{transformation_name}.json"
transformation.to_json(transformations_dir / filename)
write("\t\t\t\t- " + filename)
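
A sketch (not part of this diff) of the two output paths this function now supports; alchemical_network and ligand_network are placeholders for the objects produced by the planner, and the folder name is only an example:

# Sketch only: assumes the plan_alchemical_network_output signature shown above.
import pathlib

from openfe.storage.warehouse import FileSystemWarehouse
from openfecli.plan_alchemical_networks_utils import plan_alchemical_network_output

# Default behaviour: JSON and graphml files are written under folder_path.
plan_alchemical_network_output(
    alchemical_network=alchemical_network,
    ligand_network=ligand_network,
    folder_path=pathlib.Path("alchemicalNetwork"),
    warehouse=None,
)

# With --warehouse: both networks go into the warehouse's setup store and
# nothing is written under folder_path.
plan_alchemical_network_output(
    alchemical_network=alchemical_network,
    ligand_network=ligand_network,
    folder_path=pathlib.Path("alchemicalNetwork"),
    warehouse=FileSystemWarehouse(),
)
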