Skip to content

Commit cc76b12

Browse files
committed
Various minor improvements
1 parent 6fd61ab commit cc76b12

File tree

6 files changed

+66
-22
lines changed

6 files changed

+66
-22
lines changed

code/postprocess.py

Lines changed: 40 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,24 @@
1-
"""Script for converting file """
1+
"""
2+
Script for converting file
3+
4+
SPDX-License-Identifier: MIT
5+
"""
26
import json
7+
import logging
8+
import sys
39
import typing
410
from pathlib import Path
511

612
import config
713
import dolfin
8-
from cardiac_geometries.geometry import Geometry
9-
from cardiac_geometries.geometry import load_microstructure
14+
import numpy as np
15+
from cardiac_geometries.geometry import Geometry, load_microstructure
16+
17+
logger = logging.Logger(__name__, logging.INFO)
18+
ch = logging.StreamHandler(sys.stdout)
19+
FORMAT = '%(levelname)-5s [%(filename)s:%(lineno)d] %(message)s'
20+
ch.setFormatter(logging.Formatter(FORMAT))
21+
logger.addHandler(ch)
1022

1123

1224
def generate_fiber_xdmf_file(
@@ -17,17 +29,17 @@ def generate_fiber_xdmf_file(
1729

1830
geo = Geometry.from_file(outfile, schema_path=outfile.with_suffix(".json"))
1931

20-
f0, s0, n0 = load_microstructure(
32+
f0, _, _ = load_microstructure(
2133
mesh=geo.mesh,
2234
microstructure_path=microstructure_path,
2335
)
2436

25-
# Save fibers to a file that can be visualized in paraview
37+
# Save fibers to a file that can be visualized in Paraview
2638

2739
with dolfin.XDMFFile(fiber_path.as_posix()) as f:
2840
f.write(f0)
2941

30-
print(f"Saved fibers to {fiber_path}")
42+
logger.info(f"Saved fibers to {fiber_path}")
3143
# Compute some features. This could be some results presented in the paper
3244

3345
return {
@@ -39,11 +51,28 @@ def generate_fiber_xdmf_file(
3951
}
4052

4153

42-
def check_results(features_path: Path, features):
43-
expected_feautres = json.loads(features_path.read_text())
44-
print("Checking reproducibility")
45-
assert expected_feautres == features
46-
print("Results are reproducible!")
54+
def check_results(features_path: Path, features: dict):
55+
"""
56+
Check if data from input file `features_path` matches input features
57+
58+
Args:
59+
features_path (Path): Path to reference results
60+
features (dict): Results generated from current simulation
61+
"""
62+
expected_features = json.loads(features_path.read_text())
63+
logger.info("Checking reproducibility")
64+
reproducible = True
65+
# Check each (key, value) pair in the features and check they are
66+
# within machine precision
67+
for key in expected_features.keys():
68+
if not np.isclose(expected_features[key], features[key]):
69+
logger.error(f"{key}: {expected_features[key]}!={features[key]}")
70+
reproducible = False
71+
72+
if reproducible:
73+
logger.info("Results are reproducible")
74+
else:
75+
raise RuntimeError("Results are not reproducible")
4776

4877

4978
def main() -> int:

code/pre_processing.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -16,11 +16,11 @@
1616
import sys
1717

1818
# We set default log level to be info
19-
logger = logging.Logger("Preprocessing", level=logging.INFO)
20-
logger.addHandler(logging.StreamHandler(sys.stdout))
21-
# Mute FFC and UFL errors at this stage
22-
logging.getLogger('FFC').setLevel(logging.WARNING)
23-
logging.getLogger('UFL').setLevel(logging.WARNING)
19+
logger = logging.Logger(__name__, logging.INFO)
20+
ch = logging.StreamHandler(sys.stdout)
21+
FORMAT = '%(levelname)-5s [%(filename)s:%(lineno)d] %(message)s'
22+
ch.setFormatter(logging.Formatter(FORMAT))
23+
logger.addHandler(ch)
2424

2525
schema = {
2626
"mesh": H5Path(

code/run_fiber_generation.py

Lines changed: 11 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,14 +3,24 @@
33
algorithm
44
55
https://finsberg.github.io/ldrb/
6+
7+
SPDX-License-Identifier: MIT
68
"""
9+
import logging
10+
import sys
711
from pathlib import Path
812

913
import config
1014
import dolfin
1115
import ldrb
1216
from cardiac_geometries.geometry import Geometry
1317

18+
logger = logging.Logger(__name__, logging.INFO)
19+
ch = logging.StreamHandler(sys.stdout)
20+
FORMAT = '%(levelname)-5s [%(filename)s:%(lineno)d] %(message)s'
21+
ch.setFormatter(logging.Formatter(FORMAT))
22+
logger.addHandler(ch)
23+
1424

1525
def generate_fibers(outfile: Path, microstructure_path: Path) -> None:
1626

@@ -46,7 +56,7 @@ def generate_fibers(outfile: Path, microstructure_path: Path) -> None:
4656
h5file.write(s0, "s0")
4757
h5file.write(n0, "n0")
4858

49-
print(f"Microstructure saved to {microstructure_path}")
59+
logger.info(f"Microstructure saved to {microstructure_path}")
5060

5161

5262
def main() -> int:

docs/_config.yml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -35,6 +35,7 @@ parse:
3535
sphinx:
3636
config:
3737
bibtex_bibfiles: ["refs.bib"]
38+
suppress_warnings: ["bibtex.duplicate_citation"] # If the same paper is cited in multiple files
3839

3940
extra_extensions:
4041
- 'sphinx.ext.autodoc'

docs/abstract.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ Here you can write a little abstract for the paper or some text explaining what
55

66
You could also add citation that you can add to the `refs.bib` in the `docs` folder. You can read more about adding citations [here](https://jupyterbook.org/en/stable/content/citations.html).
77

8-
In this example we use the LDRB algorithm {cite}`bayer2012novel` to data from {cite}`martinez2019repository`
8+
In this example we use the LDRB algorithm {cite}`bayer2012novel` to data from {cite}`martinez2019repository`.
99

1010

1111
## References

docs/reproducing.md

Lines changed: 8 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33

44
## Data
55

6-
Data is available in a dropbox folder. Use the script `download_data.sh` in the data folder to download the data, i.e
6+
Data is available in a Dropbox folder. Use the script `download_data.sh` in the data folder to download the data, i.e.
77
```bash
88
cd data
99
bash download_data.sh
@@ -18,9 +18,9 @@ The data folder should have the following structure after the data is downloaded
1818
├── heart01.msh
1919
└── heart02.msh
2020
```
21-
These meshes are originally taken from <https://ora.ox.ac.uk/objects/uuid:951b086c-c4ba-41ef-b967-c2106d87ee06>, but since the original data is about 26GB we decided to make a smaller dataset for this example.
21+
These meshes are originally taken from {cite}`martinez2019repository`, but since the original data is about 26GB we decided to make a smaller dataset for this example.
2222

23-
Eventually when you publish a paper you could put this data on e.g [Zenodo](https://zenodo.org). That will make sure the data gets it's own DOI.
23+
Eventually when you publish a paper you could put this data on e.g. [Zenodo](https://zenodo.org). That will make sure the data gets its own [Digital Object Identifier](https://www.doi.org/) (DOI).
2424

2525

2626
## Scripts
@@ -31,7 +31,7 @@ In order to reproduce the results you need to first run the pre-processing scrip
3131
```
3232
python3 pre_processing.py
3333
```
34-
This will convert the meshes from Gmsh to a dolfin format.
34+
This will convert the meshes from Gmsh to a Dolfin format.
3535

3636
### Fiber generation
3737
The next step is to run the fiber generation. You can do this by running the script
@@ -46,3 +46,7 @@ The final step is to postprocess the results by running the script
4646
python3 postprocess.py
4747
```
4848
This will generate a file for visualizing the fibers in Paraview (inside `code/results`, called `fiber_<heart_nr>.xdmf`). This script will also compare some features computed from the fibers with the results published in the (artificial) paper. If the results differ, then the program will raise an error.
49+
50+
```{bibliography}
51+
:filter: docname in docnames
52+
```

0 commit comments

Comments
 (0)