Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
60 changes: 57 additions & 3 deletions volume-cartographer/apps/src/vc_render_tifxyz.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@
#include <chrono>
#include <cstdarg>
#include <thread>
#include <optional>
#include <unordered_set>
#include <tiffio.h>
#include <omp.h>
Expand Down Expand Up @@ -1054,6 +1055,34 @@ static void renderTiles(
}


// ============================================================
// readVolumeVoxelSize – read voxelsize from volume metadata
// ============================================================

// Looks up the physical voxel size recorded alongside a volume on disk.
// Search order:
//   1. <vol>/meta.json       – top-level "voxelsize"
//   2. <vol>/metadata.json   – "voxelsize" nested under a "scan" object
//   3. <vol>/metadata.json   – top-level "voxelsize"
// Returns std::nullopt when no readable candidate file carries a numeric
// "voxelsize". Best-effort by design: filesystem probing uses the
// non-throwing error_code overload and JSON failures are swallowed,
// so this never throws for missing/unreadable/malformed metadata.
static std::optional<double> readVolumeVoxelSize(const std::filesystem::path& volPath)
{
    using json = nlohmann::json;
    // Try one candidate file; when `key` is non-null, descend into that
    // sub-object before looking for "voxelsize".
    auto tryFile = [](const std::filesystem::path& p, const char* key) -> std::optional<double> {
        std::error_code ec;
        // Non-throwing probe: a filesystem error (e.g. EACCES on a parent
        // directory) is treated the same as "file not present".
        if (!std::filesystem::exists(p, ec) || ec) return std::nullopt;
        try {
            std::ifstream in(p);
            // The file may have vanished or be unreadable despite exists();
            // bail out explicitly instead of relying on parse() to throw.
            if (!in.is_open()) return std::nullopt;
            auto j = json::parse(in);
            if (key) {
                if (j.contains(key) && j[key].is_object())
                    j = j[key];
                else
                    return std::nullopt;
            }
            // is_number() accepts both integer and floating-point encodings.
            if (j.contains("voxelsize") && j["voxelsize"].is_number())
                return j["voxelsize"].get<double>();
        } catch (...) {}  // malformed JSON => treat as "no metadata"
        return std::nullopt;
    };
    if (auto v = tryFile(volPath / "meta.json", nullptr)) return v;
    if (auto v = tryFile(volPath / "metadata.json", "scan")) return v;
    if (auto v = tryFile(volPath / "metadata.json", nullptr)) return v;
    return std::nullopt;
}

// ============================================================
// main
// ============================================================
Expand Down Expand Up @@ -1111,7 +1140,9 @@ int main(int argc, char *argv[])
("merge-tiff-parts", po::bool_switch()->default_value(false), "Merge partial TIFFs from multi-VM render")
("pyramid", po::value<bool>()->default_value(true), "Build pyramid levels L1-L5 (default: true)")
("resume", po::bool_switch()->default_value(false), "Skip chunks that already exist on disk")
("pre", po::bool_switch()->default_value(false), "Create zarr + all level datasets");
("pre", po::bool_switch()->default_value(false), "Create zarr + all level datasets")
("voxel-size", po::value<double>(), "Physical voxel size for OME-Zarr scale metadata (reads from volume metadata if omitted)")
("voxel-unit", po::value<std::string>()->default_value("nanometer"), "Physical unit for OME-Zarr axes (e.g. nanometer, micrometer)");
// clang-format on

po::options_description all("Usage");
Expand Down Expand Up @@ -1357,6 +1388,27 @@ int main(int argc, char *argv[])
logPrintf(stdout, "chunk shape [%s]\n", oss.str().c_str());
}

// --- Resolve voxel size for OME-Zarr metadata ---
const std::string voxel_unit = parsed["voxel-unit"].as<std::string>();
double base_voxel_size = 1.0;
if (parsed.count("voxel-size")) {
base_voxel_size = parsed["voxel-size"].as<double>();
if (!std::isfinite(base_voxel_size) || base_voxel_size <= 0.0) {
logPrintf(stderr, "Error: --voxel-size must be a positive finite number\n");
return EXIT_FAILURE;
}
logPrintf(stdout, "Voxel size (from CLI): %g %s\n", base_voxel_size, voxel_unit.c_str());
} else if (auto mv = readVolumeVoxelSize(vol_path); mv.has_value()) {
if (std::isfinite(*mv) && *mv > 0.0) {
base_voxel_size = *mv;
logPrintf(stdout, "Voxel size (from volume metadata): %g %s\n", base_voxel_size, voxel_unit.c_str());
} else {
logPrintf(stderr, "Warning: ignoring invalid metadata voxelsize; using default 1.0\n");
}
} else {
logPrintf(stdout, "Voxel size: 1.0 (no metadata found; override with --voxel-size)\n");
}

int rotQuadGlobal = -1;
if (std::abs(rotate_angle) > 1e-6) {
rotQuadGlobal = normalizeQuadrantRotation(rotate_angle);
Expand Down Expand Up @@ -1506,7 +1558,8 @@ int main(int argc, char *argv[])
cv::Size attrXY = tgt_size;
if (rotQuad >= 0 && (rotQuad % 2) == 1) std::swap(attrXY.width, attrXY.height);
writeZarrAttrs(outFilePath, vol_path, group_idx, baseZ, slice_step, accum_step,
accum_type_str, accumOffsets.size(), attrXY, baseZ, CH, CW);
accum_type_str, accumOffsets.size(), attrXY, baseZ, CH, CW,
base_voxel_size, voxel_unit);
return true;
} else if (numParts > 1) {
if (!std::filesystem::exists(std::filesystem::path(zarrOutputArg) / "0" / ".zarray")) {
Expand Down Expand Up @@ -1705,7 +1758,8 @@ int main(int argc, char *argv[])
cv::Size attrXY = tgt_size;
if (rotQuad >= 0 && (rotQuad % 2) == 1) std::swap(attrXY.width, attrXY.height);
writeZarrAttrs(outFilePath, vol_path, group_idx, baseZ, slice_step, accum_step,
accum_type_str, accumOffsets.size(), attrXY, baseZ, CH, CW);
accum_type_str, accumOffsets.size(), attrXY, baseZ, CH, CW,
base_voxel_size, voxel_unit);
}
}
return true;
Expand Down
4 changes: 3 additions & 1 deletion volume-cartographer/core/include/vc/core/util/Zarr.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,9 @@ void writeZarrAttrs(const std::filesystem::path& outFile,
const std::filesystem::path& volPath, int groupIdx,
size_t baseZ, double sliceStep, double accumStep,
const std::string& accumTypeStr, size_t accumSamples,
const cv::Size& canvasSize, size_t CZ, size_t CH, size_t CW);
const cv::Size& canvasSize, size_t CZ, size_t CH, size_t CW,
double baseVoxelSize = 1.0,
const std::string& voxelUnit = "");

// Write a dense uint8 ZYX subregion into a freshly created dataset via
// writeChunk(). Chunks overlapping the region are materialized; untouched
Expand Down
9 changes: 7 additions & 2 deletions volume-cartographer/core/src/Tiff.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -285,10 +285,11 @@ bool mergeTiffParts(const std::string& outputPath, int numParts)
}

std::cout << "Merging " << groups.size() << " TIFF(s) from " << numParts << " parts..." << std::endl;
size_t failures = 0;
for (auto& [finalPath, partFiles] : groups) {
std::sort(partFiles.begin(), partFiles.end());
TIFF* first = TIFFOpen(partFiles[0].c_str(), "r");
if (!first) { std::cerr << "Cannot open " << partFiles[0] << "\n"; continue; }
if (!first) { std::cerr << "Cannot open " << partFiles[0] << "\n"; failures++; continue; }
uint32_t w, h, tw, th; uint16_t bps, spp, sf, comp;
TIFFGetField(first, TIFFTAG_IMAGEWIDTH, &w);
TIFFGetField(first, TIFFTAG_IMAGELENGTH, &h);
Expand All @@ -301,7 +302,7 @@ bool mergeTiffParts(const std::string& outputPath, int numParts)
TIFFClose(first);

TIFF* out = TIFFOpen(finalPath.c_str(), "w");
if (!out) { std::cerr << "Cannot create " << finalPath << "\n"; continue; }
if (!out) { std::cerr << "Cannot create " << finalPath << "\n"; failures++; continue; }
TIFFSetField(out, TIFFTAG_IMAGEWIDTH, w); TIFFSetField(out, TIFFTAG_IMAGELENGTH, h);
TIFFSetField(out, TIFFTAG_TILEWIDTH, tw); TIFFSetField(out, TIFFTAG_TILELENGTH, th);
TIFFSetField(out, TIFFTAG_BITSPERSAMPLE, bps); TIFFSetField(out, TIFFTAG_SAMPLESPERPIXEL, spp);
Expand Down Expand Up @@ -330,6 +331,10 @@ bool mergeTiffParts(const std::string& outputPath, int numParts)
for (auto& pf : partFiles) std::filesystem::remove(pf);
std::cout << " " << finalPath.filename().string() << ": " << merged << " tiles from " << partFiles.size() << " parts\n";
}
if (failures > 0) {
std::cerr << "Merge failed: " << failures << " of " << groups.size() << " TIFF(s) could not be created.\n";
return false;
}
std::cout << "Merge complete.\n";
return true;
}
23 changes: 13 additions & 10 deletions volume-cartographer/core/src/Zarr.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -322,7 +322,8 @@ void createPyramidDatasets(const std::filesystem::path& outDir,

std::vector<size_t> prevShape = shape0;
for (int level = 1; level <= 5; level++) {
std::vector<size_t> shape = {(prevShape[0]+1)/2, (prevShape[1]+1)/2, (prevShape[2]+1)/2};
// Keep Z fixed and halve only Y/X at each level (anisotropic scaling).
std::vector<size_t> shape = {prevShape[0], (prevShape[1]+1)/2, (prevShape[2]+1)/2};
size_t chZ = std::min(shape[0], shape0[0]);
std::vector<size_t> chunks = {chZ, std::min(CH, shape[1]), std::min(CW, shape[2])};
vc::createZarrDataset(outDir, std::to_string(level), shape, chunks, dtype, "blosc");
Expand All @@ -338,7 +339,8 @@ void writeZarrAttrs(const std::filesystem::path& outDir,
const std::filesystem::path& volPath, int groupIdx,
size_t baseZ, double sliceStep, double accumStep,
const std::string& accumTypeStr, size_t accumSamples,
const cv::Size& canvasSize, size_t CZ, size_t CH, size_t CW)
const cv::Size& canvasSize, size_t CZ, size_t CH, size_t CW,
double baseVoxelSize, const std::string& voxelUnit)
{
json attrs;
attrs["source_zarr"] = volPath.string();
Expand All @@ -356,19 +358,20 @@ void writeZarrAttrs(const std::filesystem::path& outDir,

json ms;
ms["version"] = "0.4"; ms["name"] = "render";
ms["axes"] = json::array({
json{{"name","z"},{"type","space"}},
json{{"name","y"},{"type","space"}},
json{{"name","x"},{"type","space"}}
});
auto makeAxis = [&](const char* name) -> json {
json ax = {{"name", name}, {"type", "space"}};
if (!voxelUnit.empty()) ax["unit"] = voxelUnit;
return ax;
};
ms["axes"] = json::array({makeAxis("z"), makeAxis("y"), makeAxis("x")});
ms["datasets"] = json::array();
for (int l = 0; l <= 5; l++) {
double s = std::pow(2.0, l);
const double sz = 1.0;
const double sYX = baseVoxelSize * std::pow(2.0, l);
const double sZ = baseVoxelSize;
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

P2 Badge Include slice-step in multiscale Z spacing

writeZarrAttrs now writes physical OME-Zarr scales, but it hardcodes sZ to baseVoxelSize and ignores the actual rendered slice spacing. When vc_render_tifxyz is run with --slice-step > 1, consecutive output slices are farther apart than one source voxel, so this metadata underreports Z spacing and yields incorrect physical distances/aspect ratios in downstream consumers. Because sliceStep is already passed into writeZarrAttrs, the Z scale should incorporate it (while remaining constant across pyramid levels if Z is preserved).

Useful? React with 👍 / 👎.

ms["datasets"].push_back({
{"path", std::to_string(l)},
{"coordinateTransformations", json::array({
json{{"type","scale"},{"scale",json::array({sz,s,s})}},
json{{"type","scale"},{"scale",json::array({sZ, sYX, sYX})}},
json{{"type","translation"},{"translation",json::array({0.0,0.0,0.0})}}
})}
});
Expand Down
Loading