diff --git a/.github/generate_job_summary.py b/.github/generate_job_summary.py
deleted file mode 100755
index 93e6c6c..0000000
--- a/.github/generate_job_summary.py
+++ /dev/null
@@ -1,72 +0,0 @@
-import sys
-import json
-import re
-
-if __name__ == "__main__":
- sys.stdout = open(sys.argv[1], "w")
- github = json.load(open(sys.argv[2], "r"))
- steps = json.load(open(sys.argv[3], "r"))
-
- print(f"# {github['workflow']}: Job Summary")
- print("")
-
- print("## Details")
- print(f"- started by: `{github['actor']}`")
- if "event" in github:
- event = github['event']
- if "pull_request" in event:
- print(f"- branch: `{event['pull_request']['head']['ref']}`")
- if "action" in event:
- print(f"- action: `{event['action']}`")
-
- print("")
-
- print("## Summary of Steps")
- print("| Step | Test | Notes | Expected | Reported |")
- print("|---|---|---|---|---|")
-
- all_success = True
- p = re.compile('(?")
- print(json.dumps(github, indent=4, sort_keys=False))
- print("")
-
- print("## steps")
- print("")
- print(json.dumps(steps, indent=4, sort_keys=False))
- print("")
-
- if not all_success:
- sys.exit("One or more tests failed")
diff --git a/.github/gtest_to_markdown.py b/.github/gtest_to_markdown.py
new file mode 100644
index 0000000..675787c
--- /dev/null
+++ b/.github/gtest_to_markdown.py
@@ -0,0 +1,218 @@
+#!/usr/bin/env python3
+
+"""
+Google Test to Markdown Converter
+
+This script converts Google Test (gtest) JSON output into GitHub-flavored markdown format.
+It processes test results and generates a well-formatted markdown document that includes:
+- A timestamped header
+- Summary statistics (total, passed, and failed tests)
+- A detailed table of test results
+- Failure details for any failed tests
+
+The script is designed to be used as a command-line tool, taking two arguments:
+1. context: A string describing the test run context
+2. gtest_json_file: Path to the JSON file containing gtest results
+
+Example usage:
+ python gtest_to_markdown.py "Unit Tests" test_results.json
+
+The output is formatted markdown that can be directly used in GitHub issues, pull requests,
+or documentation.
+"""
+
+import json
+import os
+import sys
+from datetime import datetime
+from typing import Dict, List, Any
+
+def format_duration(milliseconds: float) -> str:
+ """
+ Convert milliseconds to a human-readable duration string.
+
+ Args:
+ milliseconds (float): Duration in milliseconds
+
+ Returns:
+ str: Human-readable duration string in the format:
+ - "X.XXms" for durations < 1 second
+ - "X.XXs" for durations < 1 minute
+ - "X.XXm" for durations >= 1 minute
+
+ Example:
+ >>> format_duration(500)
+ '500.00ms'
+ >>> format_duration(1500)
+ '1.50s'
+ """
+ if milliseconds < 1000:
+ return f"{milliseconds:.2f}ms"
+ seconds = milliseconds / 1000
+ if seconds < 60:
+ return f"{seconds:.2f}s"
+ minutes = seconds / 60
+ return f"{minutes:.2f}m"
+
+def parse_duration(time_value: Any) -> float:
+ """
+ Parse a duration value from gtest output into milliseconds.
+
+ Args:
+ time_value (Any): The duration value from gtest, which could be:
+ - A float (milliseconds)
+ - A string ending in 's' (seconds)
+ - Any other value that should be converted to float
+
+ Returns:
+ float: Duration in milliseconds
+
+ Example:
+ >>> parse_duration(500)
+ 500.0
+ >>> parse_duration("1.5s")
+ 1500.0
+ """
+ try:
+ if isinstance(time_value, str) and time_value.endswith('s'):
+ return float(time_value[:-1]) * 1000 # Convert seconds to milliseconds
+ return float(time_value)
+ except (ValueError, TypeError):
+ return 0.0 # Return 0 for invalid values
+
+def format_failure_message(failure: Dict[str, Any]) -> str:
+ """
+ Format a test failure message from the gtest JSON output.
+
+ Args:
+ failure (Dict[str, Any]): A dictionary containing failure information
+ with optional keys:
+ - failure: The failure message
+ - type: The type of failure
+
+ Returns:
+ str: The message with all newlines replaced with "<br>"
+
+ Example:
+ >>> failure = {"failure": "Expected 2\nbut got 3"}
+ >>> format_failure_message(failure)
+ '<pre>Expected 2<br>but got 3</pre>'
+ """
+ if "failure" in failure:
+ return "<pre>" + failure["failure"].replace("\n", "<br>") + "</pre>"
+ return ""
+
+def convert_to_markdown(data: Dict[str, Any], context: str) -> str:
+ """
+ Convert gtest JSON data to GitHub-flavored markdown.
+
+ This function processes the gtest JSON output and generates a comprehensive
+ markdown document that includes test results, statistics, and failure details.
+
+ Args:
+ data (Dict[str, Any]): The parsed JSON data from gtest output
+ context (str): A string describing the context of the test run
+ (e.g., "Unit Tests", "Integration Tests")
+
+ Returns:
+ str: A complete markdown document containing:
+ - Header with timestamp
+ - Summary statistics
+ - Detailed test results table
+ - Failure details for failed tests
+
+ The output markdown includes:
+ - A table with columns: Test Suite, Test Case, Status, Duration
+ - Emoji indicators (✅ for pass, ❌ for fail)
+ - Formatted duration strings
+ - Code blocks for failure messages
+ """
+ output = []
+
+ # Add header with timestamp
+ timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+ output.append(f"# {context} Results ({timestamp})\n")
+
+ # Add summary
+ total_tests = data.get("tests", 0)
+ failed_tests = data.get("failures", 0)
+ disabled_tests = data.get("disabled", 0)
+ error_tests = data.get("errors", 0)
+ tests_duration = format_duration(parse_duration(data.get("time", 0)))
+ passed_tests = total_tests - failed_tests
+
+ output.append("## Summary\n")
+ output.append(f"- Tests: {total_tests}")
+ output.append(f"- Passed: {passed_tests}")
+ output.append(f"- Failed: {failed_tests}")
+ output.append(f"- Disabled: {disabled_tests}")
+ output.append(f"- Errors: {error_tests}")
+ output.append(f"- Duration: {tests_duration}\n")
+
+ # Add detailed results table
+ output.append("## Details\n")
+ output.append("| Suite | Case | Status | Duration | Details |")
+ output.append("|-------|------|--------|----------|---------|")
+
+ for suite in data.get("testsuites", []):
+ suite_name = suite.get("name", "Unknown Suite")
+ for test in suite.get("testsuite", []):
+ test_name = test.get("name", "Unknown Test")
+ status = "❌ FAIL" if "failures" in test else "✅ PASS"
+ duration = format_duration(parse_duration(test.get("time", 0)))
+ details = []
+
+ # Add failure details if the test failed
+ if "failures" in test:
+ for failure in test["failures"]:
+ details.append(format_failure_message(failure))
+
+ # Add the test result row
+ output.append(f"| {suite_name} | {test_name} | {status} | {duration} | {'
'.join(details)}")
+
+ return "\n".join(output)
+
+def main():
+ """
+ Main entry point for the script.
+
+ Processes command line arguments and converts gtest JSON output to markdown.
+ The script expects two arguments:
+ 1. context: A string describing the test run context
+ 2. gtest_json_file: Path to the JSON file containing gtest results
+
+ The script will:
+ - Read and parse the JSON file
+ - Convert the data to markdown format
+ - Print the markdown to stdout
+
+ Exits with status code 1 if:
+ - Incorrect number of arguments
+ - File not found
+ - Invalid JSON
+ - Any other error occurs
+ """
+ if len(sys.argv) != 3:
+ print("Usage: python gtest_to_markdown.py ")
+ sys.exit(1)
+
+ context = sys.argv[1]
+ json_file = sys.argv[2]
+
+ try:
+ with open(json_file, 'r') as f:
+ data = json.load(f)
+ markdown = convert_to_markdown(data, context)
+ print(markdown)
+ except FileNotFoundError:
+ print(f"Error: File {json_file} not found", file=sys.stderr)
+ sys.exit(1)
+ except json.JSONDecodeError:
+ print(f"Error: Invalid JSON in {json_file}", file=sys.stderr)
+ sys.exit(1)
+ except Exception as e:
+ print(f"Error: {str(e)}", file=sys.stderr)
+ sys.exit(1)
+
+if __name__ == "__main__":
+ main()
\ No newline at end of file
diff --git a/.github/orc_test_to_github_actions.py b/.github/orc_test_to_github_actions.py
deleted file mode 100755
index 49394a8..0000000
--- a/.github/orc_test_to_github_actions.py
+++ /dev/null
@@ -1,6 +0,0 @@
-import sys
-import json
-
-if __name__ == "__main__":
- test_results = json.load(open(sys.argv[1], "r"))
- print(f"::set-output name=orc_test_out::{test_results}");
diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml
index 5fa8448..e3390e6 100644
--- a/.github/workflows/build-and-test.yml
+++ b/.github/workflows/build-and-test.yml
@@ -71,8 +71,8 @@ jobs:
id: run-orc_test
continue-on-error: true
run: |
- ./build/Release/orc_test ./test/battery --json_mode > test_out.json
- python ${GITHUB_WORKSPACE}/.github/orc_test_to_github_actions.py test_out.json
+ ./build/Release/orc_test ./test/battery --gtest_output="json:test_out.json"
+ python ${GITHUB_WORKSPACE}/.github/gtest_to_markdown.py "Release Tests" test_out.json >> $GITHUB_STEP_SUMMARY
- name: 🛠️ orc_test w/ ASan
id: build-orc_test-asan
continue-on-error: true
@@ -82,8 +82,8 @@ jobs:
id: run-orc_test-asan
continue-on-error: true
run: |
- ./build/Release/orc_test ./test/battery --json_mode > test_out.json
- python ${GITHUB_WORKSPACE}/.github/orc_test_to_github_actions.py test_out.json
+ ./build/Release/orc_test ./test/battery --gtest_output="json:test_out.json"
+ python ${GITHUB_WORKSPACE}/.github/gtest_to_markdown.py "Address Sanitizer Tests" test_out.json >> $GITHUB_STEP_SUMMARY
- name: 🛠️ orc_test w/ TSan
id: build-orc_test-tsan
continue-on-error: true
@@ -93,8 +93,8 @@ jobs:
id: run-orc_test-tsan
continue-on-error: true
run: |
- ./build/Release/orc_test ./test/battery --json_mode > test_out.json
- python ${GITHUB_WORKSPACE}/.github/orc_test_to_github_actions.py test_out.json
+ ./build/Release/orc_test ./test/battery --gtest_output="json:test_out.json"
+ python ${GITHUB_WORKSPACE}/.github/gtest_to_markdown.py "Thread Sanitizer Tests" test_out.json >> $GITHUB_STEP_SUMMARY
- name: 🛠️ orc_test w/ UBSan
id: build-orc_test-ubsan
continue-on-error: true
@@ -104,21 +104,5 @@ jobs:
id: run-orc_test-ubsan
continue-on-error: true
run: |
- ./build/Release/orc_test ./test/battery --json_mode > test_out.json
- python ${GITHUB_WORKSPACE}/.github/orc_test_to_github_actions.py test_out.json
- - name: ✏️ github json
- uses: jsdaniell/create-json@1.1.2
- continue-on-error: true
- with:
- name: "github.json"
- json: ${{ toJSON(github) }}
- - name: ✏️ steps json
- uses: jsdaniell/create-json@1.1.2
- continue-on-error: true
- with:
- name: "steps.json"
- json: ${{ toJSON(steps) }}
- - name: ✍️ job summary
- continue-on-error: false
- run: |
- python ${GITHUB_WORKSPACE}/.github/generate_job_summary.py $GITHUB_STEP_SUMMARY github.json steps.json
+ ./build/Release/orc_test ./test/battery --gtest_output="json:test_out.json"
+ python ${GITHUB_WORKSPACE}/.github/gtest_to_markdown.py "Undefined Behavior Sanitizer Tests" test_out.json >> $GITHUB_STEP_SUMMARY
diff --git a/.github/workflows/tagged-release.yml b/.github/workflows/tagged-release.yml
index 52f3a54..a48dda9 100644
--- a/.github/workflows/tagged-release.yml
+++ b/.github/workflows/tagged-release.yml
@@ -15,6 +15,10 @@ jobs:
python-version: 3.8
- name: ⬇️ Checkout sources
uses: actions/checkout@v3
+ with:
+ lfs: true
+ - name: 🏗️ Checkout LFS objects
+ run: git lfs pull
- name: 🏗️ Setup project files
run: |
mkdir build
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 6cb3250..995fd68 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -63,6 +63,23 @@ if (NOT TARGET nlohmann_json::nlohmann_json)
FetchContent_MakeAvailable(json)
endif()
+####################################################################################################
+#
+# Adds support for Google Test.
+#
+
+if (NOT TARGET GTest::gtest)
+ message(STATUS "ORC third-party: creating target 'GTest::gtest'...")
+ FetchContent_Declare(
+ googletest
+ GIT_REPOSITORY https://github.com/google/googletest.git
+ GIT_TAG v1.14.0
+ )
+ # Prevent overriding the parent project's compiler/linker settings
+ set(gtest_force_shared_crt ON CACHE BOOL "" FORCE)
+ FetchContent_MakeAvailable(googletest)
+endif()
+
####################################################################################################
#
# Adds support for the Tracy profiler.
@@ -185,6 +202,7 @@ target_link_libraries(orc_test
tomlplusplus::tomlplusplus
Tracy::TracyClient
nlohmann_json::nlohmann_json
+ GTest::gtest
)
if (PROJECT_IS_TOP_LEVEL)
target_compile_options(orc_test PRIVATE -Wall -Werror)
diff --git a/include/orc/dwarf_structs.hpp b/include/orc/dwarf_structs.hpp
index 3d4b55a..2984872 100644
--- a/include/orc/dwarf_structs.hpp
+++ b/include/orc/dwarf_structs.hpp
@@ -19,6 +19,7 @@
// application
#include "orc/dwarf_constants.hpp"
+#include "orc/fixed_vector.hpp"
#include "orc/hash.hpp"
#include "orc/string_pool.hpp"
@@ -368,43 +369,44 @@ const char* to_string(arch arch);
* question. This facilitates reporting when ODRVs are found, giving the user a breadcrumb as
* to how the ODRV is being introduced. For efficiency purposes, we fix the max number of ancestors
* at compile time, but this can be adjusted if necessary.
- *
- * TODO: Does it make sense to extract this "static vector" type into a template, so that it can
- * be used in other contexts? (e.g., `attribute_sequence`?)
*/
struct object_ancestry {
- std::array<pool_string, 10> _ancestors;
- std::size_t _count{0};
+ orc::fixed_vector<pool_string, 10> _ancestors;
+ auto size() const { return _ancestors.size(); }
auto begin() const { return _ancestors.begin(); }
- auto end() const { return begin() + _count; }
+ auto end() const { return _ancestors.end(); }
auto& back() {
- assert(_count);
- return _ancestors[_count];
+ assert(!_ancestors.empty());
+ return _ancestors.back();
}
const auto& back() const {
- assert(_count);
- return _ancestors[_count];
+ assert(!_ancestors.empty());
+ return _ancestors.back();
}
void emplace_back(pool_string&& ancestor) {
- assert((_count + 1) < _ancestors.size());
- _ancestors[_count++] = std::move(ancestor);
+ assert(_ancestors.size() < _ancestors.capacity());
+ _ancestors.push_back(std::move(ancestor));
}
bool operator<(const object_ancestry& rhs) const {
- if (_count < rhs._count)
+ if (_ancestors.size() < rhs._ancestors.size())
return true;
- if (_count > rhs._count)
+
+ if (_ancestors.size() > rhs._ancestors.size())
return false;
- for(size_t i=0; i<_count; ++i) {
+
+ for (size_t i = 0; i < _ancestors.size(); ++i) {
if (_ancestors[i].view() < rhs._ancestors[i].view())
return true;
+
if (_ancestors[i].view() > rhs._ancestors[i].view())
return false;
}
+
return false;
}
};
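Reviewer note: the rewritten `object_ancestry::operator<` orders by ancestor count first and only then elementwise by string view. A minimal standalone sketch of that comparison logic, using `std::vector<std::string>` as a hypothetical stand-in for `fixed_vector<pool_string, N>` (names here are illustrative, not from the source):

```cpp
#include <cassert>
#include <cstddef>
#include <string>
#include <vector>

// hypothetical stand-in for object_ancestry, for illustration only
using ancestry = std::vector<std::string>;

// mirrors the size-first, then lexicographic-by-element ordering above
bool ancestry_less(const ancestry& lhs, const ancestry& rhs) {
    if (lhs.size() < rhs.size()) return true;
    if (lhs.size() > rhs.size()) return false;
    for (std::size_t i = 0; i < lhs.size(); ++i) {
        if (lhs[i] < rhs[i]) return true;
        if (lhs[i] > rhs[i]) return false;
    }
    return false; // equal ancestries compare not-less
}

int main() {
    // a shorter chain always orders first, regardless of contents
    assert(ancestry_less({"libz.a"}, {"liba.a", "main.o"}));
    // equal sizes fall back to elementwise comparison
    assert(ancestry_less({"liba.a", "a.o"}, {"liba.a", "b.o"}));
}
```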
diff --git a/include/orc/fixed_vector.hpp b/include/orc/fixed_vector.hpp
new file mode 100644
index 0000000..3a8650b
--- /dev/null
+++ b/include/orc/fixed_vector.hpp
@@ -0,0 +1,337 @@
+// Copyright 2025 Adobe
+// All Rights Reserved.
+//
+// NOTICE: Adobe permits you to use, modify, and distribute this file in accordance with the terms
+// of the Adobe license agreement accompanying it.
+
+#pragma once
+
+// stdc++
+#include <algorithm>
+#include <array>
+#include <stdexcept>
+
+// adobe contract checks
+#include "adobe/contract_checks.hpp"
+
+//--------------------------------------------------------------------------------------------------
+
+namespace orc {
+
+//--------------------------------------------------------------------------------------------------
+
+/**
+ * @brief A fixed-size vector container that provides a subset of `std::vector` functionality
+ *
+ * @tparam T The type of elements stored in the vector
+ * @tparam N The maximum number of elements the vector can hold
+ *
+ * This container provides a fixed-size alternative to `std::vector` with similar interface.
+ * It guarantees that memory is allocated on the stack and never reallocates.
+ * Operations that would exceed the fixed capacity `N` will terminate the program.
+ */
+template <class T, std::size_t N>
+struct fixed_vector {
+ using value_type = T;
+ using array_type = std::array<T, N>;
+ using size_type = typename array_type::size_type;
+ using iterator = typename array_type::iterator;
+ using const_iterator = typename array_type::const_iterator;
+ using reverse_iterator = std::reverse_iterator<iterator>;
+ using const_reverse_iterator = std::reverse_iterator<const_iterator>;
+
+ // Constructors
+ fixed_vector() = default;
+ fixed_vector(const fixed_vector&) = default;
+ fixed_vector& operator=(const fixed_vector&) = default;
+
+ /**
+ * @brief Move constructor
+ *
+ * @param rhs The `fixed_vector` to move from
+ *
+ * @post `rhs` is left in an empty state
+ */
+ fixed_vector(fixed_vector&& rhs) : _a(std::move(rhs._a)), _n(rhs._n) {
+ rhs._n = 0;
+ }
+
+ /**
+ * @brief Move assignment operator
+ *
+ * @param rhs The `fixed_vector` to move from
+ * @return Reference to this `fixed_vector`
+ *
+ * @post `rhs` is left in an empty state
+ */
+ fixed_vector& operator=(fixed_vector&& rhs) {
+ _a = std::move(rhs._a);
+ _n = rhs._n;
+ rhs._n = 0;
+ return *this;
+ }
+
+ /**
+ * @brief Constructs a fixed_vector with count copies of value
+ *
+ * @param count Number of elements to create
+ * @param value Value to initialize elements with
+ *
+ * @pre count <= N
+ * @note If count > N, the program will terminate.
+ */
+ fixed_vector(size_type count, const T& value) {
+ ADOBE_PRECONDITION(count <= N, "fixed_vector overflow");
+ for (size_type i = 0; i < count; ++i) {
+ push_back(value);
+ }
+ }
+
+ /**
+ * @brief Access element at specified position with bounds checking
+ *
+ * @param pos Position of the element to return
+ * @return Reference to the requested element
+ *
+ * @throw std::out_of_range if pos >= size()
+ */
+ T& at(size_type pos) {
+ if (pos >= _n) {
+ throw std::out_of_range("fixed_vector::at");
+ }
+ return _a[pos];
+ }
+
+ /**
+ * @brief Access element at specified position with bounds checking (const version)
+ *
+ * @param pos Position of the element to return
+ * @return Const reference to the requested element
+ *
+ * @throw std::out_of_range if pos >= size()
+ */
+ const T& at(size_type pos) const {
+ if (pos >= _n) {
+ throw std::out_of_range("fixed_vector::at");
+ }
+ return _a[pos];
+ }
+
+ /**
+ * @brief Access element at specified position without bounds checking
+ *
+ * @param pos Position of the element to return
+ * @return Reference to the requested element
+ *
+ * @pre pos < size()
+ */
+ T& operator[](size_type pos) { return _a[pos]; }
+
+ /**
+ * @brief Access element at specified position without bounds checking (const version)
+ *
+ * @param pos Position of the element to return
+ * @return Const reference to the requested element
+ *
+ * @pre pos < size()
+ */
+ const T& operator[](size_type pos) const { return _a[pos]; }
+
+ /**
+ * @brief Returns reference to the first element
+ *
+ * @return Reference to the first element
+ *
+ * @pre !empty()
+ */
+ T& front() {
+ ADOBE_PRECONDITION(!empty(), "fixed_vector is empty");
+ return _a[0];
+ }
+
+ /**
+ * @brief Returns const reference to the first element
+ *
+ * @return Const reference to the first element
+ *
+ * @pre !empty()
+ */
+ const T& front() const {
+ ADOBE_PRECONDITION(!empty(), "fixed_vector is empty");
+ return _a[0];
+ }
+
+ /**
+ * @brief Returns reference to the last element
+ *
+ * @return Reference to the last element
+ *
+ * @pre !empty()
+ */
+ T& back() {
+ ADOBE_PRECONDITION(!empty(), "fixed_vector is empty");
+ return _a[_n - 1];
+ }
+
+ /**
+ * @brief Returns const reference to the last element
+ *
+ * @return Const reference to the last element
+ *
+ * @pre !empty()
+ */
+ const T& back() const {
+ ADOBE_PRECONDITION(!empty(), "fixed_vector is empty");
+ return _a[_n - 1];
+ }
+
+ // Capacity
+ size_type size() const { return _n; }
+ bool empty() const { return _n == 0; }
+ size_type max_size() const { return N; }
+ size_type capacity() const { return N; }
+
+ /**
+ * @brief Adds an element to the end
+ *
+ * @param x Value to append
+ *
+ * @pre size() < N, otherwise the program will terminate.
+ */
+ void push_back(const T& x) {
+ ADOBE_PRECONDITION(_n < N, "fixed_vector overflow");
+ _a[_n++] = x;
+ }
+
+ /**
+ * @brief Removes the last element
+ *
+ * @pre !empty(), otherwise the program will terminate.
+ * @post The last element is destroyed and size() is decremented by 1
+ */
+ void pop_back() {
+ ADOBE_PRECONDITION(_n > 0, "fixed_vector underflow");
+ back() = T();
+ --_n;
+ }
+
+ /**
+ * @brief Removes all elements
+ *
+ * @post size() == 0
+ */
+ void clear() {
+ while (!empty()) {
+ pop_back();
+ }
+ }
+
+ /**
+ * @brief Inserts value before pos
+ *
+ * @param pos Iterator before which the content will be inserted
+ * @param value Element value to insert
+ * @return Iterator pointing to the inserted value
+ *
+ * @pre size() < N, otherwise the program will terminate.
+ */
+ iterator insert(iterator pos, const T& value) {
+ auto old_end = end();
+ push_back(value);
+ std::rotate(pos, old_end, end());
+ return pos;
+ }
+
+ /**
+ * @brief Inserts elements from range [first, last) before pos
+ *
+ * @param pos Iterator before which the content will be inserted
+ * @param first Iterator to the first element to insert
+ * @param last Iterator past the last element to insert
+ * @return Iterator pointing to the first inserted element
+ *
+ * @pre size() + std::distance(first, last) <= N, otherwise the program will terminate.
+ */
+ template <class Iterator>
+ iterator insert(iterator pos, Iterator first, Iterator last) {
+ iterator old_end = end();
+ while (first != last) {
+ push_back(*first++);
+ }
+ std::rotate(pos, old_end, end());
+ return pos;
+ }
+
+ /**
+ * @brief Removes element at pos
+ *
+ * @param pos Iterator to the element to remove
+ * @return Iterator following the last removed element
+ *
+ * @pre !empty(), otherwise the program will terminate.
+ * @post size() is decremented by 1
+ */
+ auto erase(iterator pos) {
+ ADOBE_PRECONDITION(_n > 0, "fixed_vector underflow");
+ std::rotate(pos, std::next(pos), end());
+ back() = T();
+ --_n;
+ return pos;
+ }
+
+ // Iterators
+ auto begin() { return _a.begin(); }
+ auto begin() const { return _a.begin(); }
+ auto cbegin() const { return _a.begin(); }
+
+ auto end() { return std::next(begin(), _n); }
+ auto end() const { return std::next(begin(), _n); }
+ auto cend() const { return std::next(cbegin(), _n); }
+
+ auto rbegin() { return reverse_iterator(end()); }
+ auto rbegin() const { return const_reverse_iterator(end()); }
+ auto crbegin() const { return const_reverse_iterator(cend()); }
+
+ auto rend() { return reverse_iterator(begin()); }
+ auto rend() const { return const_reverse_iterator(begin()); }
+ auto crend() const { return const_reverse_iterator(cbegin()); }
+
+ friend void swap(fixed_vector& lhs, fixed_vector& rhs) {
+ std::swap(lhs._a, rhs._a);
+ std::swap(lhs._n, rhs._n);
+ }
+
+private:
+ array_type _a;
+ size_type _n{0};
+};
+
+/**
+ * @brief Equality comparison operator
+ *
+ * @param lhs First fixed_vector to compare
+ * @param rhs Second fixed_vector to compare
+ * @return true if the vectors have the same size and elements, false otherwise
+ */
+template <class T, std::size_t N>
+bool operator==(const fixed_vector<T, N>& lhs, const fixed_vector<T, N>& rhs) {
+ return std::equal(lhs.begin(), lhs.end(), rhs.begin(), rhs.end());
+}
+
+/**
+ * @brief Inequality comparison operator
+ *
+ * @param lhs First fixed_vector to compare
+ * @param rhs Second fixed_vector to compare
+ * @return true if the vectors are not equal, false otherwise
+ */
+template <class T, std::size_t N>
+bool operator!=(const fixed_vector<T, N>& lhs, const fixed_vector<T, N>& rhs) {
+ return !(lhs == rhs);
+}
+
+//--------------------------------------------------------------------------------------------------
+
+} // namespace orc
+
+//--------------------------------------------------------------------------------------------------
diff --git a/src/dwarf.cpp b/src/dwarf.cpp
index 808cea5..10dbd95 100644
--- a/src/dwarf.cpp
+++ b/src/dwarf.cpp
@@ -18,6 +18,7 @@
// application
#include "orc/dwarf_structs.hpp"
#include "orc/features.hpp"
+#include "orc/fixed_vector.hpp"
#include "orc/object_file_registry.hpp"
#include "orc/orc.hpp"
#include "orc/settings.hpp"
@@ -531,9 +532,8 @@ struct line_header {
};
//--------------------------------------------------------------------------------------------------
-// It is fixed to keep allocations from happening.
-constexpr std::size_t max_names_k{32};
-using fixed_attribute_array = std::array<dw::at, max_names_k>;
+
+using fixed_attribute_array = orc::fixed_vector<dw::at, 32>;
/**
* @brief Extracts fatal attributes from an attribute sequence
@@ -553,15 +553,17 @@ using fixed_attribute_array = std::array<dw::at, max_names_k>;
* @note The function is limited to processing `max_names_k` fatal attributes.
*/
fixed_attribute_array fatal_attributes_within(const attribute_sequence& attributes) {
- fixed_attribute_array names{dw::at::none};
- std::size_t count{0};
+ fixed_attribute_array names;
for (const auto& attr : attributes) {
- if (nonfatal_attribute(attr._name)) continue;
- ADOBE_INVARIANT(count < (max_names_k - 1), "fatal_attribute_hash names overflow");
- names[count++] = attr._name;
+ if (nonfatal_attribute(attr._name)) {
+ continue;
+ }
+
+ names.push_back(attr._name);
}
- std::sort(&names[0], &names[count]);
+
+ std::sort(names.begin(), names.end());
return names;
}
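Reviewer note: the `fatal_attributes_within` change above replaces manual count bookkeeping with the container's own bounds checks. A self-contained sketch of the same collect-filter-sort pattern; `attr_id`, `is_nonfatal`, and `fatal_subset` are hypothetical stand-ins for `dw::at`, `nonfatal_attribute`, and the real function:

```cpp
#include "orc/fixed_vector.hpp"

#include <algorithm>
#include <initializer_list>

enum class attr_id { name, type, location, producer };

// hypothetical filter; the real predicate is nonfatal_attribute in dwarf.cpp
bool is_nonfatal(attr_id a) { return a == attr_id::producer; }

orc::fixed_vector<attr_id, 32> fatal_subset(std::initializer_list<attr_id> attrs) {
    orc::fixed_vector<attr_id, 32> result;
    for (auto a : attrs) {
        if (is_nonfatal(a)) continue; // skip attributes that cannot contribute to an ODRV
        result.push_back(a);          // overflow past 32 trips the push_back precondition
    }
    std::sort(result.begin(), result.end()); // sorted so downstream hashing is order-stable
    return result;
}
```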
diff --git a/src/orc.cpp b/src/orc.cpp
index f6d474b..f957d76 100644
--- a/src/orc.cpp
+++ b/src/orc.cpp
@@ -589,9 +589,9 @@ void to_json(nlohmann::json& j, const odrv_report::conflict_details& c) {
auto& location_json = instances[location_str];
for (const auto& ancestry : locations.at(location)) {
auto* node = &location_json;
- for (std::size_t i = 0; i < ancestry._count; ++i) {
+ for (std::size_t i = 0; i < ancestry.size(); ++i) {
const std::string key = ancestry._ancestors[i].allocate_string();
- if (i == (ancestry._count - 1)) {
+ if (i == (ancestry.size() - 1)) {
(*node)["object_files"].push_back(key);
} else {
node = &(*node)[key];
diff --git a/test/src/fixed_vector_tests.cpp b/test/src/fixed_vector_tests.cpp
new file mode 100644
index 0000000..7568050
--- /dev/null
+++ b/test/src/fixed_vector_tests.cpp
@@ -0,0 +1,205 @@
+// identity
+#include "orc/fixed_vector.hpp"
+
+// stdc++
+#include <iterator>
+#include <stdexcept>
+#include <string>
+#include <vector>
+
+// gtest
+#include <gtest/gtest.h>
+
+using namespace orc;
+
+// Constructor tests
+TEST(FixedVectorTest, DefaultConstructor) {
+ fixed_vector<int, 5> vec;
+ EXPECT_TRUE(vec.empty());
+ EXPECT_EQ(vec.size(), 0);
+}
+
+TEST(FixedVectorTest, FillConstructor) {
+ fixed_vector<int, 5> vec(3, 0);
+ EXPECT_EQ(vec.size(), 3);
+}
+
+// Element access tests
+TEST(FixedVectorTest, AtAccess) {
+ fixed_vector<int, 5> vec;
+ vec.push_back(0);
+ EXPECT_NO_THROW(vec.at(0));
+ EXPECT_THROW(vec.at(1), std::out_of_range);
+}
+
+TEST(FixedVectorTest, OperatorBracketAccess) {
+ fixed_vector<int, 5> vec;
+ vec.push_back(0);
+ EXPECT_NO_THROW(vec[0]);
+}
+
+TEST(FixedVectorTest, FrontBackAccess) {
+ fixed_vector<int, 5> vec;
+ vec.push_back(0);
+ vec.push_back(1);
+ EXPECT_NO_THROW(vec.front());
+ EXPECT_EQ(vec.front(), 0);
+ EXPECT_NO_THROW(vec.back());
+ EXPECT_EQ(vec.back(), 1);
+}
+
+// Iterator tests
+TEST(FixedVectorTest, IteratorOperations) {
+ fixed_vector<int, 5> vec;
+ vec.push_back(0);
+ vec.push_back(1);
+
+ EXPECT_EQ(std::distance(vec.begin(), vec.end()), 2);
+ EXPECT_EQ(std::distance(vec.cbegin(), vec.cend()), 2);
+ EXPECT_EQ(std::distance(vec.rbegin(), vec.rend()), 2);
+}
+
+// Capacity tests
+TEST(FixedVectorTest, CapacityOperations) {
+ fixed_vector<int, 5> vec;
+ EXPECT_EQ(vec.max_size(), 5);
+ EXPECT_EQ(vec.capacity(), 5);
+ EXPECT_TRUE(vec.empty());
+
+ vec.push_back(0);
+ EXPECT_FALSE(vec.empty());
+ EXPECT_EQ(vec.size(), 1);
+}
+
+// Modifier tests
+TEST(FixedVectorTest, PushBack) {
+ fixed_vector<int, 5> vec;
+ for (int i = 0; i < 5; ++i) {
+ EXPECT_NO_THROW(vec.push_back(i));
+ EXPECT_EQ(vec.back(), i);
+ }
+}
+
+TEST(FixedVectorTest, PopBack) {
+ fixed_vector<int, 5> vec;
+ vec.push_back(0);
+ EXPECT_EQ(vec.size(), 1);
+ EXPECT_NO_THROW(vec.pop_back());
+ EXPECT_EQ(vec.size(), 0);
+}
+
+TEST(FixedVectorTest, Clear) {
+ fixed_vector<int, 5> vec;
+ vec.push_back(0);
+ vec.push_back(1);
+ EXPECT_EQ(vec.size(), 2);
+ vec.clear();
+ EXPECT_TRUE(vec.empty());
+}
+
+TEST(FixedVectorTest, Insert) {
+ fixed_vector<int, 5> vec;
+ vec.push_back(1);
+ auto it = vec.insert(vec.begin(), 0);
+ EXPECT_EQ(it, vec.begin());
+ EXPECT_EQ(vec.size(), 2);
+ EXPECT_EQ(vec[0], 0);
+ EXPECT_EQ(vec[1], 1);
+}
+
+TEST(FixedVectorTest, Erase) {
+ fixed_vector<int, 5> vec;
+ vec.push_back(0);
+ vec.push_back(1);
+ EXPECT_EQ(vec[0], 0);
+ EXPECT_EQ(vec[1], 1);
+ auto it = vec.erase(vec.begin());
+ EXPECT_EQ(vec.size(), 1);
+ EXPECT_EQ(it, vec.begin());
+ EXPECT_EQ(vec[0], 1);
+}
+
+// Non-member function tests
+TEST(FixedVectorTest, ComparisonOperators) {
+ fixed_vector<int, 5> vec1;
+ fixed_vector<int, 5> vec2;
+
+ vec1.push_back(0);
+ vec2.push_back(0);
+
+ EXPECT_TRUE(vec1 == vec2);
+ EXPECT_FALSE(vec1 != vec2);
+}
+
+TEST(FixedVectorTest, Swap) {
+ fixed_vector<int, 5> vec1;
+ fixed_vector<int, 5> vec2;
+
+ vec1.push_back(0);
+ vec2.push_back(0);
+ vec2.push_back(1);
+
+ swap(vec1, vec2);
+ EXPECT_EQ(vec1.size(), 2);
+ EXPECT_EQ(vec2.size(), 1);
+}
+
+// Special test for string type
+TEST(FixedVectorTest, StringOperations) {
+ fixed_vector<std::string, 5> vec;
+ vec.push_back("hello");
+ vec.push_back("world");
+
+ EXPECT_EQ(vec[0], "hello");
+ EXPECT_EQ(vec[1], "world");
+ EXPECT_EQ(vec.size(), 2);
+}
+
+// Test for move semantics
+TEST(FixedVectorTest, MoveOperations) {
+ fixed_vector<std::string, 5> vec1;
+ vec1.push_back("hello");
+
+ fixed_vector<std::string, 5> vec2(std::move(vec1));
+ EXPECT_TRUE(vec1.empty());
+ EXPECT_EQ(vec2.size(), 1);
+ EXPECT_EQ(vec2[0], "hello");
+
+ fixed_vector<std::string, 5> vec3;
+ vec3 = std::move(vec2);
+ EXPECT_TRUE(vec2.empty());
+ EXPECT_EQ(vec3.size(), 1);
+ EXPECT_EQ(vec3[0], "hello");
+}
+
+// Test for range-based for loop
+TEST(FixedVectorTest, RangeBasedFor) {
+ fixed_vector<int, 5> vec;
+ vec.push_back(1);
+ vec.push_back(2);
+ vec.push_back(3);
+
+ int sum = 0;
+ for (const auto& x : vec) {
+ sum += x;
+ }
+ EXPECT_EQ(sum, 6);
+}
+
+// Test for reverse iterators
+TEST(FixedVectorTest, ReverseIterators) {
+ fixed_vector<int, 5> vec;
+ vec.push_back(1);
+ vec.push_back(2);
+ vec.push_back(3);
+
+ std::vector<int> reversed;
+ for (auto it = vec.rbegin(); it != vec.rend(); ++it) {
+ reversed.push_back(*it);
+ }
+
+ EXPECT_EQ(reversed.size(), 3);
+ EXPECT_EQ(reversed[0], 3);
+ EXPECT_EQ(reversed[1], 2);
+ EXPECT_EQ(reversed[2], 1);
+}
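Reviewer note: one gap in the suite above is that the terminating preconditions (overflow/underflow) are never exercised. A hedged sketch of how they could be covered with a gtest death test, assuming `ADOBE_PRECONDITION` aborts the process on failure (this depends on how adobe/contract_checks is configured):

```cpp
#include "orc/fixed_vector.hpp"

#include <gtest/gtest.h>

TEST(FixedVectorDeathTest, PushBackPastCapacityTerminates) {
    orc::fixed_vector<int, 1> vec;
    vec.push_back(0);
    EXPECT_DEATH(vec.push_back(1), ""); // overflow trips the precondition
}

TEST(FixedVectorDeathTest, PopBackWhenEmptyTerminates) {
    orc::fixed_vector<int, 1> vec;
    EXPECT_DEATH(vec.pop_back(), ""); // underflow trips the precondition
}
```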
diff --git a/test/src/main.cpp b/test/src/main.cpp
index 0a394de..4099c72 100644
--- a/test/src/main.cpp
+++ b/test/src/main.cpp
@@ -19,6 +19,9 @@
#include
#include
+// Google Test
+#include <gtest/gtest.h>
+
//--------------------------------------------------------------------------------------------------
namespace {
@@ -224,6 +227,7 @@ struct expected_odrv {
const std::string& linkage_name() const { return (*this)["linkage_name"]; }
};
+#if 0
std::ostream& operator<<(std::ostream& s, const expected_odrv& x) {
// map is unordered, so we have to sort the keys...
std::vector<std::string> keys;
@@ -236,32 +240,25 @@ std::ostream& operator<<(std::ostream& s, const expected_odrv& x) {
}
return s;
}
+#endif
//--------------------------------------------------------------------------------------------------
const char* to_string(toml::node_type x) {
+ // clang-format off
switch (x) {
- case toml::node_type::none:
- return "none";
- case toml::node_type::table:
- return "table";
- case toml::node_type::array:
- return "array";
- case toml::node_type::string:
- return "string";
- case toml::node_type::integer:
- return "integer";
- case toml::node_type::floating_point:
- return "floating_point";
- case toml::node_type::boolean:
- return "boolean";
- case toml::node_type::date:
- return "date";
- case toml::node_type::time:
- return "time";
- case toml::node_type::date_time:
- return "date_time";
+ case toml::node_type::none: return "none";
+ case toml::node_type::table: return "table";
+ case toml::node_type::array: return "array";
+ case toml::node_type::string: return "string";
+ case toml::node_type::integer: return "integer";
+ case toml::node_type::floating_point: return "floating_point";
+ case toml::node_type::boolean: return "boolean";
+ case toml::node_type::date: return "date";
+ case toml::node_type::time: return "time";
+ case toml::node_type::date_time: return "date_time";
}
+ // clang-format on
assert(false);
}
@@ -359,7 +356,7 @@ std::vector<std::filesystem::path> compile_compilation_units(const std::filesyst
std::vector<std::filesystem::path> object_files;
const bool preserve_object_files =
settings["orc_test_flags"]["preserve_object_files"].value_or(false);
- console() << "Compiling " << units.size() << " source file(s):\n";
+ // console() << "Compiling " << units.size() << " source file(s):\n";
for (auto& unit : units) {
auto temp_path = sanitize(object_file_path(home, unit));
if (preserve_object_files) {
@@ -380,8 +377,8 @@ std::vector<std::filesystem::path> compile_compilation_units(const std::filesyst
throw std::runtime_error("unexpected compilation failure");
}
object_files.emplace_back(std::move(temp_path));
- console() << " " << unit._src.filename() << " -> " << object_files.back().filename()
- << '\n';
+ // console() << " " << unit._src.filename() << " -> " << object_files.back().filename()
+ // << '\n';
}
return object_files;
}
@@ -439,7 +436,22 @@ bool odrv_report_match(const expected_odrv& odrv, const odrv_report& report) {
}
//--------------------------------------------------------------------------------------------------
-// return `false` if no error, or `true` on error.
+/**
+ * @brief Validates runtime metrics against expected values defined in settings
+ *
+ * This function compares various metrics collected during an ORC test pass
+ * against expected values specified in the TOML configuration. It reports
+ * any mismatches to the error console.
+ *
+ * @param settings The TOML configuration table containing expected metric values
+ *
+ * @pre The settings parameter should contain a "metrics" table with integer values
+ * for the metrics to be validated
+ * @pre The globals singleton should be initialized with the actual metrics
+ *
+ * @return true if any validation failures occurred (metrics didn't match expected values)
+ * @return false if all metrics matched or if no metrics table was found in settings
+ */
bool metrics_validation(const toml::table& settings) {
const toml::table* expected_ptr = settings["metrics"].as_table();
@@ -451,16 +463,13 @@ bool metrics_validation(const toml::table& settings) {
const globals& metrics = globals::instance();
bool failure = false;
- const auto compare_field = [&expected](const std::atomic_size_t& field, const char* key) -> bool {
+ const auto compare_field = [&expected](const std::atomic_size_t& field,
+ const char* key) -> bool {
const toml::value<int64_t>* file_count_ptr = expected[key].as_integer();
if (!file_count_ptr) return false;
int64_t expected = **file_count_ptr;
if (expected == field) return false;
- console_error() << key
- << " mismatch (expected "
- << expected
- << "; calculated "
- << field
+ console_error() << key << " mismatch (expected " << expected << "; calculated " << field
<< ")\n";
return true;
};
@@ -479,22 +488,55 @@ bool metrics_validation(const toml::table& settings) {
constexpr const char* tomlname_k = "odrv_test.toml";
//--------------------------------------------------------------------------------------------------
+/**
+ * @brief Test fixture for ORC tests
+ *
+ * This class represents a test fixture for running ORC tests on a specific test directory.
+ * It handles:
+ * - Loading and parsing the test configuration from a TOML file
+ * - Compiling source files if needed
+ * - Processing object files to detect ODR violations
+ * - Validating metrics and ODRV reports against expected values
+ */
+class orc_test_instance : public ::testing::Test {
+ std::filesystem::path _path;
-std::size_t run_battery_test(const std::filesystem::path& home) {
- static bool first_s = false;
+public:
+ explicit orc_test_instance(std::filesystem::path&& path) : _path(std::move(path)) {}
- if (!first_s) {
- console() << '\n';
- } else {
- first_s = false;
- }
+protected:
+ void SetUp() override { orc_reset(); }
- assume(is_directory(home), "\"" + home.string() + "\" is not a directory");
- std::filesystem::path tomlpath = home / tomlname_k;
- assume(is_regular_file(tomlpath), "\"" + tomlpath.string() + "\" is not a regular file");
- toml::table settings;
+ void TestBody() override;
+};
- console() << "-=-=- Test: " << home << "\n";
+//--------------------------------------------------------------------------------------------------
+/**
+ * @brief Implements the logic for an ORC test
+ *
+ * This method executes the main logic for an ORC test:
+ * 1. Validates and loads the test configuration from a TOML file
+ * 2. Compiles source files (if any are specified)
+ * 3. Collects object files for processing (if any are specified)
+ * 4. Processes all object files to detect ODR violations
+ * 5. Validates metrics against expected values
+ * 6. Validates detected ODR violations against expected violations
+ *
+ * The test will be skipped if the "disable" flag is set in the configuration.
+ *
+ * @pre The `_path` member must point to a valid directory containing a valid TOML configuration
+ * file
+ * @pre The TOML file must follow the expected format for ORC test configuration
+ * @post Test assertions are made to validate metrics and ODR violation reports
+ * @post If the test is disabled in configuration, it will be skipped
+ * @throws std::runtime_error If the TOML file cannot be parsed or other critical errors occur
+ */
+void orc_test_instance::TestBody() {
+ assume(std::filesystem::is_directory(_path), "\"" + _path.string() + "\" is not a directory");
+ std::filesystem::path tomlpath = _path / tomlname_k;
+ assume(std::filesystem::is_regular_file(tomlpath),
+ "\"" + tomlpath.string() + "\" is not a regular file");
+ toml::table settings;
try {
settings = toml::parse_file(tomlpath.string());
@@ -503,110 +545,88 @@ std::size_t run_battery_test(const std::filesystem::path& home) {
throw std::runtime_error("settings file parsing error");
}
- // Save this for debugging purposes.
- // console_error() << toml::json_formatter{settings} << '\n';
-
if (settings["orc_test_flags"]["disable"].value_or(false)) {
logging::notice("test disabled");
- return 0;
+ GTEST_SKIP() << "Test disabled in configuration";
+ return;
}
- auto test_name = home.stem().string();
+ auto compilation_units = derive_compilation_units(_path, settings);
std::vector<std::filesystem::path> object_files;
- auto compilation_units = derive_compilation_units(home, settings);
if (!compilation_units.empty()) {
- object_files = compile_compilation_units(home, settings, compilation_units);
+ object_files = compile_compilation_units(_path, settings, compilation_units);
}
- std::vector<std::filesystem::path> direct_object_files = derive_object_files(home, settings);
- object_files.insert(object_files.end(), std::move_iterator(direct_object_files.begin()), std::move_iterator(direct_object_files.end()));
-
- // we can have zero of these now, it's okay.
- auto expected_odrvs = derive_expected_odrvs(home, settings);
-
- orc_reset();
-
- // save for debugging.
- // settings::instance()._parallel_processing = false;
+ std::vector<std::filesystem::path> direct_object_files = derive_object_files(_path, settings);
+ object_files.insert(object_files.end(), std::make_move_iterator(direct_object_files.begin()),
+ std::make_move_iterator(direct_object_files.end()));
+ auto expected_odrvs = derive_expected_odrvs(_path, settings);
const std::vector<odrv_report> reports = orc_process(std::move(object_files));
- const globals& metrics = globals::instance();
-
- console() << "ODRVs expected: " << expected_odrvs.size() << "; reported: " << reports.size()
- << '\n';
-
- toml::table result;
- result.insert("expected", static_cast(expected_odrvs.size()));
- result.insert("reported", static_cast(reports.size()));
-
- toml::table toml_metrics;
- toml_metrics.insert("object_file_count", static_cast(metrics._object_file_count));
- toml_metrics.insert("odrv_count", static_cast(metrics._odrv_count));
- toml_metrics.insert("unique_symbol_count", static_cast(metrics._unique_symbol_count));
- toml_metrics.insert("die_processed_count", static_cast(metrics._die_processed_count));
- toml_metrics.insert("die_skipped_count", static_cast(metrics._die_skipped_count));
- result.insert("metrics", std::move(toml_metrics));
- toml_out().insert(test_name, std::move(result));
-
- //
- // metrics validation
- //
+ // Validate metrics
bool metrics_failure = metrics_validation(settings);
-
- //
- // ODRV report validation
- //
- // At this point, the reports.size() should match the expected_odrvs.size()
- //
- bool unexpected_result = expected_odrvs.size() != reports.size();
-
- // If things are okay so far, make sure each ODRV reported is expected.
- if (!unexpected_result) {
- for (const auto& report : reports) {
- auto found =
- std::find_if(expected_odrvs.begin(), expected_odrvs.end(),
- [&](const auto& odrv) { return odrv_report_match(odrv, report); });
-
- if (found == expected_odrvs.end()) {
- unexpected_result = true;
- break;
- }
-
- console() << " Found expected ODRV: " << report.reporting_categories() << "\n";
- }
- }
-
- if (unexpected_result) {
- console_error() << "Reported ODRV(s):\n";
-
- // If there's an error in the test, dump what we've found to assist debugging.
- for (const auto& report : reports) {
- console() << report << '\n';
- }
-
- console_error() << "Expected ODRV(s):\n";
- std::size_t count{0};
- for (const auto& expected : expected_odrvs) {
- console() << ++count << ":\n" << expected << '\n';
- }
-
- console_error() << "\nIn battery " << home << ": ODRV count mismatch";
+ EXPECT_FALSE(metrics_failure) << "Metrics validation failed for " << _path;
+
+ // Validate ODRV reports
+ EXPECT_EQ(expected_odrvs.size(), reports.size()) << "ODRV count mismatch for " << _path;
+
+ // Check each reported ODRV against expected ones
+ for (const auto& report : reports) {
+ auto found =
+ std::find_if(expected_odrvs.begin(), expected_odrvs.end(),
+ [&](const auto& odrv) { return odrv_report_match(odrv, report); });
+ EXPECT_NE(found, expected_odrvs.end())
+ << "Unexpected ODRV found: " << report << " in " << _path;
}
-
- return metrics_failure + unexpected_result;
}
//--------------------------------------------------------------------------------------------------
+/**
+ * Creates a Google Test case for a single orc_test test.
+ *
+ * This function registers a new test case with Google Test framework using the
+ * directory name as the test name. Any hyphens in the directory name are
+ * replaced with underscores to conform to C++ identifier naming rules.
+ *
+ * @param home The filesystem path to the test battery directory
+ *
+ * @pre The path must exist and be a valid directory containing test files
+ * @post A new test case is registered with Google Test framework that will
+ * create an `orc_test_instance` with the provided path when executed
+ */
+void create_test(const std::filesystem::path& home) {
+ std::string test_name = home.stem().string();
+ std::replace(test_name.begin(), test_name.end(), '-', '_');
+
+ ::testing::RegisterTest(
+ "orc_test", test_name.c_str(), nullptr, nullptr, __FILE__, __LINE__,
+ [_home = home]() mutable -> ::testing::Test* { return new orc_test_instance(std::move(_home)); });
+}
+//--------------------------------------------------------------------------------------------------
+/**
+ * Recursively traverses a directory tree to find and register tests.
+ *
+ * This function walks through the provided directory and all its subdirectories,
+ * looking for directories that contain a TOML configuration file (indicated by
+ * tomlname_k). When such a directory is found, it's registered as a test.
+ *
+ * @param directory The filesystem path to start traversal from
+ * @return The number of errors encountered during traversal
+ *
+ * @pre The path must exist and be a valid directory
+ * @post All valid tests in the directory tree are registered with the
+ * testing framework via create_test()
+ */
std::size_t traverse_directory_tree(const std::filesystem::path& directory) {
assert(is_directory(directory));
std::size_t errors = 0;
if (exists(directory / tomlname_k)) {
- errors += run_battery_test(directory);
+ create_test(directory);
}
for (const auto& entry : std::filesystem::directory_iterator(directory)) {
@@ -645,8 +665,17 @@ int main(int argc, char** argv) try {
test_settings()._json_mode = argc > 2 && std::string(argv[2]) == "--json_mode";
+ // Traverse the directory tree to find and register tests,
+ // adding them dynamically to the Google Test framework.
std::size_t errors = traverse_directory_tree(battery_path);
+ // Initialize and run Google Test
+ ::testing::InitGoogleTest(&argc, argv);
+ int gtest_result = RUN_ALL_TESTS();
+ if (gtest_result != 0) {
+ return gtest_result;
+ }
+
if (test_settings()._json_mode) {
cout_safe([&](auto& s) { s << toml::json_formatter{toml_out()} << '\n'; });
}
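Reviewer note: for reference, a minimal standalone sketch of the dynamic-registration pattern `main.cpp` now uses, with a trivial fixture in place of `orc_test_instance` (the fixture, suite, and test names here are hypothetical):

```cpp
#include <gtest/gtest.h>

#include <string>
#include <utility>

class dynamic_case : public ::testing::Test {
    std::string _payload;

public:
    explicit dynamic_case(std::string payload) : _payload(std::move(payload)) {}

protected:
    // TestBody carries the per-test logic, as orc_test_instance::TestBody does above
    void TestBody() override { EXPECT_FALSE(_payload.empty()); }
};

int main(int argc, char** argv) {
    // register one test per "discovered" name, mirroring traverse_directory_tree
    for (const char* name : {"alpha", "beta"}) {
        ::testing::RegisterTest(
            "dynamic_suite", name, nullptr, nullptr, __FILE__, __LINE__,
            [name]() -> ::testing::Test* { return new dynamic_case(name); });
    }

    ::testing::InitGoogleTest(&argc, argv);
    return RUN_ALL_TESTS();
}
```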