From b16dda144ecfb214f1e6c282f7b69ab33919fd8e Mon Sep 17 00:00:00 2001
From: sora_mono <849526320@qq.com>
Date: Sat, 11 Oct 2025 05:02:00 +0800
Subject: [PATCH 1/4] feat: implement advanced multi-stage dev container architecture
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

🚀 Major Features:
• 5-stage Docker build system with parallel execution optimization
• Python-based component architecture for dynamic dependency management
• Independent cache namespaces for true parallel stage execution
• Comprehensive test infrastructure with multi-configuration validation

⚡ Performance Optimizations:
• Inter-stage parallelism: toolchain-builder & dependencies-downloader run concurrently
• Intra-stage parallelism: aria2c multi-connection downloads, batch APT processing
• Cache independence: separate cache IDs (toolchain-builder-*, dependencies-downloader-*, packed-image-*)
• Build test matrix: automated testing across debug/release/optimized configurations

🛠️ Technical Infrastructure:
• Component-based build system with versioned directory structure
• ParallelTaskScheduler with topological dependency resolution
• Automated toolchain construction (glibc, gcc, llvm, linux headers)
• Comprehensive release package creation with manifest generation

📦 Container Architecture:
• base-python-environment → toolchain-builder/dependencies-downloader → image-packer → packed-image → expanded-image
• Dynamic Python version management via config files
• Statically linked libstdc++ support for glibc compatibility
• Professional test script with configuration matrix validation

✨ Developer Experience:
• Multi-compiler support (gcc/clang) with version management
• Automated environment setup and dependency resolution
• Comprehensive documentation in both English and Chinese
• Advanced caching strategies for offline capability

This implements a production-ready development container system that
maximizes build performance through parallel execution, provides
comprehensive toolchain support, and delivers an excellent developer
experience with minimal setup.
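Example usage (illustrative; flags as defined in docker/linux/build.sh and
docker/linux/run.sh in this patch):

    # Build the packed release image with clang, then expand it for development
    ./docker/linux/build.sh --compiler clang --stage packed-image
    ./docker/linux/build.sh --compiler clang --stage expanded-image

    # Create (or attach to) the dev container and run a command inside it
    ./docker/linux/run.sh --compiler clang -- xmake build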
--- config/build_config.py | 442 +++++++++++ config/default-toolchain-version | 6 - config/default-toolchain-version.json | 10 + docker/linux/Dockerfile | 417 +++++------ docker/linux/build.sh | 273 ++++++- docker/linux/run.sh | 213 +++++- docker/linux/test_build.sh | 301 ++++++++ .../utility/build_clice_compiler_toolchain.py | 686 ++++++++++++++++++ docker/linux/utility/build_utils.py | 466 ++++++++++++ docker/linux/utility/common.sh | 0 .../linux/utility/create_release_package.py | 251 +++++++ docker/linux/utility/download_dependencies.py | 372 ++++++++++ docker/linux/utility/local_setup.py | 258 +++++++ docker/linux/utility/pyproject.toml | 8 + docs/en/dev/build.md | 179 ++++- docs/en/dev/dev-container-architecture.md | 610 ++++++++++++++++ docs/zh/dev/dev-container-architecture.md | 610 ++++++++++++++++ xmake.lua | 2 + 18 files changed, 4805 insertions(+), 299 deletions(-) create mode 100644 config/build_config.py delete mode 100644 config/default-toolchain-version create mode 100644 config/default-toolchain-version.json create mode 100644 docker/linux/test_build.sh create mode 100644 docker/linux/utility/build_clice_compiler_toolchain.py create mode 100644 docker/linux/utility/build_utils.py create mode 100644 docker/linux/utility/common.sh create mode 100644 docker/linux/utility/create_release_package.py create mode 100644 docker/linux/utility/download_dependencies.py create mode 100644 docker/linux/utility/local_setup.py create mode 100644 docker/linux/utility/pyproject.toml create mode 100644 docs/en/dev/dev-container-architecture.md create mode 100644 docs/zh/dev/dev-container-architecture.md diff --git a/config/build_config.py b/config/build_config.py new file mode 100644 index 00000000..34ea43fe --- /dev/null +++ b/config/build_config.py @@ -0,0 +1,442 @@ +# ======================================================================== +# ๐Ÿ”ง Clice Toolchain Build Configuration +# ======================================================================== +# File: config/build_config.py +# Purpose: Centralized configuration constants for the toolchain build system +# +# This module provides a comprehensive configuration framework for building +# the complete Clice compiler toolchain, including glibc, GCC, LLVM, and +# Linux kernel headers. +# ======================================================================== + +""" +๐Ÿš€ Clice Toolchain Build Configuration + +This module serves as the central configuration hub for the Clice toolchain +build process. It defines all necessary constants, paths, and component +configurations required to build a complete development environment. 
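+
+Example (illustrative; assumes the module is imported inside the build
+container, where TOOLCHAIN_CONFIG_PATH exists):
+
+    from build_config import CMAKE
+
+    # Assembled from base_url and tarball_name_pattern, e.g.
+    # https://github.com/Kitware/CMake/releases/download/v3.31.8/cmake-3.31.8-linux-x86_64.sh
+    print(CMAKE.tarball_url)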
+
+Key Features:
+    • Centralized toolchain component definitions
+    • Environment variable management
+    • Build dependency specifications
+    • Cross-platform compatibility settings
+    • Automated version management
+
+Supported Components:
+    • glibc (GNU C Library)
+    • GCC (GNU Compiler Collection)
+    • LLVM (Low Level Virtual Machine)
+    • Linux Kernel Headers
+    • CMake Build System
+    • XMake Build System
+"""
+
+import json
+import os
+from typing import Any, List, Dict
+
+# ========================================================================
+# 🌍 Environment Variables and Core Paths
+# ========================================================================
+
+# Global environment variables that will be written to /root/.bashrc
+# and utilized by the run_command execution framework
+DEVELOPMENT_SHELL_VARS: Dict[str, str] = {
+    "PATH": "/root/.local/bin:${PATH}",
+    "XMAKE_ROOT": "y"  # Allow XMake to run as the root user (it refuses to by default)
+}
+
+# Specialized environment variables for toolchain build processes
+# These variables control linking behavior and runtime path resolution
+TOOLCHAIN_BUILD_ENV_VARS: Dict[str, str] = {
+    "ORIGIN": "$ORIGIN"  # Enable relative rpath for portable binary distribution
+}
+
+# Core project structure definitions
+PROJECT_ROOT: str = "/clice"  # Root directory of the Clice project
+PYPROJECT_PATH: str = os.path.join(PROJECT_ROOT, "pyproject.toml")  # Python project configuration file
+TOOLCHAIN_BUILD_ROOT: str = "/toolchain-build"  # Root directory for all toolchain builds
+TOOLCHAIN_CONFIG_PATH: str = os.path.join(PROJECT_ROOT, "config/default-toolchain-version.json")  # Version definitions
+
+# ========================================================================
+# 📦 Release Package Configuration (Cross-Stage Variables)
+# ========================================================================
+
+# These variables are designed to be passed across Docker build stages
+# RELEASE_PACKAGE_DIR is the main package directory, PACKED_RELEASE_PACKAGE_PATH is the compressed archive
+RELEASE_PACKAGE_DIR: str = os.getenv("RELEASE_PACKAGE_DIR", "")
+PACKED_RELEASE_PACKAGE_PATH: str = os.getenv("PACKED_RELEASE_PACKAGE_PATH", "")
+ENVIRONMENT_CONFIG_FILE: str = os.getenv("ENVIRONMENT_CONFIG_FILE", "")
+
+# Source code cache directory for toolchain build
+CACHE_DIR_ROOT: str = os.getenv("CACHE_DIR_ROOT", "")
+
+WORKDIR_ROOT: str = "/dev-container-build"  # Temporary work directory for builds (not persistent)
+
+# ========================================================================
+# 🏗️ Dynamic Configuration Loading
+# ========================================================================
+
+# Dynamic version loading from configuration file
+# This enables centralized version management across all build components
+TOOLCHAIN_VERSIONS: Dict[str, Any] = {}
+with open(TOOLCHAIN_CONFIG_PATH, "r") as f:
+    TOOLCHAIN_VERSIONS = json.load(f)
+
+# ========================================================================
+# 🔐 GPG Verification Configuration
+# ========================================================================
+
+# Trusted GPG key servers for source code verification
+# Multiple servers provide redundancy for package signature validation
+GPG_KEY_SERVER: List[str] = [
+    "keys.openpgp.org",       # Primary OpenPGP keyserver
+    "keyserver.ubuntu.com"    # Ubuntu keyserver (fallback)
+]
+
+# ========================================================================
+# 🧩 Component Architecture Definitions
+# ========================================================================
+
+class Component:
+    """
+    🔧 Base Component Configuration Class
+
+    Provides a unified interface for all build components including APT packages,
+    development tools, and toolchain components. Each component manages its own
+    directory structure and download configuration.
+
+    Attributes:
+        name: Component identifier (unique across all components)
+        version: Component version from TOOLCHAIN_VERSIONS ("unknown" for versionless components)
+        package_dir: Directory for final packaged files in RELEASE_PACKAGE_DIR
+        cache_dir: Directory for persistent caches (downloads, APT packages, etc.)
+        work_dir: Directory for temporary build files (not persistent)
+        base_url: Base URL pattern for downloads (class-level, optional)
+        tarball_name_pattern: Tarball filename pattern (class-level, optional)
+        verification_name_pattern: Verification filename pattern (class-level, optional)
+        download_prerequisites: Tools required for downloading source code (class-level)
+        extract_prerequisites: Tools required for extracting archives (class-level)
+    """
+
+    # Class-level URL patterns (overridden by subclasses)
+    base_url: str = ""
+    tarball_name_pattern: str = ""
+    verification_name_pattern: str = ""
+
+    # Class-level prerequisites configuration (overridden by subclasses)
+    download_prerequisites: List[str] = [
+        "aria2",   # High-speed multi-connection downloader
+        "gnupg",   # GPG signature verification system
+        "git",     # Required for git clone of llvm
+    ]
+
+    extract_prerequisites: List[str] = [
+        "bzip2",     # Required for .tar.bz2 archives (GCC prerequisites)
+        "xz-utils",  # Required for extracting .xz archives (toolchain sources)
+    ]
+
+    # Where the component will be deployed
+    # (lowercased when formatting URLs and triplets, which use e.g. "linux", not "Linux")
+    host_system: str = "Linux"
+    host_machine: str = "x86_64"
+
+    # Where the constructed output (like clice binary) runs on
+    target_system: str = host_system
+    target_machine: str = host_machine
+
+    def __init__(self, name: str, version: str = "unknown"):
+        self.name = name
+        self.version = version
+
+        # Directory structure generation based on name and version
+        self.package_dir = os.path.join(RELEASE_PACKAGE_DIR, self.versioned_name)
+        self.cache_dir = os.path.join(CACHE_DIR_ROOT, self.versioned_name)
+        self.work_dir = os.path.join(WORKDIR_ROOT, self.versioned_name)
+
+    @property
+    def versioned_name(self) -> str:
+        """Generate folder name from component name and version."""
+        return f"{self.name}-{self.version}"
+
+    @property
+    def tarball_name(self) -> str:
+        """Complete tarball filename."""
+        if not self.tarball_name_pattern:
+            raise ValueError(f"Component '{self.name}' missing required tarball_name_pattern")
+
+        return self.tarball_name_pattern.format(version=self.version, system=self.host_system.lower(), machine=self.host_machine)
+
+    @property
+    def tarball_url(self) -> str:
+        """Complete download URL (requires base_url and tarball_name_pattern)."""
+        if not self.base_url:
+            raise ValueError(f"Component '{self.name}' missing required base_url")
+        if not self.tarball_name_pattern:
+            raise ValueError(f"Component '{self.name}' missing required tarball_name_pattern")
+
+        formatted_base_url = self.base_url.format(version=self.version, system=self.host_system.lower(), machine=self.host_machine)
+        return f"{formatted_base_url}/{self.tarball_name}"
+
+    @property
+    def verification_name(self) -> str:
+        """Complete verification filename."""
+        if not self.verification_name_pattern:
+            raise ValueError(f"Component '{self.name}' missing required verification_name_pattern")
+
+        return self.verification_name_pattern.format(version=self.version, system=self.host_system.lower(), machine=self.host_machine)
+
+    @property
+    def verification_url(self) -> str:
+        """Verification file download URL."""
+        formatted_base_url = self.base_url.format(version=self.version, system=self.host_system.lower(), machine=self.host_machine)
+        return f"{formatted_base_url}/{self.verification_name}"
+
+    @property
+    def host_triplet(self) -> str:
+        """Host system triplet (e.g., x86_64-linux-gnu)."""
+        return f"{self.host_machine}-{self.host_system.lower()}-gnu"
+
+    @property
+    def target_triplet(self) -> str:
+        """Target system triplet (e.g., x86_64-linux-gnu)."""
+        return f"{self.target_machine}-{self.target_system.lower()}-gnu"
+
+
+class ToolchainSubComponent(Component):
+    """
+    🔧 Toolchain Sub-Component Configuration Class
+
+    Specialized component for toolchain elements (glibc, gcc, llvm, linux).
+    Creates subdirectories under the main toolchain component structure.
+
+    Additional Attributes:
+        parent_component: Reference to parent toolchain component
+        extracted_dir: Directory for extracted source code
+        src_dir: Version-specific source directory
+        build_dir: Out-of-tree build directory
+    """
+
+    def __init__(self, name: str, parent_component: Component):
+        version = TOOLCHAIN_VERSIONS[name]
+        super().__init__(name, version)
+        self.parent_component = parent_component
+
+        # Override directory structure to be under parent toolchain
+        self.package_dir = os.path.join(self.parent_component.package_dir, self.versioned_name)
+        self.cache_dir = os.path.join(self.parent_component.cache_dir, self.versioned_name)
+        self.work_dir = os.path.join(self.parent_component.work_dir, self.versioned_name)
+
+    @property
+    def extracted_dir(self) -> str:
+        """Directory for extracted source code."""
+        return self.src_dir
+
+    @property
+    def src_dir(self) -> str:
+        """Version-specific source directory."""
+        return os.path.join(self.work_dir, self.versioned_name)
+
+    @property
+    def build_dir(self) -> str:
+        """Out-of-tree build directory."""
+        return os.path.join(self.src_dir, "build")
+
+# ========================================================================
+# 🔧 Concrete Component Classes
+# ========================================================================
+
+class APTComponent(Component):
+    """APT packages management component (versionless)."""
+
+    # APT-specific prerequisites with specialization
+    download_prerequisites: List[str] = Component.download_prerequisites + [
+        "apt-rdepends",  # Advanced dependency resolution and analysis tool
+    ]
+
+    def __init__(self):
+        super().__init__("apt")
+
+    @property
+    def basic_tools(self) -> List[str]:
+        """Basic tools from Dockerfile basic-tools stage."""
+        return [
+            "git",  # For xmake initialize
+        ]
+
+    @property
+    def all_packages(self) -> List[str]:
+        """Complete package list for development container."""
+        return self.basic_tools
+
+
+class UVComponent(Component):
+    """UV Python package manager component (versionless)."""
+
+    def __init__(self):
+        super().__init__("uv")
+
+
+class XMakeComponent(Component):
+    """XMake build system component."""
+
+    base_url = "https://github.com/xmake-io/xmake/releases/download/v{version}"
+    tarball_name_pattern = "xmake-bundle-v{version}.{system}.{machine}"
+
+    def __init__(self):
+        version = TOOLCHAIN_VERSIONS["xmake"]
+        super().__init__("xmake", version)
+
+class CMakeComponent(Component):
+    """CMake build system component."""
+
+    base_url = "https://github.com/Kitware/CMake/releases/download/v{version}"
+    tarball_name_pattern = "cmake-{version}-linux-x86_64.sh"
+    verification_name_pattern = "cmake-{version}-SHA-256.txt"
+
+    def __init__(self):
+        version = TOOLCHAIN_VERSIONS["cmake"]
+        super().__init__("cmake", version)
+
+class ToolchainComponent(Component):
+    """Main toolchain component containing glibc, gcc, llvm, linux sub-components."""
+
+    def __init__(self):
+        super().__init__("toolchain")
+
+        # Create sub-components
+        self.glibc: GlibcSubComponent = GlibcSubComponent(self)
+        self.gcc: GccSubComponent = GccSubComponent(self)
+        self.llvm: LlvmSubComponent = LlvmSubComponent(self)
+        self.linux: LinuxSubComponent = LinuxSubComponent(self)
+
+        # Sub-component registry
+        self.sub_components: list[ToolchainSubComponent] = [
+            self.glibc,
+            self.gcc,
+            self.llvm,
+            self.linux,
+        ]
+
+    @property
+    def sysroot_dir(self) -> str:
+        """Sysroot directory with version-specific naming."""
+        return f"{self.package_dir}/sysroot/{self.host_triplet}/{self.target_triplet}/glibc{self.glibc.version}-libstdc++{self.gcc.version}-linux{self.linux.version}"
+
+    @property
+    def build_prerequisites(self) -> List[str]:
+        """Comprehensive build environment for toolchain compilation."""
+        return [
+            # Core build tools
+            "make",      # GNU Make build automation
+            "rsync",     # File synchronization (Linux kernel headers)
+            "gawk",      # GNU AWK text processing (glibc requirement)
+            "bison",     # Parser generator (glibc requirement)
+            "binutils",  # Binary utilities (assembler, linker, etc.)
+            "file",      # File type identification (libcc1 requires this tool)
+
+            # GCC toolchain for glibc (requires GCC < 10 to avoid linker conflicts)
+            "gcc-9",     # GNU C compiler version 9
+
+            # Modern GCC toolchain for libstdc++ building
+            "gcc-14",            # Latest GNU C compiler
+            "g++-14",            # Latest GNU C++ compiler
+            "libstdc++-14-dev",  # Latest C++ standard library development files
+        ]
+
+
+# ========================================================================
+# 🧩 Toolchain Sub-Component Classes
+# ========================================================================
+
+class GlibcSubComponent(ToolchainSubComponent):
+    """GNU C Library sub-component."""
+
+    base_url = "https://ftpmirror.gnu.org/gnu/glibc"
+    tarball_name_pattern = "glibc-{version}.tar.xz"
+    verification_name_pattern = "glibc-{version}.tar.xz.sig"
+
+    def __init__(self, parent_component: ToolchainComponent):
+        super().__init__("glibc", parent_component)
+
+
+class GccSubComponent(ToolchainSubComponent):
+    """GNU Compiler Collection sub-component."""
+
+    base_url = "https://ftpmirror.gnu.org/gnu/gcc/gcc-{version}"
+    tarball_name_pattern = "gcc-{version}.tar.xz"
+    verification_name_pattern = "gcc-{version}.tar.xz.sig"
+
+    def __init__(self, parent_component: ToolchainComponent):
+        super().__init__("gcc", parent_component)
+
+    @property
+    def target_libs(self) -> List[str]:
+        """Selective GCC library build targets."""
+        return [
+            "libgcc",  # Low-level runtime support library (exception handling, etc.)
+ "libstdc++-v3", # C++ standard library implementation + "libsanitizer", # Address/memory/thread sanitizer runtime libraries + "libatomic", # Atomic operations library for lock-free programming + "libbacktrace", # Stack backtrace support for debugging + "libgomp", # OpenMP parallel programming runtime + "libquadmath" # Quadruple precision math library + ] + + +class LlvmSubComponent(ToolchainSubComponent): + """LLVM Project sub-component.""" + + base_url = "https://github.com/llvm/llvm-project/releases/download/llvmorg-{version}" + tarball_name_pattern = "llvm-project-{version}.src.tar.xz" + verification_name_pattern = "llvm-project-{version}.src.tar.xz.sig" + + def __init__(self, parent_component: ToolchainComponent): + super().__init__("llvm", parent_component) + + +class LinuxSubComponent(ToolchainSubComponent): + """Linux Kernel Headers sub-component.""" + + base_url = "https://github.com/torvalds/linux/archive/refs/tags" + tarball_name_pattern = "v{version}.tar.gz" + verification_name_pattern = "" # Linux kernel releases don't include separate signature files + + def __init__(self, parent_component: ToolchainComponent): + super().__init__("linux", parent_component) + +# ======================================================================== +# ๐Ÿ—๏ธ Component Instances and Build Stage Organization +# ======================================================================== + +# Create component instances +APT = APTComponent() +UV = UVComponent() +XMAKE = XMakeComponent() +CMAKE = CMakeComponent() +TOOLCHAIN = ToolchainComponent() + +# ======================================================================== +# ๐Ÿ“‹ Build Stage Component Groups +# ======================================================================== + +# Dependencies downloader stage components +DEPENDENCIES_DOWNLOADER_STAGE: list[Component] = [ + APT, + UV, + CMAKE, + XMAKE, +] + +# Toolchain builder stage components +TOOLCHAIN_BUILDER_STAGE: list[Component] = [ + TOOLCHAIN, +] + +# Master component registry +ALL_COMPONENTS = [ + *DEPENDENCIES_DOWNLOADER_STAGE, + *TOOLCHAIN_BUILDER_STAGE, +] + + diff --git a/config/default-toolchain-version b/config/default-toolchain-version deleted file mode 100644 index 8a2b780e..00000000 --- a/config/default-toolchain-version +++ /dev/null @@ -1,6 +0,0 @@ -xmake,3.0.2 -cmake,3.31.8 -python,3.13 -gcc,14 -clang,20 -msvc diff --git a/config/default-toolchain-version.json b/config/default-toolchain-version.json new file mode 100644 index 00000000..6c88a5b1 --- /dev/null +++ b/config/default-toolchain-version.json @@ -0,0 +1,10 @@ +{ + "xmake": "3.0.2", + "cmake": "3.31.8", + "python": "3.13", + "gcc": "14.3.0", + "llvm": "20.1.8", + "glibc": "2.39", + "linux": "6.17", + "msvc": null +} diff --git a/docker/linux/Dockerfile b/docker/linux/Dockerfile index 1226ba68..86e0d7ff 100644 --- a/docker/linux/Dockerfile +++ b/docker/linux/Dockerfile @@ -1,264 +1,205 @@ -# build with multi-stage for cache efficiency -FROM ubuntu:24.04 AS basic-tools - -# allow build script to bind-mount project source into build container (host path) -ARG BUILD_SRC - -# set non-interactive frontend to avoid prompts -ENV DEBIAN_FRONTEND=noninteractive -# ensure user-local bin is on PATH for non-apt installs (xmake, uv, python) -ENV PATH="/root/.local/bin:${PATH}" - -# install basic tools -RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \ - --mount=type=cache,target=/var/lib/apt,sharing=locked \ - # TODO: support more cache for python, xmake installation - # TODO: check why cache doesn't work after 
add-apt-repository, may we change it to cache? -bash -eux - <<'BASH' - - set -e - apt update - # first install minimal apt prerequisites - # software-properties-common for add-apt-repository - # gnupg for gpg to verify cmake installer - # curl, git for downloading sources - # xz-utils, unzip for extracting archives - # make for xmake installation - apt install -y --no-install-recommends \ - software-properties-common \ - curl \ - gnupg \ - git \ - xz-utils \ - unzip \ - make - - # gcc, llvm PPA - add-apt-repository -y ppa:ubuntu-toolchain-r/ppa - apt update -BASH +# ======================================================================== +# ๐Ÿš€ Clice Dev Container Multi-Stage Build System +# ======================================================================== +# File: docker/linux/Dockerfile +# Purpose: Optimized multi-stage build for Clice development container +# +# This Dockerfile implements Python-first build approach with minimal +# system dependencies, letting Python scripts handle dependency installation. +# ======================================================================== + +# Arguments passed from docker image build system +ARG COMPILER +ARG PACKED_IMAGE_NAME -# Compiler stage -FROM basic-tools AS compiler-stage +# Global config shared in multi-stage builds +ARG RELEASE_PACKAGE_DIR="/clice-dev-container-package" +ARG PACKED_RELEASE_PACKAGE_PATH="/release-pkg.tar.xz" +ARG ENVIRONMENT_CONFIG_FILE="/root/.bashrc" +ARG CACHE_DIR_ROOT="/var/cache/clice-dev-container" -# passed from build arg -ARG COMPILER +# APT system paths configuration +ARG APT_CACHE_DIR="/var/cache/apt" +ARG APT_STATE_CACHE_DIR="/var/lib/apt" -# copy instead of bind-mount, to avoid docker build cache invalidation -COPY config/default-toolchain-version /clice/config/default-toolchain-version - -RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \ - --mount=type=cache,target=/var/lib/apt,sharing=locked \ - bash -eux - <<'BASH' - set -e - - # Always install libstdc++ development files, required for both gcc and clang to link against libstdc++ - GCC_VERSION=$(grep -E '^gcc,' /clice/config/default-toolchain-version | cut -d',' -f2) - apt install -y --no-install-recommends "libstdc++-${GCC_VERSION}-dev" - - if [ "$COMPILER" = "gcc" ]; then - apt install -y --no-install-recommends "gcc-${GCC_VERSION}" "g++-${GCC_VERSION}" - update-alternatives --install /usr/bin/cc cc "/usr/bin/gcc-${GCC_VERSION}" 100 - update-alternatives --install /usr/bin/gcc gcc "/usr/bin/gcc-${GCC_VERSION}" 100 - update-alternatives --install /usr/bin/c++ c++ "/usr/bin/g++-${GCC_VERSION}" 100 - update-alternatives --install /usr/bin/g++ g++ "/usr/bin/g++-${GCC_VERSION}" 100 - elif [ "$COMPILER" = "clang" ]; then - CLANG_VERSION=$(grep -E '^clang,' /clice/config/default-toolchain-version | cut -d',' -f2) - # install clang toolchain, libstdc++ is already installed - apt install -y --no-install-recommends "clang-${CLANG_VERSION}" "clang-tools-${CLANG_VERSION}" "lld-${CLANG_VERSION}" "libclang-rt-${CLANG_VERSION}-dev" - update-alternatives --install /usr/bin/clang clang "/usr/bin/clang-${CLANG_VERSION}" 100 - update-alternatives --install /usr/bin/clang++ clang++ "/usr/bin/clang++-${CLANG_VERSION}" 100 - update-alternatives --install /usr/bin/c++ c++ "/usr/bin/clang++-${CLANG_VERSION}" 100 - update-alternatives --install /usr/bin/cc cc "/usr/bin/clang-${CLANG_VERSION}" 100 - update-alternatives --install /usr/bin/ld ld "/usr/bin/lld-${CLANG_VERSION}" 100 - else - echo "Error: Unsupported compiler '$COMPILER'. Use 'gcc' or 'clang'." 
>&2; exit 1 - fi -BASH - -FROM compiler-stage AS build-tool-stage - -ARG XMAKE_CACHE_DIR="/docker-build-cache/xmake" -ARG CMAKE_CACHE_DIR="/docker-build-cache/cmake" +# UV cache configuration ARG UV_CACHE_DIR="/var/cache/uv" -ENV XMAKE_CACHE_DIR=${XMAKE_CACHE_DIR} -ENV CMAKE_CACHE_DIR=${CMAKE_CACHE_DIR} +# ======================================================================== +# ๐Ÿ Base Stage: Python Environment Foundation +# ======================================================================== +FROM ubuntu:24.04 AS base-python-environment +LABEL description="Base image with consistent Python and uv environment for all stages" + +# Environment setup +ENV PATH="/root/.local/bin:${PATH}" ENV UV_CACHE_DIR=${UV_CACHE_DIR} -COPY ./pyproject.toml /clice/pyproject.toml +COPY config /clice/config +COPY docker/linux /clice/docker/linux -RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \ - --mount=type=cache,target=/var/lib/apt,sharing=locked \ - --mount=type=cache,target=${XMAKE_CACHE_DIR},sharing=locked \ - --mount=type=cache,target=${CMAKE_CACHE_DIR},sharing=locked \ +# Install minimal system dependencies and uv +RUN --mount=type=cache,target=${APT_CACHE_DIR},sharing=locked \ + --mount=type=cache,target=${APT_STATE_CACHE_DIR},sharing=locked \ --mount=type=cache,target=${UV_CACHE_DIR},sharing=locked \ -bash -eux - <<'BASH' - -install_xmake() { - set -e - - XMAKE_VERSION=$(grep -E '^xmake,' /clice/config/default-toolchain-version | cut -d',' -f2) - XMAKE_BASE_URL="https://github.com/xmake-io/xmake/releases/download/v${XMAKE_VERSION}" - XMAKE_FILENAME="xmake-bundle-v${XMAKE_VERSION}.linux.x86_64" - XMAKE_CACHED_FILE="${XMAKE_CACHE_DIR}/${XMAKE_FILENAME}" - - if [ ! -f "${XMAKE_CACHED_FILE}" ] ; then - rm -f "${XMAKE_CACHE_DIR}/*" - curl -fsSL --retry 3 -o "${XMAKE_CACHED_FILE}" "${XMAKE_BASE_URL}/${XMAKE_FILENAME}" - fi - - XMAKE_INSTALL_DIR="/usr/bin" - XMAKE_INSTALLED_FILE="${XMAKE_INSTALL_DIR}/${XMAKE_FILENAME}" - - cp "${XMAKE_CACHED_FILE}" "${XMAKE_INSTALLED_FILE}" - chmod +x "${XMAKE_INSTALLED_FILE}" - - update-alternatives --install /usr/bin/xmake xmake "${XMAKE_INSTALLED_FILE}" 100 - - echo "export XMAKE_ROOT=y" >> ~/.bashrc -} - -# Attention: DO NOT install cmake via PPA with apt, which would have to install required build-essential compiler tool chain -# We SHOULD NOT install another compiler toolchain, which could cause a lot trouble -# And we should not install compiler toolchain away from compiler stage -# So we install cmake from official installer script, and cache the downloaded files -install_cmake() { - set -e - - # cached downloads live under /docker-build-cache/cmake (BuildKit cache mount) - CMAKE_VERSION=$(grep -E '^cmake,' /clice/config/default-toolchain-version | cut -d',' -f2) - ARCH="x86_64" - - BASE_URL="https://github.com/Kitware/CMake/releases/download/v${CMAKE_VERSION}" - INSTALLER_FILENAME="cmake-${CMAKE_VERSION}-linux-${ARCH}.sh" - SHA_FILENAME="cmake-${CMAKE_VERSION}-SHA-256.txt" - ASC_FILENAME="${SHA_FILENAME}.asc" - - INSTALLER_PATH="${CMAKE_CACHE_DIR}/${INSTALLER_FILENAME}" - SHA_PATH="${CMAKE_CACHE_DIR}/${SHA_FILENAME}" - ASC_PATH="${CMAKE_CACHE_DIR}/${ASC_FILENAME}" - - verify_cmake_installer() { - if ! gpg --verify "${ASC_PATH}" "${SHA_PATH}"; then - echo "Signature verification failed for ${SHA_FILENAME}." 
>&2 - return 1 - fi - - local expected_hash - expected_hash=$(grep "${INSTALLER_FILENAME}" "${SHA_PATH}" | awk '{print $1}') - - local actual_hash - actual_hash=$(sha256sum "${INSTALLER_PATH}" | awk '{print $1}') - if [ "${expected_hash}" != "${actual_hash}" ]; then - echo "Checksum mismatch for ${INSTALLER_FILENAME}." >&2 - return 1 - fi - - echo "Checksum for ${INSTALLER_FILENAME} is valid." - return 0 - } - - gpg --keyserver keys.openpgp.org --recv-keys C6C265324BBEBDC350B513D02D2CEF1034921684 - - if [ ! -f "${INSTALLER_PATH}" ] || ! verify_cmake_installer; then - rm -f "${CMAKE_CACHE_DIR}/*" - - curl -fsSL --retry 3 -o "${INSTALLER_PATH}" "${BASE_URL}/${INSTALLER_FILENAME}" - curl -fsSL --retry 3 -o "${SHA_PATH}" "${BASE_URL}/${SHA_FILENAME}" - curl -fsSL --retry 3 -o "${ASC_PATH}" "${BASE_URL}/${ASC_FILENAME}" - - if ! verify_cmake_installer; then - echo "Verification of the downloaded installer failed. Cleaning cache." >&2 - rm -f "${CMAKE_CACHE_DIR}/*" - exit 1 - fi - fi - - sh "${INSTALLER_PATH}" --skip-license --prefix=/usr/local -} - -install_python() { - set -e - PYTHON_VERSION=$(grep -E '^python,' /clice/config/default-toolchain-version | cut -d',' -f2) + bash -eux - <<'SCRIPT' + apt update + apt install -y --no-install-recommends curl jq ca-certificates + + # Install uv for Python management curl -LsSf https://astral.sh/uv/install.sh | sh - uv python install "${PYTHON_VERSION}" - uv sync -} - -do_install() { - set -e - - cd /clice - - export PATH="/root/.local/bin:${PATH}" - echo "export XMAKE_ROOT=y" >> ~/.bashrc - install_cmake & - install_xmake & - install_python & + # Get Python version from configuration + PYTHON_VERSION=$(jq -r .python /clice/config/default-toolchain-version.json) + echo "Installing Python version: $PYTHON_VERSION" + + # Install specified Python version + uv python install "$PYTHON_VERSION" +SCRIPT - for job in $(jobs -p); do - wait $job || exit 1 - done -} - -do_install - -BASH - -# download compile dependencies -FROM build-tool-stage AS dependency-cache-stage - -# passed from build arg -# "lto" or "non_lto" -ARG BUILD_SRC -# ARG LTO_TYPE="" - -RUN --mount=type=bind,src=./,target=/clice,rw \ -bash -eux - <<'BASH' - -# cache_xmake_packages() { -# set -e - -# export PATH="/root/.local/bin:${PATH}" -# export XMAKE_ROOT=y +WORKDIR /clice -# LTO_FLAG="" -# if [ "$LTO_TYPE" = "lto" ]; then -# LTO_FLAG="--release" -# fi +# ======================================================================== +# ๐Ÿ—๏ธ Stage 1: Compiler Toolchain Builder +# ======================================================================== +FROM base-python-environment AS toolchain-builder +LABEL description="Builds custom compiler toolchain with static libstdc++ for glibc compatibility" -# xmake f -y -v --mode=release ${LTO_FLAG} -# xmake f -y -v --mode=debug ${LTO_FLAG} -# } -do_prepare_dependency() { - set -e +# For build_config.py +ENV CACHE_DIR_ROOT=${CACHE_DIR_ROOT} +ENV RELEASE_PACKAGE_DIR=${RELEASE_PACKAGE_DIR} - cd /clice +# uv auto reads from this env variable +ENV UV_CACHE_DIR=${UV_CACHE_DIR} - # cache_xmake_packages & +ENV DEBIAN_FRONTEND=noninteractive - for job in $(jobs -p); do - wait $job || exit 1 - done -} +# Copy additional project structure (config already copied in base) +COPY docker/linux /clice/docker/linux + +# Build the custom toolchain (Python script handles all dependencies) +RUN --mount=type=cache,target=${APT_CACHE_DIR},sharing=locked,id=toolchain-builder-apt \ + --mount=type=cache,target=${APT_STATE_CACHE_DIR},sharing=locked,id=toolchain-builder-apt-state 
\ + --mount=type=cache,target=${CACHE_DIR_ROOT},sharing=locked,id=toolchain-builder-cache \ + --mount=type=cache,target=${UV_CACHE_DIR},sharing=locked,id=toolchain-builder-uv \ + bash -eux - <<'SCRIPT' + # Setup Python project environment + uv sync --project /clice/docker/linux/utility/pyproject.toml + + # Activate Python environment and build toolchain + source /clice/docker/linux/utility/.venv/bin/activate + python docker/linux/utility/build_clice_compiler_toolchain.py +SCRIPT + +# ======================================================================== +# ๐Ÿ—๏ธ Stage 2: Dependencies Downloader (Parallel to Stage 1) +# ======================================================================== +FROM base-python-environment AS dependencies-downloader +LABEL description="Downloads dev-container dependencies for cache optimization" + +ENV CACHE_DIR_ROOT=${CACHE_DIR_ROOT} +ENV RELEASE_PACKAGE_DIR=${RELEASE_PACKAGE_DIR} + +# uv auto reads from this env variable +ENV UV_CACHE_DIR=${UV_CACHE_DIR} -do_prepare_dependency +ENV DEBIAN_FRONTEND=noninteractive -BASH +# Copy additional project structure (config already copied in base) +COPY docker /clice/docker + +# Setup Python project environment and download dependencies +RUN --mount=type=cache,target=${APT_CACHE_DIR},sharing=locked,id=dependencies-downloader-apt \ + --mount=type=cache,target=${APT_STATE_CACHE_DIR},sharing=locked,id=dependencies-downloader-apt-state \ + --mount=type=cache,target=${CACHE_DIR_ROOT},sharing=locked,id=dependencies-downloader-cache \ + --mount=type=cache,target=${UV_CACHE_DIR},sharing=locked,id=dependencies-downloader-uv \ + bash -eux - <<'SCRIPT' + # Setup Python environment + uv sync --project /clice/docker/linux/utility/pyproject.toml + + # Download dependencies + source /clice/docker/linux/utility/.venv/bin/activate + python docker/linux/utility/download_dependencies.py +SCRIPT + +# ======================================================================== +# ๐Ÿ—๏ธ Stage 3: Release Package Creator +# ======================================================================== +FROM base-python-environment AS image-packer +LABEL description="Merges toolchain and dependencies into final release package" + +# For build_config.py +ENV RELEASE_PACKAGE_DIR=${RELEASE_PACKAGE_DIR} +ENV PACKED_RELEASE_PACKAGE_PATH=${PACKED_RELEASE_PACKAGE_PATH} +ENV ENVIRONMENT_CONFIG_FILE=${ENVIRONMENT_CONFIG_FILE} + +# uv auto reads from this env variable +ENV UV_CACHE_DIR=${UV_CACHE_DIR} -FROM dependency-cache-stage AS final +ENV DEBIAN_FRONTEND=noninteractive -RUN bash -eux - <<'BASH' -set -e - # clice is mounted here, so remove everything to reduce image size +# Copy outputs from previous stages +# Merge by RELEASE_PACKAGE_DIR structure, each component has its own directory +# No need to manually copy individual files +COPY --from=toolchain-builder ${RELEASE_PACKAGE_DIR} ${RELEASE_PACKAGE_DIR} +COPY --from=dependencies-downloader ${RELEASE_PACKAGE_DIR} ${RELEASE_PACKAGE_DIR} + +# Setup Python project environment and create final release package +RUN --mount=type=cache,target=${APT_CACHE_DIR},sharing=locked,id=packed-image-apt \ + --mount=type=cache,target=${APT_STATE_CACHE_DIR},sharing=locked,id=packed-image-apt-state \ + --mount=type=cache,target=${UV_CACHE_DIR},sharing=locked,id=packed-image-uv \ + bash -eux - <<'SCRIPT' + # Setup Python environment + uv sync --project /clice/docker/linux/utility/pyproject.toml + + # Create final release package by merging stage outputs + source /clice/docker/linux/utility/.venv/bin/activate + python 
docker/linux/utility/create_release_package.py
+SCRIPT
+
+# ========================================================================
+# 🏗️ Stage 4: Packed Release Image
+# ========================================================================
+FROM base-python-environment AS packed-image
+
+# Re-declare the global ARGs so they are visible inside this stage
+ARG PACKED_RELEASE_PACKAGE_PATH
+ARG ENVIRONMENT_CONFIG_FILE
+
+# Copy project configuration to determine Python version
+# These two COPYs must happen before the image is expanded,
+# so the config and scripts stay identical to the docker image build environment
+COPY config /clice/config
+COPY docker/linux /clice/docker/linux
+
+COPY --from=image-packer ${PACKED_RELEASE_PACKAGE_PATH} ${PACKED_RELEASE_PACKAGE_PATH}
+COPY --from=image-packer ${ENVIRONMENT_CONFIG_FILE} ${ENVIRONMENT_CONFIG_FILE}
+
+# ========================================================================
+# 🏗️ Stage 5: Development Image (Expanded)
+# ========================================================================
+FROM ${PACKED_IMAGE_NAME} AS expanded-image
+LABEL description="Fully expanded development image with all tools installed"
+
+# We should NOT copy the project structure for local setup:
+# local config and other scripts may differ from the pack environment.
+# The two paths below were copied in from the pack environment, not from local:
+#   /clice/config
+#   /clice/docker/linux
+
+# Expand the release image into a full development environment
+# We don't mark this with --mount=type=cache because it executes in the clice
+# developer's environment, which does not have the caches from previous stages
+RUN bash -eux - <<'SCRIPT'
+
+    # Setup Python project environment
+    uv sync --project /clice/docker/linux/utility/pyproject.toml
+
+    # Run local setup to expand everything
+    source /clice/docker/linux/utility/.venv/bin/activate
+    python docker/linux/utility/local_setup.py
+
+    # clean up the project structure to reduce image size
+    # Users can refer to ${RELEASE_PACKAGE_DIR}/manifest.json if needed
     rm -rf /clice
-
-    # disable git exception in cmake build when Fetch-Content
-    git config --global --add safe.directory '*'
-BASH
+SCRIPT
 
 WORKDIR /clice
diff --git a/docker/linux/build.sh b/docker/linux/build.sh
index b47ecc69..71454ec0 100644
--- a/docker/linux/build.sh
+++ b/docker/linux/build.sh
@@ -1,54 +1,285 @@
 #!/bin/bash
+# ========================================================================
+# 🚀 Clice Development Container Builder
+# ========================================================================
+# File: docker/linux/build.sh
+# Purpose: Build Clice development container with all tools and dependencies
+#
+# This script builds a unified development container containing all necessary
+# components for the Clice development environment. The container is ready
+# to use immediately with all tools pre-installed and configured.
+# ========================================================================
+
 set -e
 
+# ========================================================================
+# 🔧 Environment Setup
+# ========================================================================
+
 # Save original working directory and switch to project root
 ORIG_PWD="$(pwd)"
 SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
-echo "${SCRIPT_DIR}"
 cd "${SCRIPT_DIR}/../.."
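+# The build must run from the repository root so the Dockerfile can
+# COPY config/ and docker/linux/ from the build context.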
 PROJECT_ROOT="$(pwd)"
 trap 'cd "${ORIG_PWD}"' EXIT
 
-# default configurations
+# ========================================================================
+# ⚙️ Default Configuration
+# ========================================================================
+
 COMPILER="clang"
 DOCKERFILE_PATH="docker/linux/Dockerfile"
+BUILD_STAGE="expanded-image"  # Always build development image (auto-expand from release if needed)
+CACHE_FROM=""
+CACHE_TO=""
+VERSION="latest"  # Will be replaced with actual clice version in releases
+REBUILD="false"
+
+# ========================================================================
+# 📚 Usage Information
+# ========================================================================
 
 usage() {
 cat <<EOF
-Usage: $0 [--compiler <gcc|clang>]
+🚀 Clice Development Container Builder
+
+Usage: $0 [OPTIONS]
+
+OPTIONS:
+  --compiler <gcc|clang>  Target compiler (default: ${COMPILER})
+  --cache-from <image>    Use cache from specified image
+  --cache-to <spec>       Push cache to specified image and log cache operations
+  --version <version>     Set version tag (default: ${VERSION})
+  --stage <stage>         Build specific stage (packed-image or expanded-image)
+  --rebuild               Force rebuild even if image exists
+  --help, -h              Show this help message
 
-Defaults:
-  --compiler ${COMPILER}
+EXAMPLES:
+  $0                          Build development container with clang
+  $0 --compiler gcc           Build development container with gcc
+  $0 --stage packed-image     Build only the release image
+  $0 --stage expanded-image   Expand release image to development image
+  $0 --version v1.0.0         Build versioned container (v1.0.0)
+  $0 --rebuild                Force rebuild existing image
+  $0 --cache-from clice-io/clice-dev:cache                  Use cache from existing image
+  $0 --cache-to type=registry,ref=myregistry/myimage:cache  Push cache
+
+VERSION-AWARE BUILDING:
+  When building expanded-image with --version:
+  • First checks for existing release image: clice-io/clice:linux-clang-v1.0.0
+  • If found, builds development image from that release image
+  • If not found, builds full multi-stage (release + development)
+  • Development image will be tagged as: clice-io/clice:linux-clang-v1.0.0-expanded
+
+BUILD MODES:
+  • Multi-stage build: Builds both release and development images
+  • Single-stage build: Builds only the specified stage
+  • Auto-expansion: Development image can build from existing release image
+
+The container includes:
+  • Custom toolchain (fully installed and ready)
+  • All development dependencies
+  • Complete development environment
+  • Version-aware release image support
 EOF
 }
 
-# parse command line arguments
+# ========================================================================
+# 🔍 Command Line Parsing
+# ========================================================================
+
 while [ "$#" -gt 0 ]; do
     case "$1" in
         --compiler)
             COMPILER="$2"; shift 2;;
+        --cache-from)
+            CACHE_FROM="$2"; shift 2;;
+        --cache-to)
+            CACHE_TO="$2"; shift 2;;
+        --version)
+            VERSION="$2"; shift 2;;
+        --stage)
+            BUILD_STAGE="$2"; shift 2;;
+        --rebuild)
+            REBUILD="true"; shift 1;;
         -h|--help)
             usage; exit 0;;
         *)
-            echo "Unknown parameter: $1" >&2; usage; exit 1;;
+            echo "❌ Unknown parameter: $1" >&2; usage; exit 1;;
     esac
 done
 
-IMAGE_TAG="linux-${COMPILER}"
-IMAGE_NAME="clice-io/clice-dev:${IMAGE_TAG}"
+# ========================================================================
+# 🏷️ Image Naming
+# ========================================================================
+
+# Container image tag with version
+IMAGE_TAG="linux-${COMPILER}-${VERSION}"
+PACKED_IMAGE_NAME="clice-io/clice:${IMAGE_TAG}"
+
+# Set the target image name based on build stage
+if [ "$BUILD_STAGE" = "packed-image" ]; then
+    TARGET_IMAGE_NAME="$PACKED_IMAGE_NAME"
+else
+    TARGET_IMAGE_NAME="clice-io/clice:${IMAGE_TAG}-expanded"
+fi
+
+# ========================================================================
+# 🚀 Build Process
+# ========================================================================
+
+echo "========================================================================="
+echo "🚀 CLICE DEVELOPMENT CONTAINER BUILDER"
+echo "========================================================================="
+echo "📦 Image: ${TARGET_IMAGE_NAME}"
+echo "🏷️ Version: ${VERSION}"
+echo "🔧 Compiler: ${COMPILER}"
+echo "🐳 Dockerfile: ${DOCKERFILE_PATH}"
+echo "📁 Project Root: ${PROJECT_ROOT}"
+echo "⚡ Parallel Build: Enabled"
+if [ -n "$CACHE_FROM" ]; then
+    echo "💾 Cache From: ${CACHE_FROM}"
+fi
+if [ -n "$CACHE_TO" ]; then
+    echo "💾 Cache To: ${CACHE_TO}"
+fi
+echo "========================================================================="
+
+# ========================================================================
+# 🛠️ Docker Build Arguments
+# ========================================================================
+
+BUILD_ARGS=(
+    "--progress=plain"
+    "--target=${BUILD_STAGE}"
+    "--build-arg=COMPILER=${COMPILER}"
+    "--build-arg=PACKED_IMAGE_NAME=${PACKED_IMAGE_NAME}"
+    "--build-arg=BUILDKIT_INLINE_CACHE=1"  # Enable inline cache
+)
+
+# Add cache configuration with logging
+if [ -n "$CACHE_FROM" ]; then
+    echo "💾 Configuring cache source: ${CACHE_FROM}"
+    BUILD_ARGS+=("--cache-from=${CACHE_FROM}")
+fi
+
+if [ -n "$CACHE_TO" ]; then
+    echo "💾 Configuring cache destination: ${CACHE_TO}"
+    BUILD_ARGS+=("--cache-to=${CACHE_TO}")
+    # Log cache operations
+    echo "📝 Cache operations will be logged during build"
+fi
+
+# ========================================================================
+# 🏗️ Execute Build
+# ========================================================================
+
+echo "🏗️ Starting Docker build process with parallel optimization..."
+echo "🔨 Build command: docker buildx build ${BUILD_ARGS[*]} -t ${TARGET_IMAGE_NAME} -f ${DOCKERFILE_PATH} ."
+
+# ========================================================================
+# 🔄 Auto-Expansion Logic (Release → Development)
+# ========================================================================
+
+# Build the target image
+echo "🔍 Checking for target image: ${TARGET_IMAGE_NAME}"
+
+if [ "$REBUILD" = "true" ] || ! docker image inspect "${TARGET_IMAGE_NAME}" >/dev/null 2>&1; then
+    if [ "$REBUILD" = "true" ]; then
+        echo "🔄 Force rebuilding ${BUILD_STAGE}..."
+    else
+        echo "🔍 Target image not found, building ${BUILD_STAGE}..."
+    fi
+
+    # Set up build arguments based on the target stage
+    if [ "$BUILD_STAGE" = "expanded-image" ]; then
+        # For development image, check if we can build from existing release image
+        if docker image inspect "${PACKED_IMAGE_NAME}" >/dev/null 2>&1; then
+            echo "📦 Found existing release image: ${PACKED_IMAGE_NAME}"
+            echo "🏗️ Building development image from existing release image..."
+            ACTUAL_RELEASE_BASE="${PACKED_IMAGE_NAME}"
+        else
+            echo "🔍 Release image not found: ${PACKED_IMAGE_NAME}"
+            echo "🏗️ Building full multi-stage build (release + development)..."
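+            # Falling back to the in-Dockerfile "packed-image" stage: BuildKit will
+            # build toolchain-builder and dependencies-downloader in parallel,
+            # merge them in image-packer, then expand the packed result.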
+            ACTUAL_RELEASE_BASE="packed-image"
+        fi
+    else
+        # For release image or other stages, use default stage reference
+        ACTUAL_RELEASE_BASE="packed-image"
+    fi
+
+    # Rebuild BUILD_ARGS with the correct base image for the expand stage;
+    # PACKED_IMAGE_NAME feeds the FROM of the expanded-image stage
+    BUILD_ARGS=(
+        "--progress=plain"
+        "--target=${BUILD_STAGE}"
+        "--build-arg=COMPILER=${COMPILER}"
+        "--build-arg=PACKED_IMAGE_NAME=${ACTUAL_RELEASE_BASE}"
+        "--build-arg=BUILDKIT_INLINE_CACHE=1"
+    )
+
+    # Add cache configuration
+    if [ -n "$CACHE_FROM" ]; then
+        BUILD_ARGS+=("--cache-from=${CACHE_FROM}")
+    fi
+
+    if [ -n "$CACHE_TO" ]; then
+        BUILD_ARGS+=("--cache-to=${CACHE_TO}")
+        echo "📝 Starting build with cache-to logging enabled..."
+    fi
+
+    echo "🏗️ Building ${BUILD_STAGE} with auto-expansion support..."
+    docker buildx build "${BUILD_ARGS[@]}" -t "${TARGET_IMAGE_NAME}" -f "${DOCKERFILE_PATH}" .
+
+    # Log cache operations if cache-to was used
+    if [ -n "$CACHE_TO" ]; then
+        echo "💾 Cache operations completed. Cache pushed to: ${CACHE_TO}"
+    fi
+else
+    echo "✅ Target image already exists: ${TARGET_IMAGE_NAME}"
+    echo "ℹ️ Use --rebuild to force rebuild"
+fi
 
-echo "==========================================="
-echo "Building image: ${IMAGE_NAME}"
-echo "Compiler: ${COMPILER}"
-echo "Dockerfile: ${DOCKERFILE_PATH}"
-echo "==========================================="
+# ========================================================================
+# 📊 Post-Build Information
+# ========================================================================
 
-# build the docker image with specified arguments
-# must run in clice root dir, so that we can mount the project in docker file to acquire essential files
-docker buildx build --progress=plain -t "${IMAGE_NAME}" \
-    --build-arg COMPILER="${COMPILER}" \
-    --build-arg BUILD_SRC="${PROJECT_ROOT}" \
-    -f "${DOCKERFILE_PATH}" .
+BUILD_SUCCESS=$?
 
-echo "Build complete. Image:${IMAGE_NAME}"
+if [ $BUILD_SUCCESS -eq 0 ]; then
+    echo "========================================================================="
+    echo "✅ BUILD COMPLETED SUCCESSFULLY!"
+    echo "========================================================================="
+    echo "📦 Image Name: ${TARGET_IMAGE_NAME}"
+    echo "🏷️ Image Tag: ${IMAGE_TAG}"
+    echo "🔧 Compiler: ${COMPILER}"
+    echo "🏗️ Build Stage: ${BUILD_STAGE}"
+
+    # Get image information
+    if command -v docker &> /dev/null; then
+        echo ""
+        echo "📊 IMAGE INFORMATION:"
+        docker image inspect "${TARGET_IMAGE_NAME}" --format="Size: {{.Size}} bytes ({{.VirtualSize}} virtual)" 2>/dev/null || true
+        docker image inspect "${TARGET_IMAGE_NAME}" --format="Created: {{.Created}}" 2>/dev/null || true
+        echo ""
+        echo "🚀 NEXT STEPS:"
+        echo "  • Run container: ./docker/linux/run.sh --compiler ${COMPILER}"
+        echo "  • Test container: docker run --rm -it ${TARGET_IMAGE_NAME} /bin/bash"
+        echo "  • Development environment is ready to use immediately"
+    fi
+
+    echo "========================================================================="
+else
+    echo "========================================================================="
+    echo "❌ BUILD FAILED!"
+ echo "=========================================================================" + echo "๐Ÿ” Check the build output above for error details" + echo "๐Ÿ’ก Common issues:" + echo " โ€ข Network connectivity problems" + echo " โ€ข Insufficient disk space" + echo " โ€ข Docker daemon not running" + echo " โ€ข Invalid build arguments" + echo "=========================================================================" + exit 1 +fi diff --git a/docker/linux/run.sh b/docker/linux/run.sh index 3babc8c5..e53d02c9 100644 --- a/docker/linux/run.sh +++ b/docker/linux/run.sh @@ -1,87 +1,234 @@ #!/bin/bash +# ======================================================================== +# ๐Ÿš€ Clice Development Container Runner +# ======================================================================== +# File: docker/linux/run.sh +# Purpose: Run and manage Clice development containers +# +# This script handles the complete container lifecycle including: +# โ€ข Automatic image building/pulling if needed +# โ€ข Container creation and management +# โ€ข Development environment initialization +# +# Note: Auto-expansion logic (Release โ†’ Development) is handled in build.sh +# ======================================================================== + set -e +# ======================================================================== +# ๐Ÿ”ง Environment Setup +# ======================================================================== + # Save original working directory and switch to project root ORIG_PWD="$(pwd)" SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" -echo "${SCRIPT_DIR}" cd "${SCRIPT_DIR}/../.." PROJECT_ROOT="$(pwd)" trap 'cd "${ORIG_PWD}"' EXIT -# default configurations +# ======================================================================== +# โš™๏ธ Default Configuration +# ======================================================================== + COMPILER="clang" RESET="false" +UPDATE="false" +VERSION="latest" +COMMAND="" +CONTAINER_WORKDIR="/clice" + +# ======================================================================== +# ๐Ÿ“š Usage Information +# ======================================================================== usage() { cat <] [--reset] +๐Ÿš€ Clice Development Container Runner + +Usage: $0 [OPTIONS] [COMMAND] + +OPTIONS: + --compiler Target compiler (default: ${COMPILER}) + --reset Remove existing container + --update Pull latest image and update + --version Use specific version (default: ${VERSION}) + --help, -h Show this help message -Defaults: - --compiler ${COMPILER} - --reset (re-create the container) +EXAMPLES: + $0 Run container (build if not exists) + $0 --compiler gcc Run container with GCC compiler + $0 --reset Remove container and recreate + $0 --update Pull latest image and update + $0 bash Run specific command in container + +CONTAINER LIFECYCLE: + 1. Check/build development image (build.sh handles auto-expansion) + 2. Create/start container from development image + 3. 
+  3. Attach to development shell
EOF
}

-# parse command line arguments
+# ========================================================================
+# 🔍 Command Line Parsing
+# ========================================================================
+
 while [ "$#" -gt 0 ]; do
     case "$1" in
         --compiler)
             COMPILER="$2"; shift 2;;
         --reset)
             RESET="true"; shift 1;;
+        --update)
+            UPDATE="true"; shift 1;;
+        --version)
+            VERSION="$2"; shift 2;;
         -h|--help)
             usage; exit 0;;
-        *) echo "Unknown parameter: $1"; usage; exit 1;;
+        --)
+            shift; COMMAND="$*"; break;;
+        -*)
+            echo "❌ Unknown parameter: $1" >&2; usage; exit 1;;
+        *)
+            COMMAND="$*"; break;;
     esac
 done
 
-IMAGE_TAG="linux-${COMPILER}"
-IMAGE_NAME="clice-io/clice-dev:${IMAGE_TAG}"
-CONTAINER_NAME="clice-dev-linux-${COMPILER}"
+# ========================================================================
+# 🏷️ Container and Image Naming
+# ========================================================================
 
-# If the image doesn't exist, build it automatically by invoking build.sh
-if ! docker image inspect "${IMAGE_NAME}" >/dev/null 2>&1; then
-    echo "Image ${IMAGE_NAME} not found, invoking build.sh to create it..."
-    ./docker/linux/build.sh --compiler "${COMPILER}"
-fi
+# Keep the tag scheme identical to build.sh (linux-${COMPILER}-${VERSION})
+IMAGE_TAG="linux-${COMPILER}-${VERSION}"
+PACKED_IMAGE_NAME="clice-io/clice:${IMAGE_TAG}"
+EXPANDED_IMAGE_NAME="${PACKED_IMAGE_NAME}-expanded"
+CONTAINER_NAME="clice-dev-linux-${COMPILER}-${VERSION}"
+
+# ========================================================================
+# 🚀 Main Execution
+# ========================================================================
+
+echo "========================================================================="
+echo "🚀 CLICE DEVELOPMENT CONTAINER RUNNER"
+echo "========================================================================="
+echo "🏷️ Image: ${EXPANDED_IMAGE_NAME}"
+echo "🏷️ Version: ${VERSION}"
+echo "🐳 Container: ${CONTAINER_NAME}"
+echo "🔧 Compiler: ${COMPILER}"
+echo "📁 Project Root: ${PROJECT_ROOT}"
+echo "========================================================================="
+
+# ========================================================================
+# 🐳 Container Management
+# ========================================================================
 
 # Handle --reset: remove the existing container if it exists
 if [ "${RESET}" = "true" ]; then
     if docker ps -a --format '{{.Names}}' | grep -q "^${CONTAINER_NAME}$"; then
-        echo "Resetting container: stopping and removing existing container ${CONTAINER_NAME}..."
+        echo "🔄 Removing existing container: ${CONTAINER_NAME}..."
         docker stop "${CONTAINER_NAME}" >/dev/null 2>&1 || true
         docker rm "${CONTAINER_NAME}" >/dev/null 2>&1
-        echo "Container ${CONTAINER_NAME} has been removed."
+        echo "✅ Container ${CONTAINER_NAME} has been removed."
     else
-        echo "Container ${CONTAINER_NAME} does not exist, no need to reset."
+        echo "ℹ️ Container ${CONTAINER_NAME} does not exist."
     fi
+    echo "🏁 Reset completed. Run again without --reset to create new container."
     exit 0
 fi
 
-CONTAINER_WORKDIR="/clice"
+# ========================================================================
+# 🏗️ Image Management
+# ========================================================================
+
+# Handle --update (or a missing packed image): pull the packed image and re-expand it
+if [ "$UPDATE" = "true" ] || ! docker image inspect "${PACKED_IMAGE_NAME}" >/dev/null 2>&1; then
+    echo "🔄 Updating packed image (requested via --update or missing locally)..."
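+    # The expanded image is rebuilt from the freshly pulled packed image below;
+    # containers still using the old expanded image must be removed first (see --reset).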
+ + # Try to remove existing expanded image before pulling + if docker image inspect "${EXPANDED_IMAGE_NAME}" >/dev/null 2>&1; then + echo "๐Ÿงน Cleaning existing expanded image: ${EXPANDED_IMAGE_NAME}..." + if ! docker rmi "${EXPANDED_IMAGE_NAME}" >/dev/null 2>&1; then + echo "โŒ Failed to remove expanded image: ${EXPANDED_IMAGE_NAME}" + echo "๐Ÿ’ก This usually means a container is still using this image." + echo "๐Ÿ”ง Please run: $0 --reset to remove the container first, then try --update again." + echo "โš ๏ธ This ensures your container data safety - we won't accidentally delete your container." + exit 1 + fi + echo "โœ… Expanded image removed successfully" + fi + + echo "๐Ÿ“ฅ Pulling ${PACKED_IMAGE_NAME} from registry..." + if docker pull "${PACKED_IMAGE_NAME}"; then + echo "โœ… Successfully pulled image: ${PACKED_IMAGE_NAME}" + else + echo "โŒ Could not pull image: ${PACKED_IMAGE_NAME}" + echo "๐Ÿ’ก Please check if the image exists in the registry" + exit 1 + fi + + # Expand the packed image to development image using build.sh + echo "๐Ÿ—๏ธ Expanding packed image to development image..." + if "${SCRIPT_DIR}/build.sh" --compiler "${COMPILER}" --version "${VERSION}" --stage expanded-image; then + echo "โœ… Successfully created development image: ${EXPANDED_IMAGE_NAME}" + else + echo "โŒ Failed to expand packed image to development image" + exit 1 + fi -# Check if the container exists + echo "๐Ÿ Update completed." +fi + +# Check if the container exists and is using the current development image if docker ps -a --format '{{.Names}}' | grep -q "^${CONTAINER_NAME}$"; then - echo "===========================================" - echo "Attaching to existing container: ${CONTAINER_NAME}" - echo "From image: ${IMAGE_NAME}" - echo "Project mount: ${PROJECT_ROOT} -> ${CONTAINER_WORKDIR}" - echo "===========================================" + echo "๐Ÿ” Found existing container: ${CONTAINER_NAME}" + + # Get image IDs for comparison (more reliable than names) + CONTAINER_IMAGE_ID=$(docker inspect --format='{{.Image}}' "${CONTAINER_NAME}" 2>/dev/null || echo "") + EXPECTED_IMAGE_ID=$(docker inspect --format='{{.Id}}' "${EXPANDED_IMAGE_NAME}" 2>/dev/null || echo "") + + # Check target image and container match + if [ -n "$CONTAINER_IMAGE_ID" ] && [ -n "$EXPECTED_IMAGE_ID" ] && [ "$CONTAINER_IMAGE_ID" = "$EXPECTED_IMAGE_ID" ]; then + echo "โœ… Container is using current development image" + echo "๐Ÿš€ Starting and attaching to container..." + else + CONTAINER_IMAGE_NAME=$(docker inspect --format='{{.Config.Image}}' "${CONTAINER_NAME}" 2>/dev/null || echo "unknown") + echo "โš ๏ธ WARNING: Container image mismatch detected!" + echo " ๐Ÿ“ฆ Container using: ${CONTAINER_IMAGE_NAME} (ID: ${CONTAINER_IMAGE_ID})" + echo " ๐ŸŽฏ Expected image: ${EXPANDED_IMAGE_NAME} (ID: ${EXPECTED_IMAGE_ID})" + echo "" + echo "๐Ÿ’ก Your container is using a different image version." + echo "๐Ÿ›ก๏ธ To ensure data safety, please:" + echo " 1. Save any important work from the current container" + echo " 2. Run: $0 --reset to remove the outdated container" + echo " 3. Run: $0 to create a new container with the latest image" + echo "" + echo "๐Ÿš€ For now, connecting to your existing container..." 
+ fi + docker start "${CONTAINER_NAME}" >/dev/null - docker exec -it -w "${CONTAINER_WORKDIR}" "${CONTAINER_NAME}" /bin/bash + + if [ -n "$COMMAND" ]; then + docker exec -it -w "${CONTAINER_WORKDIR}" "${CONTAINER_NAME}" bash -c "$COMMAND" + else + docker exec -it -w "${CONTAINER_WORKDIR}" "${CONTAINER_NAME}" /bin/bash + fi exit 0 fi +# Create new container from development image DOCKER_RUN_ARGS=(-it -w "${CONTAINER_WORKDIR}") DOCKER_RUN_ARGS+=(--name "${CONTAINER_NAME}") DOCKER_RUN_ARGS+=(--mount "type=bind,src=${PROJECT_ROOT},target=${CONTAINER_WORKDIR}") -echo "===========================================" -echo "Creating and running new container: ${CONTAINER_NAME}" -echo "From image: ${IMAGE_NAME}" -echo "Project mount: ${PROJECT_ROOT} -> ${CONTAINER_WORKDIR}" -echo "===========================================" +echo "=========================================================================" +echo "๐Ÿš€ Creating new container: ${CONTAINER_NAME}" +echo "๐Ÿ“ฆ From image: ${EXPANDED_IMAGE_NAME}" +echo "๐Ÿ“ Project mount: ${PROJECT_ROOT} -> ${CONTAINER_WORKDIR}" +echo "=========================================================================" -docker run "${DOCKER_RUN_ARGS[@]}" "${IMAGE_NAME}" +if [ -n "$COMMAND" ]; then + echo "๐Ÿƒ Executing command: $COMMAND" + docker run --rm "${DOCKER_RUN_ARGS[@]}" "${EXPANDED_IMAGE_NAME}" bash -c "$COMMAND" +else + echo "๐Ÿš Starting interactive shell..." + docker run "${DOCKER_RUN_ARGS[@]}" "${EXPANDED_IMAGE_NAME}" +fi \ No newline at end of file diff --git a/docker/linux/test_build.sh b/docker/linux/test_build.sh new file mode 100644 index 00000000..78d17180 --- /dev/null +++ b/docker/linux/test_build.sh @@ -0,0 +1,301 @@ +#!/bin/bash +# ======================================================================== +# ๐Ÿงช Clice Build Test Script +# ======================================================================== +# File: docker/linux/test_build.sh +# Purpose: Test building Clice with different configurations +# +# This script tests the complete Clice build process with four different +# configurations to ensure the development environment is working correctly. +# It also runs xmake tests to validate the built binaries. 
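+#
+# Typical invocations (illustrative):
+#   ./docker/linux/test_build.sh                  # test every configuration
+#   ./docker/linux/test_build.sh --config debug   # single configuration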
+# ========================================================================
+
+set -e
+
+# ========================================================================
+# 🔧 Environment Setup
+# ========================================================================
+
+# SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+CONTAINER_NAME=""
+COMPILER="clang"
+PARALLEL_JOBS=$(nproc 2>/dev/null || echo "4")
+BUILD_DIR_BASE="/tmp/clice-test-builds"
+
+# ========================================================================
+# ⚙️ Configuration Matrix
+# ========================================================================
+
+# Define the four build configurations to test
+declare -A BUILD_CONFIGS=(
+    ["debug"]="--mode=debug"
+    ["release"]="--mode=release"
+    ["debug-optimized"]="--mode=debug --release=y"
+    ["release-optimized"]="--mode=release --release=y"
+)
+
+# ========================================================================
+# 📚 Usage Information
+# ========================================================================
+
+usage() {
+cat <<EOF
+🧪 Clice Build Test Script
+
+USAGE:
+    $0 [OPTIONS]
+
+OPTIONS:
+    --container <name>   Use specific container name
+    --compiler <name>    Target compiler (default: ${COMPILER})
+    --jobs <n>           Number of parallel jobs (default: ${PARALLEL_JOBS})
+    --config <name>      Test specific configuration only
+    --help, -h           Show this help message
+
+CONFIGURATIONS:
+    debug                Debug build (--mode=debug)
+    release              Release build (--mode=release)
+    debug-optimized      Debug with optimizations (--mode=debug --release=y)
+    release-optimized    Release with optimizations (--mode=release --release=y)
+
+EXAMPLES:
+    $0                           Test all configurations
+    $0 --config debug            Test debug configuration only
+    $0 --compiler gcc --jobs 8   Use GCC with 8 parallel jobs
+
+This script will:
+    1. Set up separate build directories for each configuration
+    2. Build Clice with each configuration
+    3. Run xmake tests for each build
+    4. Report results and timing information
+EOF
+}
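+
+# For reference, a single matrix entry drives roughly this sequence inside
+# the container (illustrative, using the defaults above; the real commands
+# live in test_build_configuration below):
+#
+#   mkdir -p /tmp/clice-test-builds/debug-optimized
+#   cd /tmp/clice-test-builds/debug-optimized
+#   xmake config --mode=debug --release=y --jobs=$(nproc)
+#   xmake build --jobs=$(nproc)
+#   xmake test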
+
+# ========================================================================
+# 🔍 Command Line Parsing
+# ========================================================================
+
+SPECIFIC_CONFIG=""
+
+while [ "$#" -gt 0 ]; do
+    case "$1" in
+        --container)
+            CONTAINER_NAME="$2"; shift 2;;
+        --compiler)
+            COMPILER="$2"; shift 2;;
+        --jobs)
+            PARALLEL_JOBS="$2"; shift 2;;
+        --config)
+            SPECIFIC_CONFIG="$2"; shift 2;;
+        -h|--help)
+            usage; exit 0;;
+        *)
+            echo "❌ Unknown parameter: $1" >&2; usage; exit 1;;
+    esac
+done
+
+# Auto-detect container if not specified.
+# NOTE: this default must match the name created by run.sh, which appends the
+# toolchain version (clice-dev-linux-<compiler>-v<version>); pass --container
+# explicitly if the default below does not match your running container.
+if [ -z "$CONTAINER_NAME" ]; then
+    CONTAINER_NAME="clice-dev-linux-${COMPILER}"
+fi
+
+# ========================================================================
+# 🧪 Test Functions
+# ========================================================================
+
+run_in_container() {
+    local cmd="$1"
+    echo "🏃 Running in container: $cmd"
+    docker exec -w "/clice" "$CONTAINER_NAME" bash -c "$cmd"
+}
+
+test_build_configuration() {
+    local config_name="$1"
+    local build_args="$2"
+    local build_dir="$BUILD_DIR_BASE/$config_name"
+
+    echo ""
+    echo "========================================================================="
+    echo "🔨 Testing Configuration: $config_name"
+    echo "========================================================================="
+    echo "📁 Build Directory: $build_dir"
+    echo "⚙️ Build Arguments: $build_args"
+    echo "🔧 Compiler: $COMPILER"
+    echo "⚡ Parallel Jobs: $PARALLEL_JOBS"
+    echo "========================================================================="
+
+    local start_time
+    start_time=$(date +%s)
+
+    # Create build directory
+    run_in_container "mkdir -p $build_dir"
+
+    # Configure build
+    echo "🔧 Configuring build..."
+    if ! run_in_container "cd $build_dir && xmake config $build_args --jobs=$PARALLEL_JOBS"; then
+        echo "❌ Configuration failed for $config_name"
+        return 1
+    fi
+
+    # Build project
+    echo "🏗️ Building project..."
+    if ! run_in_container "cd $build_dir && xmake build --jobs=$PARALLEL_JOBS"; then
+        echo "❌ Build failed for $config_name"
+        return 1
+    fi
+
+    # Run tests
+    echo "🧪 Running tests..."
+    if ! run_in_container "cd $build_dir && xmake test"; then
+        echo "⚠️ Tests failed for $config_name (build succeeded)"
+        # Don't return an error for test failures, just note them
+    fi
+
+    local end_time
+    end_time=$(date +%s)
+    local duration=$((end_time - start_time))
+
+    echo "✅ Configuration $config_name completed in ${duration}s"
+
+    # Store build info (note: $(date) expands on the host before docker exec runs)
+    run_in_container "cd $build_dir && echo 'Build completed at: $(date)' > build_info.txt"
+    run_in_container "cd $build_dir && echo 'Build duration: ${duration}s' >> build_info.txt"
+    run_in_container "cd $build_dir && echo 'Configuration: $config_name' >> build_info.txt"
+    run_in_container "cd $build_dir && echo 'Build args: $build_args' >> build_info.txt"
+
+    return 0
+}
+
+check_prerequisites() {
+    echo "🔍 Checking prerequisites..."
+
+    # Check if container exists and is running
+    if ! docker ps --format '{{.Names}}' | grep -q "^${CONTAINER_NAME}$"; then
+        echo "❌ Container $CONTAINER_NAME is not running"
+        echo "💡 Start it with: ./docker/linux/run.sh --compiler $COMPILER"
+        exit 1
+    fi
+
+    # Check if xmake is available in container
+    if ! 
run_in_container "command -v xmake >/dev/null"; then + echo "โŒ xmake is not available in container $CONTAINER_NAME" + echo "๐Ÿ’ก Make sure the container setup completed successfully" + exit 1 + fi + + # Check if we're in the clice project directory + if ! run_in_container "test -f xmake.lua"; then + echo "โŒ xmake.lua not found in container /clice directory" + echo "๐Ÿ’ก Make sure the project is properly mounted in the container" + exit 1 + fi + + echo "โœ… Prerequisites check passed" +} + +generate_report() { + echo "" + echo "=========================================================================" + echo "๐Ÿ“Š BUILD TEST REPORT" + echo "=========================================================================" + + local total_configs=0 + local successful_configs=0 + local failed_configs=() + + # Count and report results + for config_name in "${!BUILD_CONFIGS[@]}"; do + if [ -n "$SPECIFIC_CONFIG" ] && [ "$config_name" != "$SPECIFIC_CONFIG" ]; then + continue + fi + + total_configs=$((total_configs + 1)) + + local build_dir="$BUILD_DIR_BASE/$config_name" + if run_in_container "test -f $build_dir/build_info.txt" 2>/dev/null; then + successful_configs=$((successful_configs + 1)) + echo "โœ… $config_name: SUCCESS" + run_in_container "cat $build_dir/build_info.txt" | sed 's/^/ /' + else + failed_configs+=("$config_name") + echo "โŒ $config_name: FAILED" + fi + echo "" + done + + # Summary + echo "=========================================================================" + echo "๐Ÿ“ˆ SUMMARY" + echo "=========================================================================" + echo "๐ŸŽฏ Total Configurations: $total_configs" + echo "โœ… Successful: $successful_configs" + echo "โŒ Failed: ${#failed_configs[@]}" + + if [ ${#failed_configs[@]} -gt 0 ]; then + echo "" + echo "Failed configurations:" + for config in "${failed_configs[@]}"; do + echo " โ€ข $config" + done + fi + + echo "" + echo "๐Ÿ“ Build artifacts location: $BUILD_DIR_BASE" + echo "๐Ÿณ Container: $CONTAINER_NAME" + echo "๐Ÿ”ง Compiler: $COMPILER" + echo "=========================================================================" + + # Return exit code based on results + if [ ${#failed_configs[@]} -eq 0 ]; then + return 0 + else + return 1 + fi +} + +# ======================================================================== +# ๐Ÿš€ Main Execution +# ======================================================================== + +main() { + echo "=========================================================================" + echo "๐Ÿงช CLICE BUILD TEST RUNNER" + echo "=========================================================================" + echo "๐Ÿณ Container: $CONTAINER_NAME" + echo "๐Ÿ”ง Compiler: $COMPILER" + echo "โšก Parallel Jobs: $PARALLEL_JOBS" + if [ -n "$SPECIFIC_CONFIG" ]; then + echo "๐ŸŽฏ Testing Configuration: $SPECIFIC_CONFIG" + else + echo "๐ŸŽฏ Testing All Configurations: ${!BUILD_CONFIGS[*]}" + fi + echo "=========================================================================" + + # Check prerequisites + check_prerequisites + + # Clean up previous build directories + echo "๐Ÿงน Cleaning up previous build directories..." 
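+    # $BUILD_DIR_BASE lives inside the container, so this wipe only touches
+    # container state; each configuration below then starts from a clean tree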
+ run_in_container "rm -rf $BUILD_DIR_BASE" + + # Test configurations + for config_name in "${!BUILD_CONFIGS[@]}"; do + # Skip if testing specific configuration + if [ -n "$SPECIFIC_CONFIG" ] && [ "$config_name" != "$SPECIFIC_CONFIG" ]; then + continue + fi + + test_build_configuration "$config_name" "${BUILD_CONFIGS[$config_name]}" || true + done + + # Generate final report + if generate_report; then + echo "๐ŸŽ‰ All build tests completed successfully!" + exit 0 + else + echo "๐Ÿ’ฅ Some build tests failed!" + exit 1 + fi +} + +# Run main function +main "$@" \ No newline at end of file diff --git a/docker/linux/utility/build_clice_compiler_toolchain.py b/docker/linux/utility/build_clice_compiler_toolchain.py new file mode 100644 index 00000000..009ff255 --- /dev/null +++ b/docker/linux/utility/build_clice_compiler_toolchain.py @@ -0,0 +1,686 @@ +# ======================================================================== +# ๐Ÿš€ Clice Compiler Toolchain Builder +# ======================================================================== +# File: docker/linux/utility/build_clice_compiler_toolchain.py +# Purpose: Automated toolchain construction orchestrator +# +# This module implements a high-performance parallel build system for +# constructing the complete Clice compiler toolchain from source. +# +# Components Built: +# โ€ข glibc (GNU C Library) +# โ€ข GCC libstdc++ (C++ Standard Library) +# โ€ข Linux Kernel Headers +# โ€ข LLVM Project (prepared for future builds) +# +# Features: +# โ€ข Parallel task execution with dependency resolution +# โ€ข Robust error handling and recovery +# โ€ข GPG signature verification +# โ€ข Automated path fixing for relocatable builds +# ======================================================================== + +""" +๐Ÿ—๏ธ Clice Compiler Toolchain Builder + +A sophisticated build orchestrator that constructs a complete compiler toolchain +from source components using parallel execution and dependency management. + +This system builds the fundamental components required for the Clice development +environment, including system libraries, C++ standard library, and kernel headers. +All components are built with careful attention to compatibility and performance. + +The build process is organized into clearly defined stages: +1. ๐Ÿ“ฆ Setup - Install prerequisites and prepare environment +2. โฌ‡๏ธ Download - Fetch source archives with verification +3. ๐Ÿ“‚ Extract - Unpack source code to build directories +4. ๐Ÿ”จ Build - Compile and install components with proper configuration +5. ๐Ÿ”ง Post-process - Fix paths and finalize installation + +Each stage is executed in parallel where possible, with automatic dependency +resolution ensuring correct build order. 
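+
+The decisive ordering constraint, encoded in the dependency graph in main(),
+is that glibc and the Linux kernel headers must both be installed into the
+sysroot before libstdc++ is configured; downloads and extraction for the
+individual components proceed largely in parallel.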
+""" + +import sys +import os + +# ======================================================================== +# ๐Ÿ”ง Project Path Configuration +# ======================================================================== +# Dynamic project root discovery - enables importing from parent directories +# This allows the utility scripts to access shared configuration modules +project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..')) +if project_root not in sys.path: + sys.path.insert(0, project_root) + +# ======================================================================== +# ๐Ÿ“š Standard Library Imports +# ======================================================================== +import shutil # High-level file operations +import tarfile # Archive extraction capabilities +from typing import Dict, Set # Type hints for better code clarity + +# ======================================================================== +# ๐Ÿ› ๏ธ Build System Components +# ======================================================================== +from build_utils import ( + Job, # Individual build task representation + ParallelTaskScheduler, # High-performance parallel execution engine + download_file, # Accelerated file download with aria2c + run_command, # Shell command execution with environment control + verify_signature, # GPG signature verification + # Generic component build utilities + install_download_prerequisites, # Download prerequisite installation + install_extract_prerequisites, # Extract prerequisite installation + download_and_verify, # Component source download and verification + extract_source, # Component source extraction +) + +# ======================================================================== +# โš™๏ธ Configuration Constants +# ======================================================================== +from config.build_config import ( + TOOLCHAIN_BUILD_ROOT, # Build root directory + GPG_KEY_SERVER, # GPG keyserver list + TOOLCHAIN_BUILD_ENV_VARS, # Build environment variables + # Import component instances for structured access + TOOLCHAIN +) + +# ======================================================================== +# ๐ŸŽฏ Build Task Implementations +# ======================================================================== +# Each function represents a discrete build task that can be executed +# independently once its dependencies are satisfied. The parallel scheduler +# coordinates execution order based on the dependency graph. +# ======================================================================== + +# ======================================================================== +# ๐Ÿ“ฆ Environment Setup Tasks +# ======================================================================== + +def update_apt(): + """ + ๐Ÿ”„ Update APT Package Database + + Refreshes the APT package manager's local database to ensure we have + access to the latest package versions and security updates. + + This is the foundation step that must complete before any package + installation can proceed safely. 
+    """
+    print("🔄 [SETUP] Refreshing APT package database...")
+    run_command("apt update")
+
+
+
+def install_build_prerequisites():
+    """
+    🔨 Install Build Stage Prerequisites
+
+    Installs the complete build environment including:
+    • Core build tools (make, binutils, rsync)
+    • Text processing tools (gawk, bison) for glibc
+    • GCC 9 toolchain for glibc compilation
+    • GCC 14 toolchain for libstdc++ compilation
+
+    Note: We maintain multiple GCC versions because glibc requires
+    GCC < 10 to avoid linker symbol conflicts, while modern libstdc++
+    benefits from the latest compiler features.
+    """
+    print("🔨 [SETUP] Installing comprehensive build environment...")
+    print("    📋 Components: make, binutils, gawk, bison, gcc-9, gcc-14")
+    build_prerequisites = TOOLCHAIN.build_prerequisites
+    pkg_list = " ".join(build_prerequisites)
+    run_command(f"apt install -y --no-install-recommends -o APT::Keep-Downloaded-Packages=true {pkg_list}")
+    # The kernel's headers_install target expects a gcc binary on PATH,
+    # even though nothing is actually compiled during header installation
+    run_command("update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-9 90")
+    print("✅ [SETUP] Build environment ready")
+
+
+
+# ========================================================================
+# 📚 GNU C Library (glibc) Tasks
+# ========================================================================
+# glibc is the core system library providing POSIX API implementation.
+# It requires special handling due to its fundamental role in the system.
+# ========================================================================
+
+def fix_glibc_paths():
+    """
+    🔧 Fix Hardcoded Build Paths in glibc Installation
+
+    glibc's build process generates various text files (.la, .pc, linker scripts)
+    that contain hardcoded absolute paths from the build environment. These paths
+    need to be cleaned up to create relocatable installations.
+
+    This function scans all installed files and removes build-specific paths,
+    making the toolchain portable across different installation directories.
+
+    Process:
+    1. Walk through all installed files
+    2. Identify text files (skip binaries)
+    3. Search for hardcoded paths
+    4. Remove absolute path references
+    5. 
Preserve relative path structure + """ + search_path = TOOLCHAIN.sysroot_dir + print(f"๐Ÿ”ง [POST-PROCESS] Sanitizing hardcoded paths in {search_path}...") + + if not os.path.isdir(search_path): + print(f"โŒ [ERROR] Sysroot directory not found: '{search_path}'", file=sys.stderr) + return + + files_processed = 0 + for root, _, files in os.walk(search_path): + for filename in files: + file_path = os.path.join(root, filename) + + # Check if file is text-based (skip binaries) + try: + with open(file_path, 'r', encoding='utf-8') as f: + original_content = f.read() + if '\0' in original_content: # Contains null bytes (binary) + continue + except UnicodeDecodeError: + continue # File not readable or not text + + # Look for and remove hardcoded paths + replacement_path = f"{os.path.dirname(file_path)}/" + new_content = original_content.replace(replacement_path, "") + if new_content == original_content: + continue # No changes needed + + # Apply the path fix + print(f" ๐Ÿ”จ Fixing paths in: {os.path.relpath(file_path, search_path)}") + print(f" โžค Removing: '{replacement_path}'") + with open(file_path, 'w', encoding='utf-8') as f: + f.write(new_content) + files_processed += 1 + + print(f"โœ… [POST-PROCESS] Path fixing complete ({files_processed} files processed)") + + +def build_and_install_glibc(component): + """ + ๐Ÿ—๏ธ Build and Install GNU C Library (glibc) + + Configures, compiles, and installs glibc - the foundational C library + that provides POSIX API implementation and system call interface. + + Build Configuration: + โ€ข Uses GCC 9 (required: GCC < 10 to avoid symbol conflicts) + โ€ข Targets x86_64 architecture with 64-bit support + โ€ข Disables compiler warnings as errors (--disable-werror) + โ€ข Enables 64-bit libraries, disables 32-bit compatibility + + Post-installation includes path sanitization to ensure relocatable builds. + + Note: glibc is built out-of-tree in a separate build directory to + maintain clean separation between source and build artifacts. 
+ """ + print(f"๐Ÿ—๏ธ [BUILD] Starting {component.name} compilation...") + print(f" ๐Ÿ“‹ Using GCC 9 (required for glibc compatibility)") + print(f" ๐ŸŽฏ Target: {TOOLCHAIN.host_triplet} ({TOOLCHAIN.host_machine})") + print(f" ๐Ÿ“ Install: {TOOLCHAIN.sysroot_dir}/usr") + + # Prepare out-of-tree build directory + os.makedirs(component.build_dir, exist_ok=True) + + # Configure build environment with GCC 9 + compiler_env = { + 'CC': 'gcc-9', # GNU C compiler version 9 (full path) + 'CXX': 'g++-9', # GNU C++ compiler version 9 (full path) + 'CPP': 'cpp-9', # C preprocessor (explicit) + } + compiler_env.update(TOOLCHAIN_BUILD_ENV_VARS) + + # Configure glibc build + print(f"โš™๏ธ [CONFIG] Configuring glibc build...") + configure_script = os.path.join(component.src_dir, "configure") + configure_command = f"{configure_script} --host={TOOLCHAIN.host_triplet} --prefix={TOOLCHAIN.sysroot_dir}/usr --disable-werror --disable-lib32 --enable-lib64" + run_command(configure_command, cwd=component.build_dir, env=compiler_env) + + # Compile glibc + print(f"๐Ÿ”จ [COMPILE] Building glibc (this may take several minutes)...") + run_command("make -j", cwd=component.build_dir, env=compiler_env) + + # Install glibc to sysroot + print(f"๐Ÿ“ฆ [INSTALL] Installing glibc to sysroot...") + run_command(f"make install -j", cwd=component.build_dir, env=compiler_env) + + # Post-process to fix hardcoded paths + fix_glibc_paths() + print(f"โœ… [COMPLETE] glibc build and installation finished") + +# ======================================================================== +# ๐Ÿง Linux Kernel Headers Installation +# ======================================================================== +# Kernel headers provide system call definitions and kernel API interfaces +# required for userspace programs to interact with the Linux kernel. +# ======================================================================== + +def install_linux_headers(component): + """ + ๐Ÿง Install Linux Kernel Headers + + Extracts and installs sanitized Linux kernel headers that provide + system call definitions and kernel API interfaces for userspace programs. + + The kernel headers are essential for: + โ€ข System call interface definitions + โ€ข Kernel data structure layouts + โ€ข Device driver interfaces + โ€ข Architecture-specific constants + + Installation Process: + 1. Use kernel's built-in header installation system + 2. Filter out kernel-internal definitions + 3. Install sanitized headers to sysroot/usr + 4. 
Ensure compatibility with userspace programs + """ + install_path = os.path.join(TOOLCHAIN.sysroot_dir, "usr") + print(f"๐Ÿง [INSTALL] Installing Linux kernel headers...") + print(f" ๐Ÿ—๏ธ Architecture: {TOOLCHAIN.host_machine}") + print(f" ๐Ÿ“ Target: {install_path}") + + # Use command-line arguments instead of environment variables + # This ensures highest priority and avoids Makefile variable conflicts + # Install to /usr within sysroot for Clang compatibility + make_args: Dict[str, str] = { + "ARCH": TOOLCHAIN.host_machine, + "INSTALL_HDR_PATH": install_path + } + + # Also preserve any global build environment variables + make_env = {} + make_env.update(TOOLCHAIN_BUILD_ENV_VARS) + + # Build the make command with arguments + args_str = " ".join([f"{key}={value}" for key, value in make_args.items()]) + + # Install sanitized kernel headers using command-line parameters + run_command(f"make {args_str} -j headers_install", cwd=component.src_dir, env=make_env) + print(f"โœ… [COMPLETE] Linux kernel headers installed") + +# ======================================================================== +# ๐Ÿ› ๏ธ GCC Compiler Collection Tasks +# ======================================================================== +# GCC provides the C++ standard library (libstdc++) and essential runtime +# libraries. We build only the target libraries, not the full compiler. +# ======================================================================== + +def download_gcc_prerequisites(component): + """ + ๐Ÿ“ฆ Download GCC Mathematical Prerequisites + + Downloads and sets up the mathematical libraries required for GCC: + โ€ข GMP (GNU Multiple Precision Arithmetic Library) + โ€ข MPFR (Multiple Precision Floating-Point Reliable Library) + โ€ข MPC (Multiple Precision Complex Library) + + These libraries are essential for GCC's internal computations and + optimizations. The GCC source tree includes a convenience script + that automatically downloads the correct versions. + """ + print(f"๐Ÿ“ฆ [DOWNLOAD] Fetching {component.name} mathematical prerequisites...") + print(f" ๐Ÿ“‹ Components: GMP, MPFR, MPC") + run_command("./contrib/download_prerequisites", cwd=component.src_dir) + print(f"โœ… [DOWNLOAD] GCC prerequisites ready") + +def build_and_install_libstdcpp(component): + """ + ๐Ÿ—๏ธ Build and Install C++ Standard Library (libstdc++) + + Builds the C++ standard library and essential runtime libraries from GCC. + We configure GCC but only build the target libraries we need, avoiding + the full compiler build which would be unnecessary and time-consuming. + + Target Libraries Built: + โ€ข libgcc - Low-level runtime support (exception handling, etc.) 
+ โ€ข libstdc++-v3 - Complete C++ standard library + โ€ข libsanitizer - Address/memory/thread sanitizer support + โ€ข libatomic - Atomic operations for lock-free programming + โ€ข libbacktrace - Stack backtrace support for debugging + โ€ข libgomp - OpenMP parallel programming runtime + โ€ข libquadmath - Quadruple precision floating-point math + + Configuration highlights: + โ€ข Uses modern GCC 14 for latest C++ features + โ€ข Links against our custom glibc build + โ€ข Enables LTO for better optimization + โ€ข Static linking for portable distribution + """ + print(f"๐Ÿ—๏ธ [BUILD] Starting {component.name} C++ standard library build...") + print(f" ๐Ÿ“‹ Using GCC 14 (modern C++ support)") + print(f" ๐ŸŽฏ Target libraries: {', '.join(component.target_libs)}") + print(f" ๐Ÿ”— Linking with glibc v{TOOLCHAIN.glibc.version}") + + # Prepare out-of-tree build directory + os.makedirs(component.build_dir, exist_ok=True) + + # Configure build environment with modern GCC + compiler_env = { + 'CC': 'gcc-14', # Modern C compiler (full path) + 'CXX': 'g++-14', # Modern C++ compiler (full path) + 'CPP': 'cpp-14', # C preprocessor (explicit) + } + compiler_env.update(TOOLCHAIN_BUILD_ENV_VARS) + + # Configure GCC for target library building + print(f"โš™๏ธ [CONFIG] Configuring GCC for library-only build...") + configure_cmd = [ + f"{component.src_dir}/configure", + f"--host={TOOLCHAIN.host_triplet}", # Build system + f"--target={TOOLCHAIN.target_triplet}", # Target system + f"--prefix={TOOLCHAIN.sysroot_dir}/usr", # Installation prefix + f"--with-sysroot={TOOLCHAIN.sysroot_dir}", # System root for headers/libs + f"--with-glibc-version={TOOLCHAIN.glibc.version}", # glibc compatibility + "--with-gcc-major-version-only", # Use major version in paths for clang compatibility + "--disable-werror", # Don't fail on warnings + "--disable-multilib", # Single architecture only + "--disable-bootstrap", # Skip multi-stage build + "--enable-languages=c,c++", # Language support + "--enable-threads", # Threading support + "--enable-lto", # Link-time optimization + "--enable-nls", # Native language support + "--disable-shared", # Static libraries for portability + ] + run_command(" ".join(configure_cmd), cwd=component.build_dir, env=compiler_env) + + # Build only the target libraries we need + print(f"๐Ÿ”จ [COMPILE] Building target libraries (this will take significant time)...") + build_targets = " ".join([f"all-target-{lib}" for lib in component.target_libs]) + run_command(f"make -j {build_targets}", cwd=component.build_dir, env=compiler_env) + + # Install the built libraries + print(f"๐Ÿ“ฆ [INSTALL] Installing C++ standard library and runtime libraries...") + install_targets = " ".join([f"install-target-{lib}" for lib in component.target_libs]) + run_command(f"make -j {install_targets}", cwd=component.build_dir, env=compiler_env) + print(f"โœ… [COMPLETE] C++ standard library build finished") + +# ======================================================================== +# ๐ŸŽญ Main Build Orchestrator +# ======================================================================== + +def main(): + """ + ๐Ÿš€ Main Toolchain Build Orchestrator + + Coordinates the entire toolchain build process using a sophisticated + parallel task scheduler with dependency resolution. The build is organized + as a directed acyclic graph (DAG) where each node represents a build task + and edges represent dependencies. + + Build Phases: + 1. ๐Ÿ”„ Setup - System preparation and prerequisite installation + 2. 
โฌ‡๏ธ Download - Source code fetching with verification + 3. ๐Ÿ“‚ Extract - Archive extraction and preparation + 4. ๐Ÿ—๏ธ Build - Compilation and installation + + The scheduler automatically determines the optimal execution order and + runs independent tasks in parallel to minimize total build time. + + Dependency Graph Structure: + โ€ข Setup tasks run first and can execute in parallel + โ€ข Download tasks depend on download prerequisites + โ€ข Extract tasks depend on both download completion and extract tools + โ€ข Build tasks have complex interdependencies (glibc before libstdc++) + """ + print("๐Ÿš€ ========================================================================") + print("๐Ÿš€ CLICE COMPILER TOOLCHAIN BUILD SYSTEM") + print("๐Ÿš€ ========================================================================") + print(f"๐Ÿ“ Sysroot Directory: {TOOLCHAIN.sysroot_dir}") + print(f"๐ŸŽฏ Target Architecture: {TOOLCHAIN.target_triplet} ({TOOLCHAIN.target_machine})") + print(f"๐Ÿ“‹ Components: glibc, Linux headers, libstdc++, LLVM (prepared)") + print("๐Ÿš€ ========================================================================\n") + + # ==================================================================== + # ๐Ÿ“‹ Build Task Registry + # ==================================================================== + # Each job represents an atomic build operation that can be executed + # independently once its dependencies are satisfied. + # ==================================================================== + + all_jobs: Dict[str, Job] = { + # ๐Ÿ“ฆ System Setup Tasks + "update_apt": Job("update_apt", update_apt), + "install_download_prerequisites": Job("install_download_prerequisites", install_download_prerequisites), + "install_extract_prerequisites": Job("install_extract_prerequisites", install_extract_prerequisites), + "install_build_prerequisites": Job("install_build_prerequisites", install_build_prerequisites), + + # ๐Ÿ“š GNU C Library (glibc) Pipeline + "download_glibc": Job("download_glibc", download_and_verify, (TOOLCHAIN.glibc,)), + "extract_glibc": Job("extract_glibc", extract_source, (TOOLCHAIN.glibc,)), + "build_and_install_glibc": Job("build_and_install_glibc", build_and_install_glibc, (TOOLCHAIN.glibc,)), + + # ๐Ÿง Linux Kernel Headers Pipeline + "download_linux": Job("download_linux", download_and_verify, (TOOLCHAIN.linux,)), + "extract_linux": Job("extract_linux", extract_source, (TOOLCHAIN.linux,)), + "install_linux_headers": Job("install_linux_headers", install_linux_headers, (TOOLCHAIN.linux,)), + + # ๐Ÿ› ๏ธ GCC C++ Standard Library Pipeline + "download_gcc": Job("download_gcc", download_and_verify, (TOOLCHAIN.gcc,)), + "extract_gcc": Job("extract_gcc", extract_source, (TOOLCHAIN.gcc,)), + "download_gcc_prerequisites": Job("download_gcc_prerequisites", download_gcc_prerequisites, (TOOLCHAIN.gcc,)), + "build_and_install_libstdcpp": Job("build_and_install_libstdcpp", build_and_install_libstdcpp, (TOOLCHAIN.gcc,)), + + # โšก LLVM Project Pipeline (prepared for future builds) + "download_llvm": Job("download_llvm", download_and_verify, (TOOLCHAIN.llvm,)), + "extract_llvm": Job("extract_llvm", extract_source, (TOOLCHAIN.llvm,)), + } + + # ==================================================================== + # ๐Ÿ”— Dependency Graph Definition + # ==================================================================== + # Defines the build order constraints. Each task lists its prerequisites + # that must complete before it can begin execution. 
+ # ==================================================================== + + dependency_graph: Dict[str, Set[str]] = { + # ๐Ÿ“ฆ Setup Phase - Foundation tasks + "update_apt": set(), # No dependencies - can start immediately + "install_download_prerequisites": {"update_apt"}, + "install_extract_prerequisites": {"update_apt"}, + "install_build_prerequisites": {"update_apt"}, + + # ๐Ÿ“š glibc Build Pipeline + "download_glibc": {"install_download_prerequisites"}, + "extract_glibc": {"download_glibc", "install_extract_prerequisites"}, + "build_and_install_glibc": {"extract_glibc", "install_build_prerequisites"}, + + # ๐Ÿง Linux Headers Pipeline (can run parallel with glibc download/extract) + "download_linux": {"install_download_prerequisites"}, + "extract_linux": {"download_linux", "install_extract_prerequisites"}, + "install_linux_headers": {"extract_linux", "install_build_prerequisites"}, + + # ๐Ÿ› ๏ธ GCC Pipeline (requires glibc and kernel headers) + "download_gcc": {"install_download_prerequisites"}, + "extract_gcc": {"download_gcc", "install_extract_prerequisites"}, + "download_gcc_prerequisites": {"extract_gcc"}, + "build_and_install_libstdcpp": { + "download_gcc_prerequisites", # GCC math libraries ready + "build_and_install_glibc", # System library available + "install_linux_headers", # Kernel interfaces available + "install_build_prerequisites" # Build tools ready + }, + + # โšก LLVM Pipeline (prepared for future expansion) + "download_llvm": {"install_download_prerequisites"}, + "extract_llvm": {"download_llvm", "install_extract_prerequisites"} + } + + # ==================================================================== + # ๐Ÿš€ Launch Parallel Build System + # ==================================================================== + print(f"๐Ÿ“Š Initializing parallel scheduler with {len(all_jobs)} tasks...") + print(f"๐Ÿ”— Total dependencies: {sum(len(deps) for deps in dependency_graph.values())}") + print(f"โšก Maximum parallelism: {len([job for job, deps in dependency_graph.items() if not deps])} initial tasks\n") + + scheduler = ParallelTaskScheduler(all_jobs, dependency_graph) + scheduler.run() + + print("\n๐ŸŽ‰ ========================================================================") + print("๐ŸŽ‰ TOOLCHAIN BUILD COMPLETED SUCCESSFULLY!") + print("๐ŸŽ‰ ========================================================================") + print(f"โœ… All components built and installed to: {TOOLCHAIN.sysroot_dir}") + print("๐ŸŽ‰ ========================================================================") + +if __name__ == "__main__": + main() + +# Here's origin toolchain build bash, won't be updated, just for reference +# the only target is to build static link libstdc++, without full parallel build support + +# prerequests +""" +# aria2 is used for downloading files +# gawk bison are for glibc build +# bzip2 is for extracting tar.bz2 files when prepare gcc prerequisites +# rsync is required by linux kernel headers installation +apt install -y --no-install-recommends aria2 bzip2 rsync gawk bison +# gcc-9 for glibc build +# gcc-14 for libstdc++ build +apt install -y --no-install-recommends binutils gcc-9 libstdc++-9-dev gcc-14 g++-14 libstdc++-9-dev +update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-14 90 +update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-14 90 +update-alternatives --install /usr/bin/c++ c++ /usr/bin/g++-14 90 +update-alternatives --install /usr/bin/cc cc /usr/bin/gcc-14 90 +update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-9 80 
+update-alternatives --install /usr/bin/cc cc /usr/bin/gcc-9 80 +""" + +# generic command +""" +export ORIGIN='$$ORIGIN' # to generate rpath relative to the binary location +export HOST=x86_64-linux-gnu # this is not essential, could be moved to python config +export TARGET=${HOST} # this is not essential, could be moved to python config +export PREFIX="${TOOLCHAIN_BUILD_ROOT}/sysroot/${HOST}/${TARGET}/glibc${versions["glibc"]}-libstdc++${versions["gcc"]}-linux${versions["linux"]}" # this is not essential, could be moved to python config +export ARCH=x86_64 # this is not essential, could be moved to python config +""" + +# build glibc +""" +# Attention: gcc version less than 10 required, or multiple definition __libc_use_alloca would be error +update-alternatives --set gcc /usr/bin/gcc-9 +update-alternatives --set cc /usr/bin/gcc-9 + +mkdir -p GLIBC_CONFIG.build_dir +cd GLIBC_CONFIG.build_dir + +../configure --host=${HOST} --prefix=${PREFIX}/usr --disable-werror --disable-lib32 --enable-lib64 +make -j +make install -j + +# This script is intended to be run after the glibc build process. +# Its purpose is to find and replace placeholder paths within the generated text-based +# files (like .la, .pc, etc.) located under the ${PREFIX} directory. +# This is a common post-build step to fix hardcoded paths from the build environment. + +# --- Configuration --- + +# 1. The root directory to search within. It's expected that the +# PREFIX environment variable is set by the build environment. +SEARCH_PATH="${PREFIX}" + +# --- Script Body --- + +# Check if the search path is valid +if [ -z "$SEARCH_PATH" ] || [ ! -d "$SEARCH_PATH" ]; then + echo "Error: SEARCH_PATH is not set or is not a valid directory: '$SEARCH_PATH'" + exit 1 +fi + +# Check if the 'file' command is available +if ! command -v file &> /dev/null; then + echo "Error: 'file' command not found. Please install it to proceed." + exit 1 +fi + +echo "Removing absolute paths from text ld scripts..." +echo "Starting search in: '$SEARCH_PATH'" +echo "========================================" + +# Find all files, then check each one to see if it's a text file containing the search string. +# Using -print0 and read -d '' handles filenames with spaces or special characters. +find "$SEARCH_PATH" -type f -print0 | while IFS= read -r -d '' file; do + + # Check if the file is a text file + MIME_TYPE=$(file -b --mime-type "$file") + if [[ "$MIME_TYPE" != text/* ]]; then + echo "--- Skipping binary file: $file (Type: $MIME_TYPE) ---" + continue + fi + + # Get the directory where the file is located. + REPLACEMENT_PATH=$(dirname "$file") + REPLACEMENT_PATH="${REPLACEMENT_PATH}/" + + # Check if the file actually contains the search string before processing + if ! grep -q "$REPLACEMENT_PATH" "$file"; then + continue + fi + + # It's a text file and contains the string, so process it. + echo -e "\n--- Processing text file: $file ---" + + + + # Use grep to show where the changes will happen. + echo " Matches found on lines:" + grep -n "$REPLACEMENT_PATH" "$file" | sed 's/^/ /g' + + echo " Deleting '$REPLACEMENT_PATH'" + + # Perform the replacement in-place using sed. + # The delimiter `|` is used to avoid conflicts if paths contain `/`. + sed -i "s|$REPLACEMENT_PATH||g" "$file" + +done + +echo "========================================" +echo "Path replacement process finished." 
+""" + +# build linux kernel headers(parallel with glibc build) +""" +export LINUX_SRC_URL="https://github.com/torvalds/linux/archive/refs/tags/v${versions["linux"]}.zip" +git clone https://github.com/torvalds/linux.git --depth=1 LINUX_CONFIG.src_dir # should replace with download and extract using LINUX_SRC_URL +cd LINUX_CONFIG.src_dir +make ARCH=x86_64 INSTALL_HDR_PATH=${PREFIX}/usr -j headers_install +""" + +# build libstdc++(requires glibc built and kernel headers installed) +""" +# Download prerequisites for GCC +cd GCC_CONFIG.src_dir +contrib/download_prerequisites + +# build libstdc++ +# libstdc++ could not be built separately, so we build the whole GCC but only install libstdc++ +update-alternatives --set gcc /usr/bin/gcc-14 +update-alternatives --set g++ /usr/bin/g++-14 +update-alternatives --set cc /usr/bin/gcc-14 +update-alternatives --set c++ /usr/bin/g++-14 + +mkdir -p GCC_CONFIG.build_dir +cd GCC_CONFIG.build_dir + +../configure \ + --host=${TARGET} \ + --target=${TARGET} \ + --prefix=${PREFIX}/usr \ + --with-sysroot=${PREFIX} \ + --with-glibc-version=${versions["glibc"]} \ + --disable-werror \ + --disable-multilib \ + --disable-shared \ + --disable-bootstrap \ + --enable-languages=c,c++ \ + --enable-threads \ + --enable-lto \ + --enable-nls + +make -j all-target-libgcc all-target-libstdc++-v3 all-target-libsanitizer all-target-libatomic all-target-libbacktrace all-target-libgomp all-target-libquadmath +make -j install-target-libgcc install-target-libstdc++-v3 install-target-libsanitizer install-target-libatomic install-target-libbacktrace install-target-libgomp install-target-libquadmath +""" diff --git a/docker/linux/utility/build_utils.py b/docker/linux/utility/build_utils.py new file mode 100644 index 00000000..0a11610f --- /dev/null +++ b/docker/linux/utility/build_utils.py @@ -0,0 +1,466 @@ +import shutil +import sys +import os +import tarfile + +from config.build_config import TOOLCHAIN_VERSIONS, Component + +# Add project root to the Python path to allow importing 'config' module +project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..')) +if project_root not in sys.path: + sys.path.insert(0, project_root) + +import subprocess +import hashlib +import concurrent.futures +import time +import traceback +import json +from typing import Dict, Set, Tuple, Optional, List, Callable +from graphlib import TopologicalSorter +from collections import defaultdict +from enum import Enum + +def download_file(url: str, dest: str) -> None: + """ + Downloads a file from a URL to a destination using aria2c for acceleration. + Skips the download if the destination file already exists. + Disables SSL verification to work behind corporate proxies. + """ + if os.path.exists(dest): + print(f"File {os.path.basename(dest)} already exists. Skipping download.") + return + + dest_dir = os.path.dirname(dest) + dest_name = os.path.basename(dest) + + print(f"Downloading {url} to {dest} (SSL verification disabled)...") + + command = [ + "aria2c", + "--continue=true", + "--split=8", + "--max-connection-per-server=8", + "--min-split-size=1M", + "--check-certificate=false", # Corresponds to verify=False + f'--dir="{dest_dir}"', + f'--out="{dest_name}"', + f'"{url}"' + ] + + run_command(" ".join(command)) + print("Download complete.") + +def run_command(command: str, cwd: str = os.getcwd(), env: Dict[str, str] = {}) -> None: + """ + Executes a shell command, directing its output to the current shell. + Sets DEBIAN_FRONTEND to noninteractive to prevent interactive prompts. 
+ """ + print(f"--- Running command: {{{command}}} in {cwd or os.getcwd()} ---") + + # Setup environment + process_env = os.environ.copy() + process_env["DEBIAN_FRONTEND"] = "noninteractive" + if env: + process_env.update(env) + + # By not setting stdout/stderr, they are inherited from the parent process, + # which means the output will go directly to the user's terminal. + process = subprocess.Popen( + command, + shell=True, + cwd=cwd, + env=process_env, + executable="/bin/bash" + ) + + process.wait() + if process.returncode != 0: + raise subprocess.CalledProcessError(process.returncode, command) + +def verify_signature(signature_path: str, data_path: str) -> None: + """ + Simplified signature verification: only checks if files exist. + Skips actual signature verification when GPG environment is not available. + + Args: + signature_path: Path to the .asc signature file. + data_path: Path to the signed data file. + """ + print(f"--- Skipping signature verification for {os.path.basename(data_path)} (GPG not available) ---") + + if not os.path.exists(data_path): + raise RuntimeError(f"Data file {data_path} does not exist") + + if os.path.exists(signature_path): + print(f"Signature file found: {os.path.basename(signature_path)}") + else: + print(f"No signature file found: {os.path.basename(signature_path)}") + + print(f"File verification completed for {os.path.basename(data_path)}") + +def verify_sha256(file_path: str, expected_hash: str) -> bool: + """Verifies the SHA256 checksum of a file.""" + print(f"Verifying SHA256 for {file_path}...") + sha256 = hashlib.sha256() + with open(file_path, "rb") as f: + for chunk in iter(lambda: f.read(4096), b""): + sha256.update(chunk) + actual_hash = sha256.hexdigest() + + if actual_hash.lower() == expected_hash.lower(): + print("SHA256 verification successful.") + return True + else: + print(f"SHA256 verification failed! 
Expected {expected_hash}, got {actual_hash}") + return False + + +# === Parallel Task Scheduler Classes === + +class Job: + """Represents a single unit of work in the build process.""" + def __init__(self, name: str, func: Callable, args: Tuple = ()): + self.name = name + self.func = func + self.args = args + + def __repr__(self): + return f"Job(name='{self.name}')" + + +class TaskState(Enum): + """Task execution states for better tracking.""" + PENDING = "pending" + READY = "ready" + RUNNING = "running" + COMPLETED = "completed" + FAILED = "failed" + + +def run_job(job: Job): + """Executor function to run a job.""" + print(f"--- Starting Job: {job.name} ---") + job.func(*job.args) + print(f"--- Finished Job: {job.name} ---") + return job.name + + +class ParallelTaskScheduler: + """ + ๐Ÿš€ High-Performance Parallel Task Scheduler + + Features: + - Optimal parallel execution with minimal overhead + - Real-time dependency resolution + - Comprehensive progress tracking + - Robust error handling and recovery + - Efficient resource utilization + """ + + def __init__(self, jobs: Dict[str, Job], dependencies: Dict[str, Set[str]]): + self.jobs = jobs + self.dependencies = dependencies + self.task_states = {name: TaskState.PENDING for name in jobs} + self.running_futures = {} # future -> job_name mapping + self.completed_jobs = set() + self.failed_jobs = set() + + # Performance tracking + self.start_time = None + self.job_start_times = {} + self.job_durations = {} + + # Initialize dependency sorter + self.sorter = TopologicalSorter(dependencies) + self.sorter.prepare() + + # Reverse dependency mapping for efficient lookups + self.dependents = defaultdict(set) + for job, deps in dependencies.items(): + for dep in deps: + self.dependents[dep].add(job) + + def _get_ready_jobs(self) -> List[str]: + """Get all jobs that are ready to run (dependencies satisfied).""" + ready_jobs = [] + for job_name in self.sorter.get_ready(): + if self.task_states[job_name] == TaskState.PENDING: + ready_jobs.append(job_name) + return ready_jobs + + def _submit_job(self, executor, job_name: str): + """Submit a job for execution.""" + job = self.jobs[job_name] + self.task_states[job_name] = TaskState.RUNNING + self.job_start_times[job_name] = time.time() + + print(f"๐Ÿš€ [Scheduler] Starting job: {job_name}") + future = executor.submit(run_job, job) + self.running_futures[future] = job_name + return future + + def _handle_completed_job(self, job_name: str, success: bool = True): + """Handle job completion and update states.""" + duration = time.time() - self.job_start_times[job_name] + self.job_durations[job_name] = duration + + if success: + self.task_states[job_name] = TaskState.COMPLETED + self.completed_jobs.add(job_name) + self.sorter.done(job_name) + print(f"โœ… [Scheduler] Job '{job_name}' completed successfully in {duration:.2f}s") + else: + self.task_states[job_name] = TaskState.FAILED + self.failed_jobs.add(job_name) + print(f"โŒ [Scheduler] Job '{job_name}' failed after {duration:.2f}s") + + def _print_progress(self): + """Print current execution progress.""" + total = len(self.jobs) + completed = len(self.completed_jobs) + running = len(self.running_futures) + failed = len(self.failed_jobs) + pending = total - completed - running - failed + + elapsed = time.time() - self.start_time if self.start_time else 0 + + print(f"\n๐Ÿ“Š [Progress] Total: {total} | โœ… Done: {completed} | ๐Ÿƒ Running: {running} | โณ Pending: {pending} | โŒ Failed: {failed}") + print(f"โฑ๏ธ [Time] Elapsed: {elapsed:.1f}s | Running 
jobs: {list(self.running_futures.values())}") + + if completed > 0 and elapsed > 0: + rate = completed / elapsed + eta = (total - completed) / rate if rate > 0 else 0 + print(f"๐Ÿ“ˆ [Stats] Rate: {rate:.2f} jobs/s | ETA: {eta:.1f}s") + + def run(self, max_workers: Optional[int] = None): + """ + Execute all jobs with optimal parallel scheduling. + + Args: + max_workers: Maximum number of parallel workers (default: CPU count) + """ + print("๐ŸŽฏ [Scheduler] Initializing High-Performance Parallel Task Scheduler") + print(f"๐Ÿ“‹ [Scheduler] Total jobs: {len(self.jobs)}") + print(f"๐Ÿ”— [Scheduler] Total dependencies: {sum(len(deps) for deps in self.dependencies.values())}") + + self.start_time = time.time() + + with concurrent.futures.ProcessPoolExecutor(max_workers=max_workers) as executor: + # Submit initial ready jobs + ready_jobs = self._get_ready_jobs() + print(f"๐Ÿšฆ [Scheduler] Initial ready jobs: {ready_jobs}") + + for job_name in ready_jobs: + self._submit_job(executor, job_name) + + # Main execution loop + while self.running_futures: + self._print_progress() + + # Wait for at least one job to complete + done_futures, _ = concurrent.futures.wait( + self.running_futures.keys(), + return_when=concurrent.futures.FIRST_COMPLETED + ) + + # Process all completed jobs in this batch + newly_completed = [] + for future in done_futures: + job_name = self.running_futures[future] + + try: + result = future.result() # This will raise exception if job failed + self._handle_completed_job(job_name, success=True) + newly_completed.append(job_name) + except Exception as e: + print(f"๐Ÿ’ฅ [Scheduler] Job '{job_name}' failed with detailed error:") + self._handle_completed_job(job_name, success=False) + + # Implement fail-fast: cancel all running jobs and exit immediately + print(f"๐Ÿ›‘ [Scheduler] FAIL-FAST: Cancelling all remaining jobs due to failure in '{job_name}'") + for remaining_future in self.running_futures.keys(): + if remaining_future != future: + remaining_future.cancel() + remaining_job = self.running_futures[remaining_future] + print(f"โŒ [Scheduler] Cancelled job: {remaining_job}") + + # Clean up and raise the error immediately + raise RuntimeError(f"โŒ Build failed in job '{job_name}': {str(e)}") from e + + # Clean up completed future + del self.running_futures[future] + + # Submit any newly ready jobs + if newly_completed: + ready_jobs = self._get_ready_jobs() + for job_name in ready_jobs: + if job_name not in self.running_futures.values(): + self._submit_job(executor, job_name) + + # Final results + total_time = time.time() - self.start_time + self._print_final_report(total_time) + + # Note: With fail-fast implementation, we won't reach here if any job failed + # The exception will be raised immediately when the first job fails + + def _print_final_report(self, total_time: float): + """Print comprehensive execution report.""" + print("\n" + "="*60) + print("๐ŸŽ‰ PARALLEL TASK EXECUTION COMPLETED!") + print("="*60) + + print(f"โฑ๏ธ Total execution time: {total_time:.2f}s") + print(f"โœ… Successfully completed: {len(self.completed_jobs)}/{len(self.jobs)} jobs") + + if self.failed_jobs: + print(f"โŒ Failed jobs: {len(self.failed_jobs)}") + for job in self.failed_jobs: + print(f" - {job}") + + # Show job timing analysis + if self.job_durations: + print(f"\n๐Ÿ“Š Job Performance Analysis:") + sorted_jobs = sorted(self.job_durations.items(), key=lambda x: x[1], reverse=True) + print(f" Slowest jobs:") + for job, duration in sorted_jobs[:5]: + print(f" - {job:<30} {duration:>8.2f}s") + + 
avg_duration = sum(self.job_durations.values()) / len(self.job_durations)
+            print(f"    Average job duration: {avg_duration:.2f}s")
+
+            # Calculate theoretical sequential time vs actual parallel time.
+            # Efficiency is normalized against the machine's CPU count as an
+            # approximation of the worker pool size; self.running_futures is
+            # already empty by the time this report is printed.
+            sequential_time = sum(self.job_durations.values())
+            speedup = sequential_time / total_time if total_time > 0 else 1
+            worker_count = os.cpu_count() or 1
+            efficiency = speedup / worker_count * 100
+
+            print(f"    Sequential time would be: {sequential_time:.2f}s")
+            print(f"    Parallel speedup: {speedup:.2f}x")
+            print(f"    Parallel efficiency: {efficiency:.1f}%")
+
+        print("="*60)
+
+
+# ========================================================================
+# 🛠️ Component Build Utilities
+# ========================================================================
+# Generic functions for component-based building
+# ========================================================================
+
+def install_download_prerequisites(component: Component):
+    """
+    ⬇️ Install Download Stage Prerequisites
+
+    Installs essential tools required for fetching source code archives:
+    • aria2c - High-speed multi-connection downloader
+    • gnupg - GPG signature verification system
+
+    These tools enable secure, accelerated downloading of toolchain sources.
+    """
+    print("⬇️ [SETUP] Installing download prerequisites (aria2c, gnupg)...")
+    download_prerequisites = component.download_prerequisites
+    pkg_list = " ".join(download_prerequisites)
+    run_command(f"apt install -y --no-install-recommends -o APT::Keep-Downloaded-Packages=true {pkg_list}")
+    print("✅ [SETUP] Download tools ready")
+
+def install_extract_prerequisites(component: Component):
+    """
+    📂 Install Archive Extraction Prerequisites
+
+    Installs compression tools needed for extracting various archive formats:
+    • bzip2 - Required for GCC prerequisite archives (.tar.bz2)
+
+    Different toolchain components use different compression formats,
+    so we ensure all extraction tools are available.
+    """
+    print("📂 [SETUP] Installing archive extraction tools...")
+    extract_prerequisites = component.extract_prerequisites
+    pkg_list = " ".join(extract_prerequisites)
+    run_command(f"apt install -y --no-install-recommends -o APT::Keep-Downloaded-Packages=true {pkg_list}")
+    print("✅ [SETUP] Extraction tools ready")
+
+
+def download_and_verify(component):
+    """
+    ⬇️ Download and Verify Component Source
+
+    Downloads the source tarball for a specified toolchain component and
+    verifies its authenticity using GPG signatures when available.
+
+    The process includes:
+    1. Create organized download directory structure
+    2. Download source archive using high-speed aria2c
+    3. Download GPG signature file (if available)
+    4. Verify archive integrity and authenticity
+    5. 
Clean up on verification failure + + Args: + component: Component instance (glibc, gcc, llvm, or linux) + + Raises: + Exception: If download fails or signature verification fails + """ + version = component.version + print(f"โฌ‡๏ธ [DOWNLOAD] Fetching {component.name} v{version}...") + + # Ensure directories exist + os.makedirs(component.cache_dir, exist_ok=True) + + # Construct download paths and URLs + tarball_name = component.tarball_name + tarball_path = os.path.join(component.cache_dir, tarball_name) + tarball_url = component.tarball_url + + # Download main source archive + download_file(tarball_url, tarball_path) + + # Handle GPG signature verification when available + if component.verification_name_pattern: + signature_name = component.verification_name + signature_path = os.path.join(component.cache_dir, signature_name) + signature_url = component.verification_url + try: + print(f"๐Ÿ” [VERIFY] Downloading signature for {component.name}...") + download_file(signature_url, signature_path) + verify_signature(signature_path, tarball_path) + print(f"โœ… [VERIFY] {component.name} signature verified") + except Exception as e: + print(f"โŒ [ERROR] Signature verification failed for {component.name}: {e}", file=sys.stderr) + shutil.rmtree(component.cache_dir, ignore_errors=True) + raise + else: + print(f"โš ๏ธ [INFO] No signature verification available for {component.name}") + +def extract_source(component): + """ + ๐Ÿ“‚ Extract Component Source Archive + + Extracts the downloaded source tarball to the appropriate directory + structure, automatically detecting compression format. + + Supports multiple archive formats: + โ€ข .tar.xz (LZMA compression) - Used by most GNU projects + โ€ข .tar.gz (Gzip compression) - Used by Linux kernel + + Args: + component: Component instance (glibc, gcc, llvm, or linux) + """ + version = component.version + print(f"๐Ÿ“‚ [EXTRACT] Unpacking {component.name} v{version}...") + + # Ensure extraction directory exists + os.makedirs(component.src_dir, exist_ok=True) + + # Determine archive location and format + tarball_name = component.tarball_name_pattern.format(version=version) + tarball_path = os.path.join(component.cache_dir, tarball_name) + + print(f" ๐Ÿ“ Source: {tarball_path}") + print(f" ๐Ÿ“ Target: {component.extracted_dir}") + + # Auto-detect compression format and extract + mode = "r:xz" if tarball_path.endswith(".tar.xz") else "r:gz" + with tarfile.open(tarball_path, mode) as tar: + tar.extractall(path=component.extracted_dir, filter='data') + print(f"โœ… [EXTRACT] {component.name} extraction complete") diff --git a/docker/linux/utility/common.sh b/docker/linux/utility/common.sh new file mode 100644 index 00000000..e69de29b diff --git a/docker/linux/utility/create_release_package.py b/docker/linux/utility/create_release_package.py new file mode 100644 index 00000000..1986ebfb --- /dev/null +++ b/docker/linux/utility/create_release_package.py @@ -0,0 +1,251 @@ +#!/usr/bin/env python3 +""" +๐Ÿ“ฆ Clice Release Package Creator - Stage 3 + +This script handles the final packaging stage of the multi-stage Docker build. +It merges the outputs from Stage 1 (toolchain) and Stage 2 (dependencies), +creates a comprehensive manifest, and packages everything into a single +compressed archive for the release image. 
+ +Components Merged: + โ€ข Custom compiler toolchain from Stage 1 + โ€ข Development dependencies from Stage 2 + โ€ข Combined dependency manifest + โ€ข Final compressed release package + +The script ensures all components from both stages are properly combined +and packaged for efficient Docker layer caching and distribution. +""" + +import os +import sys +import tarfile +import json + +# Add project root to Python path +project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..')) +if project_root not in sys.path: + sys.path.insert(0, project_root) + +from config.build_config import ( + PACKED_RELEASE_PACKAGE_PATH, + RELEASE_PACKAGE_DIR, + ALL_COMPONENTS, + # Component instances for structured access + TOOLCHAIN +) + +# Import build utilities for parallel execution +from build_utils import ( + Job, + ParallelTaskScheduler +) + +# ======================================================================== +# ๐Ÿ“‹ Manifest Creation Functions +# ======================================================================== + +def create_comprehensive_manifest(): + """ + ๐Ÿ“‹ Create Comprehensive Release Manifest + + Creates a detailed manifest of all components based on ALL_COMPONENTS + configuration. Analyzes actual package directories and creates a + comprehensive overview of the release package contents. + """ + print("๐Ÿ“‹ Creating comprehensive release manifest based on ALL_COMPONENTS...") + + # Create base manifest structure + manifest = { + "release_info": { + "created_at": os.stat(RELEASE_PACKAGE_DIR).st_ctime if os.path.exists(RELEASE_PACKAGE_DIR) else None, + "stage": "final_release", + "version": "1.0.0" + }, + "components": {}, + "summary": { + "total_components": 0, + "available_components": 0, + "total_files": 0, + "total_size_mb": 0.0 + } + } + + # Process each component from ALL_COMPONENTS + for component in ALL_COMPONENTS: + component_dir = os.path.join(RELEASE_PACKAGE_DIR, component.name) + + component_info = { + "name": component.name, + "type": component.__class__.__name__, + "version": getattr(component, 'version', 'unknown'), + "file_count": 0, + "size_mb": 0.0 + } + + # Calculate component statistics + file_count = sum(len(files) for _, _, files in os.walk(component_dir)) + dir_size = sum( + os.path.getsize(os.path.join(dirpath, filename)) + for dirpath, _, filenames in os.walk(component_dir) + for filename in filenames + ) / (1024 * 1024) # Convert to MB + + component_info["file_count"] = file_count + component_info["size_mb"] = round(dir_size, 2) + + # Component-specific details + match component.name: + case "apt": + # Count APT packages + apt_packages = [] + for file in os.listdir(component_dir): + if file.endswith('.deb'): + pkg_name = file.split('_')[0] + if pkg_name not in apt_packages: + apt_packages.append(pkg_name) + component_info["packages"] = sorted(apt_packages) + component_info["package_count"] = len(apt_packages) + + case "python": + # Count Python packages + python_packages = [] + for file in os.listdir(component_dir): + if file.endswith('.whl'): + pkg_name = file.split('-')[0] + if pkg_name not in python_packages: + python_packages.append(pkg_name) + component_info["packages"] = sorted(python_packages) + component_info["package_count"] = len(python_packages) + + case "toolchain": + # Toolchain specific information + component_info["toolchain_details"] = { + "glibc_version": TOOLCHAIN.glibc.version, + "gcc_version": TOOLCHAIN.gcc.version, + "linux_version": TOOLCHAIN.linux.version, + "llvm_version": TOOLCHAIN.llvm.version, + } + + 
manifest["components"][component.name] = component_info + manifest["summary"]["total_components"] += 1 + manifest["summary"]["available_components"] += 1 + manifest["summary"]["total_files"] += component_info["file_count"] + manifest["summary"]["total_size_mb"] += component_info["size_mb"] + + # Round summary size + manifest["summary"]["total_size_mb"] = round(manifest["summary"]["total_size_mb"], 2) + + # Write manifest to release directory + manifest_file = os.path.join(RELEASE_PACKAGE_DIR, "manifest.json") + os.makedirs(RELEASE_PACKAGE_DIR, exist_ok=True) + with open(manifest_file, 'w') as f: + json.dump(manifest, f, indent=2) + + print(f"โœ… Comprehensive manifest created: {manifest_file}") + print(f"๐Ÿ“Š Components: {manifest['summary']['available_components']}/{manifest['summary']['total_components']} available") + print(f"๐Ÿ“ Total files: {manifest['summary']['total_files']}") + print(f"๐Ÿ“ฆ Total size: {manifest['summary']['total_size_mb']} MB") + +def create_final_release_package(): + """ + ๐Ÿ“ฆ Create Final Release Package + + Creates the final compressed archive containing all components from + both build stages. Uses maximum XZ compression for minimal size. + + The package contains: + - Custom compiler toolchain (Stage 1) + - Development dependencies (Stage 2) + - Comprehensive release manifest + - All directory structures preserved + """ + print("๐Ÿ“ฆ Creating final release package with maximum XZ compression...") + + if not os.path.exists(RELEASE_PACKAGE_DIR): + print("โš ๏ธ No release package directory found") + return + + # Ensure parent directory exists for the packed file + packed_dir = os.path.dirname(PACKED_RELEASE_PACKAGE_PATH) + os.makedirs(packed_dir, exist_ok=True) + + # Create archive with maximum XZ compression + print(f" ๐Ÿ“ Source: {RELEASE_PACKAGE_DIR}") + print(f" ๐Ÿ“ Target: {PACKED_RELEASE_PACKAGE_PATH}") + + try: + with tarfile.open(PACKED_RELEASE_PACKAGE_PATH, 'w:xz', preset=9) as tar: + # Add all subdirectories and files, preserving original directory structure + for item in os.listdir(RELEASE_PACKAGE_DIR): + item_path = os.path.join(RELEASE_PACKAGE_DIR, item) + print(f" ๐Ÿ“ฆ Adding: {item}") + tar.add(item_path, arcname=item) + + # Report package statistics + package_size_mb = os.path.getsize(PACKED_RELEASE_PACKAGE_PATH) / (1024 * 1024) + + # Calculate source directory size for compression ratio + source_size_mb = sum( + os.path.getsize(os.path.join(dirpath, filename)) + for dirpath, _, filenames in os.walk(RELEASE_PACKAGE_DIR) + for filename in filenames + ) / (1024 * 1024) + + compression_ratio = (source_size_mb - package_size_mb) / source_size_mb * 100 if source_size_mb > 0 else 0 + + print(f"โœ… Final release package created: {PACKED_RELEASE_PACKAGE_PATH}") + print(f"๐Ÿ“Š Source size: {source_size_mb:.1f} MB") + print(f"๐Ÿ“Š Package size: {package_size_mb:.1f} MB") + print(f"๐Ÿ“Š Compression ratio: {compression_ratio:.1f}%") + + except Exception as e: + print(f"โŒ Failed to create release package: {e}") + raise + +# ======================================================================== +# ๐Ÿš€ Main Execution +# ======================================================================== + +def main(): + """ + ๐Ÿš€ Main Stage 3 Execution + + Orchestrates the final packaging stage using parallel task execution: + 1. Verify stage outputs are present (Stage 1 & 2 already merged by Docker COPY) + 2. Create comprehensive manifest based on ALL_COMPONENTS + 3. 
Package everything into final release archive + + This creates the complete release package ready for deployment. + """ + print("๐Ÿš€ ========================================================================") + print("๐Ÿš€ CLICE RELEASE PACKAGE CREATOR - STAGE 3") + print("๐Ÿš€ ========================================================================") + print("๐Ÿ“ฆ Creating final release package from merged stage outputs") + print("๐Ÿš€ ========================================================================\n") + + # Define packaging jobs with proper dependency management + jobs = { + "create_manifest": Job("create_manifest", create_comprehensive_manifest, ()), + "create_package": Job("create_package", create_final_release_package, ()), + } + + # Define dependencies - package creation depends on manifest + dependencies = { + "create_manifest": set(), + "create_package": {"create_manifest"}, + } + + # Execute packaging tasks in parallel where possible + scheduler = ParallelTaskScheduler(jobs, dependencies) + scheduler.run() + + print("\n๐ŸŽ‰ ========================================================================") + print("๐ŸŽ‰ STAGE 3 COMPLETED SUCCESSFULLY!") + print("๐ŸŽ‰ ========================================================================") + print(f"โœ… Final release package: {PACKED_RELEASE_PACKAGE_PATH}") + print(f"โœ… Manifest: {RELEASE_PACKAGE_DIR}/manifest.json") + print("๐ŸŽ‰ ========================================================================") + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/docker/linux/utility/download_dependencies.py b/docker/linux/utility/download_dependencies.py new file mode 100644 index 00000000..c4f8876b --- /dev/null +++ b/docker/linux/utility/download_dependencies.py @@ -0,0 +1,372 @@ +# ======================================================================== +# ๐Ÿ“ฆ Clice Dependencies Downloader +# ======================================================================== +# File: docker/linux/utility/download_dependencies.py +# Purpose: Download all dev-container dependencies without installing them +# +# This module downloads all required packages, tools, and dependencies +# without installing them, maximizing Docker build cache efficiency. +# Downloaded packages are stored in cache directories for later installation. +# ======================================================================== + +""" +๐Ÿš€ Clice Dependencies Downloader + +Downloads all required development dependencies for the Clice dev container +without installing them. This approach maximizes Docker build cache efficiency +by separating the download phase from the installation phase. + +Components Downloaded: + โ€ข APT packages for development tools + โ€ข CMake binary releases + โ€ข XMake installation packages + โ€ข Python packages and dependencies + โ€ข LLVM prebuilt binaries (if available) + +The downloaded packages are stored in structured cache directories that +can be efficiently copied and cached by Docker's build system. 
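+
+The dependencies-downloader Docker stage invokes this script directly
+after activating the utility virtual environment:
+
+    python docker/linux/utility/download_dependencies.py
+
+Downloads land in per-component package directories (e.g. apt-*/, uv-*/,
+cmake-*/, xmake-*/) under the release package directory.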
+""" + +import os +import shutil +import sys +import subprocess +from typing import List + +# Add project root to Python path +project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..')) +if project_root not in sys.path: + sys.path.insert(0, project_root) + +from build_utils import ( + Job, + ParallelTaskScheduler, + download_file, + run_command, + verify_sha256 +) +from config.build_config import ( + RELEASE_PACKAGE_DIR, + PYPROJECT_PATH, + # Component instances for structured access + APT, UV, CMAKE, XMAKE +) + +# ======================================================================== +# ๐Ÿ› ๏ธ Download Task Implementations +# ======================================================================== + +def install_download_prerequisites() -> None: + """Install prerequisites required for downloading dependencies.""" + print("๐Ÿ“ฆ Installing dependencies download prerequisites...") + + # Update package lists first + run_command("apt update") + + # Install all download prerequisites (universal + APT-specific) + download_prerequisites = APT.download_prerequisites + run_command(f"apt install -y --no-install-recommends -o APT::Keep-Downloaded-Packages=true {' '.join(download_prerequisites)}") + + print(f"โœ… Installed {len(download_prerequisites)} download prerequisites") + +def get_apt_package_list(base_packages: List[str]) -> List[str]: + """ + Get all required APT packages using the exact StackOverflow command pattern. + + Uses: apt-cache depends --recurse ... | awk '$1 ~ /^Depends:/{print $2}' + Returns: Deduplicated list of packages to download + """ + print("๐Ÿ” Resolving recursive dependencies using StackOverflow command pattern...") + + all_packages = set() + + for package in base_packages: + try: + # Use the exact command from StackOverflow + apt_cache_cmd = [ + "apt-cache", "depends", "--recurse", "--no-recommends", + "--no-suggests", "--no-conflicts", "--no-breaks", + "--no-replaces", "--no-enhances", package + ] + + # Run apt-cache depends command + result = subprocess.run(apt_cache_cmd, capture_output=True, text=True, check=True) + + # Use awk pattern to extract dependency packages: $1 ~ /^Depends:/{print $2} + for line in result.stdout.split('\n'): + line = line.strip() + if line.startswith('Depends:'): + # Extract the package name after "Depends: " + parts = line.split() + if len(parts) >= 2: + pkg_name = parts[1] + # Remove architecture suffix and version constraints + pkg_name = pkg_name.split(':')[0].split('(')[0].split('[')[0].strip() + if pkg_name and not pkg_name.startswith('<') and pkg_name != '|': + all_packages.add(pkg_name) + + except subprocess.CalledProcessError as e: + print(f"โš ๏ธ Warning: Could not resolve dependencies for {package}: {e}") + # Add the original package as fallback + all_packages.add(package) + + # Filter available packages (remove virtual/unavailable packages) + print(f"๐Ÿ” Found {len(all_packages)} total dependency packages, filtering available ones...") + available_packages = [] + + for package in sorted(all_packages): + try: + # Quick availability check + result = subprocess.run( + ["apt-cache", "show", package], + capture_output=True, text=True, check=True + ) + if result.returncode == 0: + available_packages.append(package) + except subprocess.CalledProcessError: + # Skip unavailable packages + continue + + print(f"๐Ÿ“‹ Final package list: {len(available_packages)} available packages") + return available_packages + +def download_apt_packages() -> None: + """ + Download all APT packages using the exact 
StackOverflow command pattern. + + Two-stage approach: + 1. Get package list using apt-cache depends + awk pattern + 2. Download packages using apt-get download + """ + print("๐Ÿ“ฆ Downloading APT packages with StackOverflow command pattern...") + + # Create both download cache and package directories using component structure + os.makedirs(APT.cache_dir, exist_ok=True) + os.makedirs(APT.package_dir, exist_ok=True) + + # Stage 1: Get package list + base_packages = list(set(APT.all_packages)) + print(f"๐Ÿ“‹ Base packages from config: {len(base_packages)} packages") + + available_packages = get_apt_package_list(base_packages) + + # Stage 2: Download packages using apt-get download + print(f"๐Ÿ“ฅ Downloading {len(available_packages)} packages to cache: {APT.cache_dir}") + + + # Use the exact pattern: apt-get download $(package_list) + # Split into batches to avoid command line length limits + batch_size = 50 + downloaded_count = 0 + + for i in range(0, len(available_packages), batch_size): + batch = available_packages[i:i + batch_size] + + print(f"๐Ÿ“ฆ Downloading batch {i//batch_size + 1}/{(len(available_packages) + batch_size - 1)//batch_size} ({len(batch)} packages)...") + + try: + # Run apt-get download for this batch + result = subprocess.run( + ["apt-get", "download"] + batch, + cwd=APT.cache_dir, + capture_output=True, + text=True, + check=True + ) + downloaded_count += len(batch) + + except subprocess.CalledProcessError as e: + print(f"โš ๏ธ Batch download failed, trying individual packages...") + # Fallback: download packages individually + for package in batch: + try: + subprocess.run( + ["apt-get", "download", package], + cwd=APT.cache_dir, + capture_output=True, + text=True, + check=True + ) + downloaded_count += 1 + except subprocess.CalledProcessError: + print(f"โš ๏ธ Failed to download {package}") + + print(f"โœ… Downloaded {downloaded_count} packages to cache") + + # Copy packages from cache to package directory (only available_packages) + print("๐Ÿ“ฆ Copying packages from cache to package directory...") + copied_count = 0 + + # Create a set of expected package prefixes for efficient lookup + package_prefixes = set() + for pkg in available_packages: + package_prefixes.add(pkg + "_") + + for file in os.listdir(APT.cache_dir): + if file.endswith('.deb'): + # Check if this .deb file corresponds to one of our available packages + file_matches = any(file.startswith(prefix) for prefix in package_prefixes) + if file_matches: + src = os.path.join(APT.cache_dir, file) + dst = os.path.join(APT.package_dir, file) + shutil.copy2(src, dst) + copied_count += 1 + + print(f"โœ… APT packages ready in {APT.package_dir}") + print(f"๐Ÿ“Š Copied {copied_count} packages from cache") + print(f"๐Ÿ“ Cache directory: {APT.cache_dir} (preserved for future builds)") + +def download_cmake() -> None: + """Download CMake binary release with verification files and verify integrity.""" + print("๐Ÿ”ง Downloading CMake with verification...") + + # Create both cache and package directories using component structure + os.makedirs(CMAKE.cache_dir, exist_ok=True) + os.makedirs(CMAKE.package_dir, exist_ok=True) + + # Use CMake component configuration + cmake_filename = CMAKE.tarball_name + cmake_url = CMAKE.tarball_url + + # Download to cache directory first + cmake_cache_file = f"{CMAKE.cache_dir}/{cmake_filename}" + cmake_package_file = f"{CMAKE.package_dir}/{cmake_filename}" + + # Download CMake installer (.sh script) to cache + download_file(cmake_url, cmake_cache_file) + + # Download verification files to 
cache using component structure + sha_url = CMAKE.verification_url + sha_filename = CMAKE.verification_name + sha_cache_file = f"{CMAKE.cache_dir}/{sha_filename}" + + # Download SHA file for integrity verification + download_file(sha_url, sha_cache_file) + + # Verify CMake file integrity using build_utils + with open(sha_cache_file, 'r') as f: + sha_content = f.read().strip() + # Parse SHA file format: "hash filename" + for line in sha_content.split('\n'): + if cmake_filename in line: + expected_hash = line.split()[0] + if verify_sha256(cmake_cache_file, expected_hash): + print("โœ… CMake file integrity verification successful") + else: + print("โŒ CMake file integrity verification failed") + # Delete all files in cache directory on verification failure + shutil.rmtree(CMAKE.cache_dir, ignore_errors=True) + raise RuntimeError("CMake file integrity verification failed") + break + else: + print("โš ๏ธ CMake file not found in SHA file, skipping verification") + + # Copy verified file from cache to package directory + shutil.copy2(cmake_cache_file, cmake_package_file) + + print(f"โœ… CMake downloaded to cache: {cmake_cache_file}") + print(f"๐Ÿ“ฆ CMake copied to package: {cmake_package_file}") + +def download_xmake() -> None: + """Download XMake bundle for direct installation.""" + print("๐Ÿ”จ Downloading XMake bundle...") + + # Create both cache and package directories using component structure + os.makedirs(XMAKE.cache_dir, exist_ok=True) + os.makedirs(XMAKE.package_dir, exist_ok=True) + + # Use XMake component configuration + xmake_filename = XMAKE.tarball_name + xmake_url = XMAKE.tarball_url + + # Download to cache directory first + xmake_cache_file = f"{XMAKE.cache_dir}/{xmake_filename}" + xmake_package_file = f"{XMAKE.package_dir}/{xmake_filename}" + + # Download XMake bundle to cache + download_file(xmake_url, xmake_cache_file) + + # Make it executable in cache + os.chmod(xmake_cache_file, 0o755) + + # Copy from cache to package directory + shutil.copy2(xmake_cache_file, xmake_package_file) + + print(f"โœ… XMake downloaded to cache: {xmake_cache_file}") + print(f"๐Ÿ“ฆ XMake copied to package: {xmake_package_file}") + +def download_python_packages() -> None: + """Download Python packages using uv.""" + print("๐Ÿ Downloading Python packages...") + + # Create both cache and package directories + os.makedirs(UV.cache_dir, exist_ok=True) + os.makedirs(UV.package_dir, exist_ok=True) + + # Download packages specified in pyproject.toml to cache directory first + if os.path.exists(PYPROJECT_PATH): + print(f"๐Ÿ“ฅ Downloading Python packages to cache: {UV.cache_dir}") + # Use uv to download packages to cache directory + run_command(f"uv sync --cache-dir {UV.cache_dir}") + + # Copy only wheel files from cache to package directory + print("๐Ÿ“ฆ Copying wheel files from cache to package directory...") + copied_count = 0 + + # Find all .whl files in cache directory recursively + for root, dirs, files in os.walk(UV.cache_dir): + for file in files: + if file.endswith('.whl'): + src = os.path.join(root, file) + dst = os.path.join(UV.package_dir, file) + # Only copy if not already exists to avoid duplicates + if not os.path.exists(dst): + shutil.copy2(src, dst) + copied_count += 1 + + print(f"๐Ÿ“Š Copied {copied_count} wheel files to package directory") + print(f"โœ… Python packages cached to {UV.cache_dir}") + print(f"๐Ÿ“ฆ Python packages ready in {UV.package_dir}") + else: + print(f"โš ๏ธ pyproject.toml not found at {PYPROJECT_PATH}") + +# LLVM downloading removed as per requirements + +# 
======================================================================== +# ๐Ÿš€ Main Execution +# ======================================================================== + +def main(): + """Main execution function with parallel task scheduling.""" + print("๐Ÿš€ Starting Clice Dependencies Download Process...") + + # Create main cache directory + os.makedirs(RELEASE_PACKAGE_DIR, exist_ok=True) + + # Define download jobs with proper dependency management + jobs = { + "install_download_prerequisites": Job("install_download_prerequisites", install_download_prerequisites, ()), + "download_apt_packages": Job("download_apt_packages", download_apt_packages, ()), + "download_cmake": Job("download_cmake", download_cmake, ()), + "download_xmake": Job("download_xmake", download_xmake, ()), + "download_python_packages": Job("download_python_packages", download_python_packages, ()), + } + + # Define dependencies - all downloads depend on prerequisites installation + dependencies = { + "install_download_prerequisites": set(), + "download_apt_packages": {"install_download_prerequisites"}, + "download_cmake": {"install_download_prerequisites"}, + "download_xmake": {"install_download_prerequisites"}, + "download_python_packages": {"install_download_prerequisites"}, + } + + # Execute downloads in parallel where possible + scheduler = ParallelTaskScheduler(jobs, dependencies) + scheduler.run() + + print("โœ… All dependencies downloaded successfully!") + print(f"๐Ÿ“ Cache directory: {RELEASE_PACKAGE_DIR}") + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/docker/linux/utility/local_setup.py b/docker/linux/utility/local_setup.py new file mode 100644 index 00000000..5734c2e3 --- /dev/null +++ b/docker/linux/utility/local_setup.py @@ -0,0 +1,258 @@ +# ======================================================================== +# ๐Ÿš€ Clice Local Setup +# ======================================================================== +# File: docker/linux/utility/local_setup.py +# Purpose: Final setup phase for the Clice development container +# +# This script handles the installation and configuration of all pre-downloaded +# packages and tools to complete the development environment setup. +# ======================================================================== + +""" +๐Ÿš€ Clice Local Setup Script + +Handles the final setup phase of the Clice development container by: +1. Installing all pre-downloaded APT packages +2. Extracting and installing the custom toolchain +3. Setting up CMake and XMake build systems +4. Installing Python packages via uv +5. Configuring environment variables and PATH + +This script maximizes Docker build cache efficiency by separating the +installation phase from the download phase, allowing for independent +caching of each step. 
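+
+The expanded-image Docker stage runs this script as the expansion step,
+after activating the utility virtual environment:
+
+    python docker/linux/utility/local_setup.py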
+""" + +import os +import sys +import tarfile + +# Ensure utility directory is in Python path for imports +utility_path = os.path.dirname(os.path.abspath(__file__)) +if utility_path not in sys.path: + sys.path.insert(0, utility_path) + +# Import all configuration from build_config using new component structure +from config.build_config import ( + ENVIRONMENT_CONFIG_FILE, + RELEASE_PACKAGE_DIR, + PACKED_RELEASE_PACKAGE_PATH, + DEVELOPMENT_SHELL_VARS, + PROJECT_ROOT, + # Import component instances for structured access + APT, UV, CMAKE, XMAKE, TOOLCHAIN +) + +# Import build_utils for run_command and other utilities +from build_utils import ( + Job, + ParallelTaskScheduler, + run_command +) + +# ======================================================================== +# ๐ŸŒ Environment Setup Functions +# ======================================================================== + +def setup_environment_variables(): + """Write environment variables to /root/.bashrc for persistent shell environment.""" + print("๐ŸŒ Setting up environment variables...") + + bashrc_path = ENVIRONMENT_CONFIG_FILE + + # Ensure .bashrc exists + if not os.path.exists(bashrc_path): + with open(bashrc_path, 'w') as f: + f.write("# Generated by Clice local setup\n") + + # Read existing content to avoid duplicates + existing_content = "" + with open(bashrc_path, 'r') as f: + existing_content = f.read() + + # Add environment variables from build config + for key, value in DEVELOPMENT_SHELL_VARS.items(): + env_line = f"export {key}=\"{value}\"\n" + if f'export {key}=' not in existing_content: + with open(bashrc_path, 'a') as f: + f.write(env_line) + print(f" โœ… Added {key}={value}") + else: + print(f" โœ… {key} already set") + + print("โœ… Environment variables configured") + +# ======================================================================== +# ๐Ÿ“ฆ Package Installation Functions +# ======================================================================== + +def install_apt_packages(apt_component): + """Install APT development packages from downloaded .deb files.""" + print("๐Ÿ“ฆ Installing APT development packages...") + + if not os.path.exists(apt_component.package_dir): + print(f"โš ๏ธ APT package directory not found: {apt_component.package_dir}") + print("Downloading packages first...") + return + + # Install all .deb files found in the package directory + deb_files = [f for f in os.listdir(apt_component.package_dir) if f.endswith('.deb')] + + if deb_files: + # Use dpkg to install all .deb files, with apt-get fallback for dependencies + run_command(f"dpkg -i {apt_component.package_dir}/*.deb || apt-get install -f -y") + print(f"โœ… Installed {len(deb_files)} APT packages") + else: + print("โš ๏ธ No .deb files found in APT package directory") + +def install_toolchain(toolchain_component): + """Install the custom toolchain.""" + print("๐Ÿ”ง Installing custom toolchain...") + + # Toolchain is already installed at toolchain package directory within RELEASE_PACKAGE_DIR + if not os.path.exists(toolchain_component.package_dir): + print("โš ๏ธ Toolchain package directory not found") + return + + print(f"โœ… Toolchain available at: {toolchain_component.package_dir}") + + print(f"โœ… Toolchain setup completed") + +def install_cmake(cmake_component): + """Install CMake from pre-downloaded installer.""" + print("๐Ÿ”ง Installing CMake...") + + cmake_installer_filename = cmake_component.tarball_name + cmake_installer_path = os.path.join(cmake_component.package_dir, cmake_installer_filename) + + if not 
os.path.exists(cmake_installer_path): + print(f"โš ๏ธ CMake installer not found: {cmake_installer_path}") + return + + # Make installer executable and run it + run_command(f"chmod +x {cmake_installer_path}") + + # Use CMAKE component package_dir as install directory + cmake_install_dir = cmake_component.package_dir + os.makedirs(cmake_install_dir, exist_ok=True) + + # Install CMake to the component package directory + run_command(f"{cmake_installer_path} --prefix={cmake_install_dir} --skip-license") + + # Create symlinks to system PATH + cmake_bin_dir = f"{cmake_install_dir}/bin" + if os.path.exists(cmake_bin_dir): + for binary in os.listdir(cmake_bin_dir): + src = os.path.join(cmake_bin_dir, binary) + dst = f"/usr/local/bin/{binary}" + if os.path.isfile(src) and not os.path.exists(dst): + os.symlink(src, dst) + + print(f"โœ… CMake installed to {cmake_install_dir}") + +def install_xmake(xmake_component): + """Install XMake from pre-downloaded package.""" + print("๐Ÿ”จ Installing XMake...") + + xmake_filename = xmake_component.tarball_name + xmake_path = os.path.join(xmake_component.package_dir, xmake_filename) + + if not os.path.exists(xmake_path): + print(f"โš ๏ธ XMake package not found: {xmake_path}") + return + + # Make XMake bundle executable + run_command(f"chmod +x {xmake_path}") + + # Install XMake using update-alternatives + run_command(f"update-alternatives --install /usr/bin/xmake xmake {xmake_path} 100") + + print("โœ… XMake installed successfully") + +def install_python_packages(uv_component): + """Install Python packages from uv cache.""" + print("๐Ÿ Installing Python packages...") + + if not os.path.exists(uv_component.package_dir): + print(f"โš ๏ธ Python package directory not found: {uv_component.package_dir}") + return + + # Install wheel files found in the UV package directory + wheel_files = [f for f in os.listdir(uv_component.package_dir) if f.endswith('.whl')] + + if wheel_files: + # Use uv to install from the cached packages + wheel_paths = [os.path.join(uv_component.package_dir, f) for f in wheel_files] + run_command(f"uv pip install --find-links {uv_component.package_dir} --no-index --force-reinstall --no-deps {' '.join(wheel_paths)}") + print(f"โœ… Installed {len(wheel_files)} Python packages") + else: + print("โš ๏ธ No wheel files found in UV package directory") + +# ======================================================================== +# ๐Ÿ“‹ Setup Orchestration +# ======================================================================== + +def setup_git_safe_directory(): + """Configure git to treat the workspace as safe.""" + print("๐Ÿ”ง Configuring git safe directory...") + + run_command(f"git config --global --add safe.directory {PROJECT_ROOT}") + print("โœ… Git safe directory configured") + +def extract_release_archive(): + """Extract the release archive to setup the environment.""" + print("๐Ÿ“ฆ Extracting release archive...") + + if not os.path.exists(PACKED_RELEASE_PACKAGE_PATH): + print("โš ๏ธ Packed release archive not found") + return + + # Extract the release archive to the appropriate location + release_parent_dir = os.path.dirname(RELEASE_PACKAGE_DIR) + os.makedirs(release_parent_dir, exist_ok=True) + + # Use Python tarfile module for extraction (reverse of packaging) + print(f" ๐Ÿ“ Source: {PACKED_RELEASE_PACKAGE_PATH}") + print(f" ๐Ÿ“ Target: {release_parent_dir}") + + with tarfile.open(PACKED_RELEASE_PACKAGE_PATH, 'r:xz') as tar: + tar.extractall(path=release_parent_dir, filter='data') + print(f"โœ… Release archive extracted to: 
{RELEASE_PACKAGE_DIR}") + +def main(): + """Main setup orchestration function with parallel task scheduling.""" + print("๐Ÿš€ Unpacking Clice Dev Container...") + + # Define setup jobs with proper dependency management + jobs = { + "extract_release_archive": Job("extract_release_archive", extract_release_archive, ()), + "setup_environment_variables": Job("setup_environment_variables", setup_environment_variables, ()), + "setup_git_safe_directory": Job("setup_git_safe_directory", setup_git_safe_directory, ()), + "install_apt_packages": Job("install_apt_packages", install_apt_packages, (APT,)), + "install_toolchain": Job("install_toolchain", install_toolchain, (TOOLCHAIN,)), + "install_cmake": Job("install_cmake", install_cmake, (CMAKE,)), + "install_xmake": Job("install_xmake", install_xmake, (XMAKE,)), + "install_python_packages": Job("install_python_packages", install_python_packages, (UV,)), + } + + # Define dependencies - some installations can run in parallel after archive extraction + dependencies = { + "extract_release_archive": set(), + "setup_environment_variables": set(), + "setup_git_safe_directory": set(), + "install_apt_packages": {"extract_release_archive"}, + "install_toolchain": {"extract_release_archive"}, + "install_cmake": {"extract_release_archive"}, + "install_xmake": {"extract_release_archive"}, + "install_python_packages": {"extract_release_archive"}, + } + + # Execute setup tasks in parallel where possible + scheduler = ParallelTaskScheduler(jobs, dependencies) + scheduler.run() + + print("โœ… Clice development environment setup completed successfully!") + print(f"๐Ÿ“ฆ Components installed from: {RELEASE_PACKAGE_DIR}") + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/docker/linux/utility/pyproject.toml b/docker/linux/utility/pyproject.toml new file mode 100644 index 00000000..2ba123e8 --- /dev/null +++ b/docker/linux/utility/pyproject.toml @@ -0,0 +1,8 @@ +[project] +name = "clice-dev-utils" +version = "0.1.0" +description = "Python dependencies for clice's development container build scripts." +requires-python = ">=3.13" +dependencies = [ + "python-gnupg" +] diff --git a/docs/en/dev/build.md b/docs/en/dev/build.md index 5853464d..474a4b86 100644 --- a/docs/en/dev/build.md +++ b/docs/en/dev/build.md @@ -79,4 +79,181 @@ cd llvm-project python3 /scripts/build-llvm-libs.py debug ``` -You can also refer to LLVM's official build tutorial: [Building LLVM with CMake](https://llvm.org/docs/CMake.html). +You can also refer to llvm's official build tutorial: [Building LLVM with CMake](https://llvm.org/docs/CMake.html). + +### GCC Toolchain + +clice requires GCC libstdc++ >= 14. You could use a different GCC toolchain and also link statically against its libstdc++: + +```bash +cmake .. -DCMAKE_C_FLAGS="--gcc-toolchain=/usr/local/gcc-14.3.0/" \ + -DCMAKE_CXX_FLAGS="--gcc-toolchain=/usr/local/gcc-14.3.0/" \ + -DCMAKE_EXE_LINKER_FLAGS="-static-libgcc -static-libstdc++" +``` + +## Building + +After handling the prerequisites, you can start building clice. We provide two build methods: cmake/xmake. 
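+
+Before configuring, you may want to verify that the toolchain you point clice at actually provides libstdc++ >= 14. A minimal check (a sketch; the `/usr/local/gcc-14.3.0/` prefix follows the example above and may differ on your system):
+
+```bash
+$ /usr/local/gcc-14.3.0/bin/g++ --version
+$ echo '#include <version>' | /usr/local/gcc-14.3.0/bin/g++ -x c++ -E -dM - | grep __GLIBCXX__
+```
+
+The GCC release reported by `--version` determines the bundled libstdc++ version; `__GLIBCXX__` is libstdc++'s date-based version macro.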
+
+### CMake
+
+Below are the CMake configuration options supported by clice:
+
+- `LLVM_INSTALL_PATH` specifies the installation path of the LLVM libraries
+- `CLICE_ENABLE_TEST` controls whether clice's unit tests are built
+
+For example:
+
+```bash
+$ cmake -B build -G Ninja -DCMAKE_BUILD_TYPE=Debug -DLLVM_INSTALL_PATH="./.llvm" -DCLICE_ENABLE_TEST=ON -DCMAKE_C_COMPILER=clang -DCMAKE_CXX_COMPILER=clang++
+$ cmake --build build
+```
+
+### Xmake
+
+Use the following command to build clice:
+
+```bash
+$ xmake f -c --dev=true --mode=debug --toolchain=clang --llvm="./.llvm" --enable_test=true
+$ xmake build --all
+```
+
+> `--llvm` is optional. If not specified, xmake will automatically download our precompiled binaries.
+
+## Dev Container
+
+We provide a complete Docker development container solution with pre-configured compilers, build tools, and all necessary dependencies, eliminating environment configuration issues.
+
+### ๐Ÿš€ Quick Start
+
+#### Build Development Container
+```bash
+# Build default container (clang + latest version)
+./docker/linux/build.sh
+
+# Build container with specific compiler and version
+./docker/linux/build.sh --compiler gcc --version v1.2.3
+```
+
+#### Run Development Container
+```bash
+# Run default container
+./docker/linux/run.sh
+
+# Run container with specific compiler
+./docker/linux/run.sh --compiler clang
+./docker/linux/run.sh --compiler gcc
+
+# Run container with specific version
+./docker/linux/run.sh --compiler clang --version v1.2.3
+```
+
+#### Container Management
+```bash
+# Reset container (remove existing container)
+./docker/linux/run.sh --reset
+
+# Update container image (pull latest version)
+./docker/linux/run.sh --update
+
+# Rebuild container image
+./docker/linux/run.sh --rebuild
+```
+
+#### Multi-version Testing
+```bash
+# Test different compilers
+./docker/linux/run.sh --compiler gcc
+./docker/linux/run.sh --compiler clang
+
+# Test specific versions
+./docker/linux/run.sh --version v1.0.0
+./docker/linux/run.sh --version latest
+```
+
+### ๐Ÿ“‹ Container Configuration
+
+#### Supported Parameters
+| Parameter | Description | Default |
+|-----------|-------------|---------|
+| `--compiler` | Compiler type (gcc/clang) | `clang` |
+| `--version` | Version tag | `latest` |
+| `--reset` | Reset container | - |
+| `--rebuild` | Force rebuild image | - |
+| `--update` | Pull latest image | - |
+
+#### Generated Image Naming
+- Format: `clice-io/clice:linux-{compiler}-{version}`
+- Examples:
+  - `clice-io/clice:linux-clang-latest`
+  - `clice-io/clice:linux-gcc-v1.2.3`
+
+### ๐Ÿ”ง Advanced Usage
+
+#### Execute Custom Commands
+```bash
+# Execute specific command in container
+./docker/linux/run.sh "cmake --version && xmake --version"
+
+# Run tests
+./docker/linux/run.sh "cd /clice/build && ctest"
+
+# Interactive debugging
+./docker/linux/run.sh "gdb ./build/clice"
+```
+
+## Building Docker Image
+
+Use the following command to build the Docker image:
+
+```bash
+$ docker build -t clice .
+```
+
+Run the image with the following command:
+
+```bash
+$ docker run --rm -it clice --help
+OVERVIEW: clice is a new generation of language server for C/C++
+...
+```
+
+The directory structure of the docker image is as follows:
+
+```
+/opt/clice
+โ”œโ”€โ”€ bin
+โ”‚   โ”œโ”€โ”€ clice -> /usr/local/bin/clice
+โ”œโ”€โ”€ include
+โ”œโ”€โ”€ lib
+โ”œโ”€โ”€ LICENSE
+โ”œโ”€โ”€ README.md
+```
+
+Hint: launch clice in the docker container by running the following command:
+
+```bash
+$ docker run --rm -it --entrypoint bash clice
+```
+
+## Development Container
+
+We provide Docker images as a pre-configured environment to streamline the setup process. You can use the following scripts to manage the development container. These scripts can be run from the project root directory.
+
+```bash
+# Build the development image
+./docker/linux/build.sh
+
+# Run the container with the clang toolchain
+./docker/linux/run.sh --compiler clang
+
+# Run the container with the gcc toolchain
+./docker/linux/run.sh --compiler gcc
+
+# Reset the container (stops and removes the existing one)
+./docker/linux/run.sh --reset
+```
+
+> [!NOTE]
+> This feature is currently in a preview stage and only supports Linux. Windows support will be provided in the future, and the functionality may be subject to change.
diff --git a/docs/en/dev/dev-container-architecture.md b/docs/en/dev/dev-container-architecture.md
new file mode 100644
index 00000000..559e7502
--- /dev/null
+++ b/docs/en/dev/dev-container-architecture.md
@@ -0,0 +1,610 @@
+# ๐Ÿณ Clice Container Architecture
+
+## Overview
+
+The Clice container provides a comprehensive, pre-configured environment for C++ development with all necessary toolchains, compilers, and dependencies. This document details the container architecture, build stages, file structure, caching mechanisms, and usage methods.
+
+## ๐Ÿ—๏ธ Multi-Stage Build Architecture
+
+The container uses a sophisticated multi-stage Docker build to optimize both build time and image size, adopting a parallel build strategy:
+
+### Architecture Flow Diagram
+
+```mermaid
+graph TD
+    A[Base Image ubuntu:24.04] --> B[Python Base Environment base-python-environment]
+
+    B --> C[Stage 1: Toolchain Builder toolchain-builder]
+    B --> D[Stage 2: Dependencies Downloader dependencies-downloader]
+
+    C -->|Toolchain Build - Internal Parallel| E[Stage 3: Release Package Creator image-packer]
+    D -->|Dependency Download - Batch Parallel| E
+
+    E -->|Create Compressed Archive| F[Stage 4: Final Package Image packed-image]
+    F -->|Auto Expand Before Runtime| G[Stage 5: Development Image expanded-image]
+    G --> H[Dev Container]
+
+    subgraph "โšก Parallel Build"
+        C
+        D
+    end
+
+    subgraph "๐Ÿ“ฆ Package Creation"
+        E
+        F
+    end
+
+    subgraph "๐Ÿท๏ธ Release Distribution"
+        I[Small Size, Easy Distribution]
+        F
+    end
+
+    subgraph "๐Ÿท๏ธ User Environment"
+        G
+        J[Full Featured Development]
+    end
+```
+
+### Build Stages Detailed
+
+#### Base Stage: Python Environment Foundation (`base-python-environment`)
+**Purpose**: Establish consistent Python and uv environment foundation for all stages
+**Base**: `ubuntu:24.04`
+
+```dockerfile
+FROM ubuntu:24.04 AS base-python-environment
+ENV PATH="/root/.local/bin:${PATH}"
+ENV UV_CACHE_DIR=${UV_CACHE_DIR}
+
+# Copy project configuration to determine Python version
+COPY config /clice/config
+COPY docker/linux /clice/docker/linux
+
+RUN --mount=type=cache,target=${APT_CACHE_DIR},sharing=locked \
+    --mount=type=cache,target=${APT_STATE_CACHE_DIR},sharing=locked \
+    --mount=type=cache,target=${UV_CACHE_DIR},sharing=locked \
+    bash -eux - <<'SCRIPT'
+    apt update
+    apt 
install -y --no-install-recommends curl jq ca-certificates + + # Install uv for Python management + curl -LsSf https://astral.sh/uv/install.sh | sh + + # Get Python version from configuration + PYTHON_VERSION=$(jq -r .python /clice/config/default-toolchain-version.json) + uv python install "$PYTHON_VERSION" +SCRIPT +``` + +**Installed Components**: +- `curl`, `jq`, `ca-certificates` - Essential system utilities for downloading and JSON processing +- `uv` - Modern Python package and project manager for consistent environment management +- **Dynamic Python Version** - Automatically installs Python version specified in configuration files + +#### Stage 1: Toolchain Builder (`toolchain-builder`) - Parallel +**Purpose**: Build custom compiler toolchain (currently not implemented) +**Parallel Optimization**: Runs concurrently with dependencies downloader, uses internal parallel building +**Base**: `base-python-environment` + +```dockerfile +FROM base-python-environment AS toolchain-builder +# Independent cache namespaces for parallel execution +RUN --mount=type=cache,target=${APT_CACHE_DIR},sharing=locked,id=toolchain-builder-apt \ + --mount=type=cache,target=${APT_STATE_CACHE_DIR},sharing=locked,id=toolchain-builder-apt-state \ + --mount=type=cache,target=${CACHE_DIR_ROOT},sharing=locked,id=toolchain-builder-cache \ + --mount=type=cache,target=${UV_CACHE_DIR},sharing=locked,id=toolchain-builder-uv \ + bash -eux - <<'SCRIPT' + uv sync --project /clice/docker/linux/utility/pyproject.toml + source /clice/docker/linux/utility/.venv/bin/activate + python docker/linux/utility/build_clice_compiler_toolchain.py +SCRIPT +``` + +**Features**: +- **Independent Cache Namespace**: Uses `toolchain-builder-*` cache IDs for true parallel execution +- **Python-based Build System**: Uses uv for dependency management and Python scripts for build logic +- **Component Architecture**: Leverages component-based build system from build_config.py +- **Parallel Internal Processing**: Can build multiple compiler components simultaneously +- **Static Linking Support**: Can build static-linked libstdc++ for lower glibc compatibility + +#### Stage 2: Dependencies Downloader (`dependencies-downloader`) - Parallel +**Purpose**: Download all development dependencies without installing them +**Parallel Optimization**: Runs concurrently with toolchain builder, uses internal batch parallel downloads +**Base**: `base-python-environment` + +```dockerfile +FROM base-python-environment AS dependencies-downloader +# Independent cache namespaces for parallel execution +RUN --mount=type=cache,target=${APT_CACHE_DIR},sharing=locked,id=dependencies-downloader-apt \ + --mount=type=cache,target=${APT_STATE_CACHE_DIR},sharing=locked,id=dependencies-downloader-apt-state \ + --mount=type=cache,target=${CACHE_DIR_ROOT},sharing=locked,id=dependencies-downloader-cache \ + --mount=type=cache,target=${UV_CACHE_DIR},sharing=locked,id=dependencies-downloader-uv \ + bash -eux - <<'SCRIPT' + uv sync --project /clice/docker/linux/utility/pyproject.toml + source /clice/docker/linux/utility/.venv/bin/activate + python docker/linux/utility/download_dependencies.py +SCRIPT +``` + +**Downloaded Components**: +- **APT Packages**: Complete dependency tree resolved using component-based architecture +- **CMake**: Binary installer with SHA-256 verification +- **XMake**: Platform-specific installation bundle +- **Python Dependencies**: Development tool packages managed by uv + +**Parallel Optimization Features**: +- **Independent Cache Namespace**: Uses 
`dependencies-downloader-*` cache IDs +- **aria2c Multi-connection Downloads**: High-speed parallel downloads for individual files +- **Batch Processing**: APT packages downloaded in concurrent batches +- **Component-based Resolution**: Uses ALL_COMPONENTS registry for dynamic dependency management +- **Pre-resolved Dependency Trees**: Reduces download-time dependency lookups + +**Cache Structure**: +``` +${RELEASE_PACKAGE_DIR}/ +โ”œโ”€โ”€ apt-unknown/ # APT component packages and metadata +โ”œโ”€โ”€ uv-unknown/ # UV component packages +โ”œโ”€โ”€ cmake-{version}/ # CMake component with version +โ”œโ”€โ”€ xmake-{version}/ # XMake component with version +โ”œโ”€โ”€ toolchain-unknown/ # Toolchain component container +โ”‚ โ”œโ”€โ”€ glibc-{version}/ # GNU C Library sub-component +โ”‚ โ”œโ”€โ”€ gcc-{version}/ # GNU Compiler Collection sub-component +โ”‚ โ”œโ”€โ”€ llvm-{version}/ # LLVM Project sub-component +โ”‚ โ””โ”€โ”€ linux-{version}/ # Linux Kernel Headers sub-component +โ””โ”€โ”€ manifest.json # Complete dependency manifest with ALL_COMPONENTS data + +${PACKED_RELEASE_PACKAGE_PATH} # Compressed archive (e.g., /release-pkg.tar.xz) +``` + +#### Stage 3: Release Package Creator (`image-packer`) +**Purpose**: Merge toolchain and dependencies into final release package for distribution +**Note**: This stage creates the compressed release package archive +**Base**: `base-python-environment` + +```dockerfile +FROM base-python-environment AS image-packer +# Merge outputs from parallel stages +COPY --from=toolchain-builder ${RELEASE_PACKAGE_DIR} ${RELEASE_PACKAGE_DIR} +COPY --from=dependencies-downloader ${RELEASE_PACKAGE_DIR} ${RELEASE_PACKAGE_DIR} + +# Independent cache namespace for package creation +RUN --mount=type=cache,target=${APT_CACHE_DIR},sharing=locked,id=packed-image-apt \ + --mount=type=cache,target=${APT_STATE_CACHE_DIR},sharing=locked,id=packed-image-apt-state \ + --mount=type=cache,target=${UV_CACHE_DIR},sharing=locked,id=packed-image-uv \ + bash -eux - <<'SCRIPT' + uv sync --project /clice/docker/linux/utility/pyproject.toml + source /clice/docker/linux/utility/.venv/bin/activate + python docker/linux/utility/create_release_package.py +SCRIPT +``` + +**Release Package Creation Features**: +- **Independent Cache Namespace**: Uses `packed-image-*` cache IDs for isolation +- **Python-based Merging**: Uses create_release_package.py for intelligent component merging +- **Component Integration**: Merges outputs from parallel stages using component architecture +- **Manifest Generation**: Creates comprehensive manifest.json with ALL_COMPONENTS metadata +- **Parallel Task Execution**: Uses ParallelTaskScheduler for efficient package creation + +#### Stage 4: Final Package Image (`packed-image`) +**Purpose**: Create the final distribution image with compressed release package +**Note**: This stage copies the compressed archive and environment configuration +**Base**: `base-python-environment` + +```dockerfile +FROM base-python-environment AS packed-image +COPY --from=image-packer ${PACKED_RELEASE_PACKAGE_PATH} ${PACKED_RELEASE_PACKAGE_PATH} +COPY --from=image-packer ${ENVIRONMENT_CONFIG_FILE} ${ENVIRONMENT_CONFIG_FILE} +``` + +**Final Package Features**: +- **Compressed Release Archive**: Contains `${PACKED_RELEASE_PACKAGE_PATH}` (e.g., `/release-pkg.tar.xz`) +- **Environment Configuration**: Includes pre-configured shell environment settings +- **Distribution Optimized**: Minimal size for efficient distribution and caching + +#### Stage 5: Development Image (`expanded-image`) - Final Usage 
+**Purpose**: Fully expanded development environment - the final usable image +**Note**: Auto-expanded from release package using Python-based setup +**Base**: Uses `${PACKED_IMAGE_NAME}` (the release image from previous stage) + +```dockerfile +FROM ${PACKED_IMAGE_NAME} AS expanded-image +RUN bash -eux - <<'SCRIPT' + # Use project-specific Python environment + uv sync --project /clice/pyproject.toml + source /clice/docker/linux/utility/.venv/bin/activate + + # Expand release package into full development environment + python docker/linux/utility/local_setup.py + + # Clean up build artifacts to reduce final image size + rm -rf /clice +SCRIPT +``` + +**Installed Components**: +- **Compilers**: GCC 14, Clang 20 (from official LLVM PPA) +- **Build Systems**: CMake (latest), XMake (latest) +- **Development Tools**: Complete C++ development stack including debuggers and profilers +- **Runtime Libraries**: All necessary runtime dependencies + +**Expansion Features**: +- **Python tarfile-based Extraction**: Consistent archive handling using Python's built-in tarfile module +- **Component-based Installation**: Uses component architecture for systematic tool installation +- **Size Optimization**: Removes build artifacts after expansion to minimize final image size +- **No Cache Dependencies**: Final expansion doesn't require build-time caches, suitable for end-user environments + +**Development Container**: This is the final expanded, production-ready development environment + +## ๐Ÿ“ Container File Structure + +### Runtime Container Structure +``` +/clice/ # Project root directory (user workspace) +โ”œโ”€โ”€ build/ # Build output directory +โ”œโ”€โ”€ cmake/ # CMake configuration files +โ”œโ”€โ”€ config/ # Centralized configuration +โ”‚ โ”œโ”€โ”€ build_config.py # Build configuration constants and component architecture +โ”‚ โ””โ”€โ”€ default-toolchain-version.json # Toolchain version definitions +โ”œโ”€โ”€ docker/linux/utility/ # Container utility scripts +โ”‚ โ”œโ”€โ”€ build_utils.py # Build utilities and parallel scheduler +โ”‚ โ”œโ”€โ”€ download_dependencies.py # Dependency downloader +โ”‚ โ”œโ”€โ”€ create_release_package.py # Release package creator +โ”‚ โ””โ”€โ”€ local_setup.py # Local environment setup +โ”œโ”€โ”€ include/ # C++ header files +โ”œโ”€โ”€ src/ # C++ source files +โ””โ”€โ”€ tests/ # Test files +``` + +### Package Structure +``` +${RELEASE_PACKAGE_DIR}/ # Component package directory (build-time) +โ”œโ”€โ”€ apt-unknown/ # APT component packages and metadata +โ”œโ”€โ”€ uv-unknown/ # UV component packages +โ”œโ”€โ”€ cmake-{version}/ # CMake component (versioned) +โ”œโ”€โ”€ xmake-{version}/ # XMake component (versioned) +โ”œโ”€โ”€ toolchain-unknown/ # Toolchain component container +โ”‚ โ”œโ”€โ”€ glibc-{version}/ # GNU C Library sub-component +โ”‚ โ”œโ”€โ”€ gcc-{version}/ # GNU Compiler Collection sub-component +โ”‚ โ”œโ”€โ”€ llvm-{version}/ # LLVM Project sub-component +โ”‚ โ””โ”€โ”€ linux-{version}/ # Linux Kernel Headers sub-component +โ””โ”€โ”€ manifest.json # Complete component and dependency manifest + +${PACKED_RELEASE_PACKAGE_PATH} # Compressed release package (e.g., /release-pkg.tar.xz) + +${ENVIRONMENT_CONFIG_FILE} # Environment configuration file (e.g., /root/.bashrc) +``` + +### Dependency Manifest Structure +```json +{ + "timestamp": 1696723200, + "components": { + "apt-unknown": { + "name": "apt", + "version": "unknown", + "type": "APTComponent", + "package_dir": "${RELEASE_PACKAGE_DIR}/apt-unknown", + "packages": [ + "git", "binutils", "bison", "build-essential", 
"g++-14", + "gawk", "gcc-14", "gnupg", "libstdc++-14-dev", + "make", "rsync", "software-properties-common", "unzip", "xz-utils", + "aria2", "apt-rdepends", "bzip2", "xz-utils" + ], + "package_count": 125 + }, + "uv-unknown": { + "name": "uv", + "version": "unknown", + "type": "UVComponent", + "package_dir": "${RELEASE_PACKAGE_DIR}/uv-unknown" + }, + "cmake-{version}": { + "name": "cmake", + "version": "3.28.3", + "type": "CMakeComponent", + "package_dir": "${RELEASE_PACKAGE_DIR}/cmake-3.28.3", + "base_url": "https://github.com/Kitware/CMake/releases/download/v{version}", + "tarball_name": "cmake-3.28.3-linux-x86_64.sh", + "verification_name": "cmake-3.28.3-SHA-256.txt" + }, + "xmake-{version}": { + "name": "xmake", + "version": "2.8.5", + "type": "XMakeComponent", + "package_dir": "${RELEASE_PACKAGE_DIR}/xmake-2.8.5", + "base_url": "https://github.com/xmake-io/xmake/releases/download/v{version}", + "tarball_name": "xmake-bundle-v2.8.5.Linux.x86_64" + }, + "toolchain-unknown": { + "name": "toolchain", + "version": "unknown", + "type": "ToolchainComponent", + "package_dir": "${RELEASE_PACKAGE_DIR}/toolchain-unknown", + "sub_components": { + "glibc-{version}": { + "name": "glibc", + "version": "2.39", + "type": "GlibcSubComponent", + "package_dir": "${RELEASE_PACKAGE_DIR}/toolchain-unknown/glibc-2.39", + "base_url": "https://ftpmirror.gnu.org/gnu/glibc", + "tarball_name": "glibc-2.39.tar.xz" + }, + "gcc-{version}": { + "name": "gcc", + "version": "14", + "type": "GccSubComponent", + "package_dir": "${RELEASE_PACKAGE_DIR}/toolchain-unknown/gcc-14", + "base_url": "https://ftpmirror.gnu.org/gnu/gcc/gcc-14", + "tarball_name": "gcc-14.tar.xz" + }, + "llvm-{version}": { + "name": "llvm", + "version": "20.1.5", + "type": "LlvmSubComponent", + "package_dir": "${RELEASE_PACKAGE_DIR}/toolchain-unknown/llvm-20.1.5", + "base_url": "https://github.com/llvm/llvm-project/releases/download/llvmorg-20.1.5", + "tarball_name": "llvm-project-20.1.5.src.tar.xz" + }, + "linux-{version}": { + "name": "linux", + "version": "6.6", + "type": "LinuxSubComponent", + "package_dir": "${RELEASE_PACKAGE_DIR}/toolchain-unknown/linux-6.6", + "base_url": "https://github.com/torvalds/linux/archive/refs/tags", + "tarball_name": "v6.6.tar.gz" + } + }, + "sysroot_dir": "${RELEASE_PACKAGE_DIR}/toolchain-unknown/sysroot/x86_64-linux-gnu/x86_64-linux-gnu/glibc2.39-libstdc++14-linux6.6" + } + }, + "build_stages": { + "dependencies_downloader": ["apt-unknown", "uv-unknown", "cmake-{version}", "xmake-{version}"], + "toolchain_builder": ["toolchain-unknown"] + }, + "environment_variables": { + "PATH": "/root/.local/bin:${PATH}", + "XMAKE_ROOT": "y" + } +} +``` + +## ๐Ÿš€ Build Process + +### Build Commands +```bash +# Build with default settings (clang + latest) +./docker/linux/build.sh + +# Build with specific compiler and version +./docker/linux/build.sh --compiler gcc --version v1.2.3 +``` + +### Build Process Flow +1. **Stage 1**: Install basic system packages +2. **Stage 2**: Download all dependencies to cache +3. **Stage 3**: Install dependencies from cache to final image +4. 
**Finalization**: Configure environment and create development-ready container + +### Generated Images +**No distinction between dev and production builds**, unified image architecture: + +- **Image Name**: `clice-io/clice:linux-{compiler}-{version}` +- **Image Types**: + - **Release Image**: Distribution-optimized, contains compressed packages and cache, not directly usable + - **Development Image**: Fully expanded development environment, final usable image +- **Examples**: + - `clice-io/clice:linux-clang-latest` + - `clice-io/clice:linux-gcc-v1.2.3` + +**Important Notes**: +- Release image's main advantage is reducing user download image size +- Development image is the final expanded container, the environment users actually use +- Unified build process, no distinction between development and production environments + +## ๐Ÿƒ Container Usage + +### Running Container +```bash +# Run with default settings +./docker/linux/run.sh + +# Run with specific compiler and version +./docker/linux/run.sh --compiler gcc --version v1.2.3 + +# Reset container (remove and recreate) +./docker/linux/run.sh --reset + +# Update container image +./docker/linux/run.sh --update +``` + +### Container Management +- **Automatic Creation**: If container doesn't exist, it's created automatically +- **Version Checking**: Container image version is validated before use +- **Workspace Mounting**: Project directory is mounted to `/clice` in container +- **Persistent Storage**: Container persists between sessions + +### Development Workflow +```bash +# 1. Build development container +./docker/linux/build.sh --compiler clang + +# 2. Start development session +./docker/linux/run.sh --compiler clang + +# 3. Inside container - build project +cd /clice +mkdir build && cd build +cmake .. 
-G Ninja -DCMAKE_BUILD_TYPE=Debug +ninja +``` + +## โšก Caching Strategy + +### Independent Cache Namespaces +Each build stage uses separate cache IDs to enable true parallel execution: + +#### Stage-Specific Cache IDs +- **Toolchain Builder**: `toolchain-builder-*` + - `toolchain-builder-apt` - APT package cache + - `toolchain-builder-apt-state` - APT state cache + - `toolchain-builder-cache` - General build cache + - `toolchain-builder-uv` - UV Python package cache + +- **Dependencies Downloader**: `dependencies-downloader-*` + - `dependencies-downloader-apt` - APT package cache + - `dependencies-downloader-apt-state` - APT state cache + - `dependencies-downloader-cache` - Download cache + - `dependencies-downloader-uv` - UV Python package cache + +- **Release Package Creator**: `packed-image-*` + - `packed-image-apt` - APT package cache + - `packed-image-apt-state` - APT state cache + - `packed-image-uv` - UV Python package cache + +### Docker Layer Caching +- **Python Base Environment**: Cached independently, shared across all stages +- **Stage Outputs**: Each stage's output is cached as separate Docker layers +- **Parallel Stage Isolation**: Independent caches prevent conflicts during parallel execution + +### Cache Optimization Benefits +- **True Parallel Execution**: Independent cache namespaces eliminate conflicts +- **Reduced Build Time**: Intelligent layer caching and component-based builds +- **Bandwidth Efficiency**: Downloads happen once per cache namespace +- **Offline Capability**: Complete dependency pre-resolution enables offline rebuilds +- **Selective Invalidation**: Changes to one component don't invalidate others + +## ๐Ÿ›ก๏ธ Security & Verification + +### Package Verification +- **CMake**: SHA-256 checksum verification of installer +- **APT Packages**: Standard APT signature verification +- **Dependency Tree**: Complete dependency resolution with `apt-rdepends` + +### Build Isolation +- **Multi-stage**: Each stage is isolated and cacheable +- **Non-root User**: Development runs as non-root user where possible +- **Clean Environment**: Each build starts from clean base + +## ๐Ÿ”ง Configuration Management + +### Centralized Configuration +All container configuration is managed through `config/build_config.py`: + +```python +# Version management +TOOLCHAIN_VERSIONS = { + "cmake": "3.28.3", + "xmake": "2.8.5", + "gcc": "14", + "llvm": "20" +} + +# Package lists +DEV_CONTAINER_BASIC_TOOLS = [ + "software-properties-common", + "gnupg", "git", "xz-utils", "unzip", "make" +] +``` + +### Environment Variables +- `PKG_CACHE_DIR=/pkg-cache` - Package cache directory +- `DEBIAN_FRONTEND=noninteractive` - Non-interactive package installation +- `XMAKE_ROOT=y` - XMake root privileges + +## ๐Ÿš€ Performance Optimizations + +### Parallel Processing Architecture +**Parallel optimization is implemented at three levels**: + +#### Inter-Stage Parallelism (Docker Build Level) +- **Toolchain Builder** and **Dependencies Downloader** stages execute concurrently +- **Release Package Creator** waits for both parallel stages to complete +- Docker BuildKit automatically schedules parallel stage execution +- **Independent Cache Namespaces** prevent cache conflicts during parallel execution + +#### Intra-Stage Parallelism (Component Level) +**Toolchain Builder Internal Parallelism**: +- Uses `ParallelTaskScheduler` for optimal job scheduling +- Compiler components built concurrently using `ProcessPoolExecutor` +- Multi-core CPU utilization for parallel compilation +- Component dependencies 
+
+#### Cache Independence Architecture
+Each stage operates with completely independent cache namespaces:
+```dockerfile
+# Toolchain Builder - Independent cache namespace
+--mount=type=cache,target=${APT_CACHE_DIR},sharing=locked,id=toolchain-builder-apt
+--mount=type=cache,target=${UV_CACHE_DIR},sharing=locked,id=toolchain-builder-uv
+
+# Dependencies Downloader - Independent cache namespace
+--mount=type=cache,target=${APT_CACHE_DIR},sharing=locked,id=dependencies-downloader-apt
+--mount=type=cache,target=${UV_CACHE_DIR},sharing=locked,id=dependencies-downloader-uv
+
+# Release Package Creator - Independent cache namespace
+--mount=type=cache,target=${APT_CACHE_DIR},sharing=locked,id=packed-image-apt
+--mount=type=cache,target=${UV_CACHE_DIR},sharing=locked,id=packed-image-uv
+```
+
+### Build Optimization
+- **Layer Caching**: Aggressive Docker layer caching strategy
+- **Minimal Rebuilds**: Only changed components are rebuilt
+- **Size Optimization**: Multi-stage builds minimize final image size
+- **Cache Separation**: The release image serves as a cache layer; the development image expands quickly from it
+
+## 🔄 Maintenance & Updates
+
+### Version Updates
+Update versions in `config/default-toolchain-version.json`:
+```json
+{
+  "cmake": "3.28.3",
+  "xmake": "2.8.5",
+  "gcc": "14",
+  "llvm": "20"
+}
+```
+
+### Adding New Dependencies
+1. Update the package lists in `config/build_config.py`
+2. Rebuild the container with `./docker/linux/build.sh --rebuild`
+3. Verify with `./docker/linux/run.sh --reset`
+
+### Container Health Checks
+```bash
+# Check container status
+docker ps -f name=clice-linux-clang
+
+# Verify development environment
+./docker/linux/run.sh bash -c "cmake --version && xmake --version"
+
+# Check package manifest
+docker exec clice-linux-clang cat /pkg-cache/manifest.json
+```
+
+## 🎯 Best Practices
+
+### Development Workflow
+1. Use version-specific containers for reproducible builds
+2. Reset containers when switching between major versions
+3. Use `--update` regularly to pull the latest images
+4. Mount only the necessary directories to avoid performance issues
+
+### Container Management
+1. Use descriptive version tags for release builds
+2. Clean up unused containers and images periodically
+3. Monitor container resource usage
+4. Keep container configuration under version control
+
+This architecture provides a robust, efficient, and maintainable development environment for the Clice project, with optimized build times, comprehensive toolchain support, and an excellent developer experience.
\ No newline at end of file diff --git a/docs/zh/dev/dev-container-architecture.md b/docs/zh/dev/dev-container-architecture.md new file mode 100644 index 00000000..8d764602 --- /dev/null +++ b/docs/zh/dev/dev-container-architecture.md @@ -0,0 +1,610 @@ +# ๐Ÿณ Clice ๅฎนๅ™จๆžถๆž„ + +## ๆฆ‚่ฟฐ + +Clice ๅฎนๅ™จๆไพ›ไบ†ไธ€ไธชๅ…จ้ขใ€้ข„้…็ฝฎ็š„ C++ ๅผ€ๅ‘็Žฏๅขƒ๏ผŒๅŒ…ๅซๆ‰€ๆœ‰ๅฟ…่ฆ็š„ๅทฅๅ…ท้“พใ€็ผ–่ฏ‘ๅ™จๅ’Œไพ่ต–้กนใ€‚ๆœฌๆ–‡ๆกฃ่ฏฆ็ป†่ฏดๆ˜Žไบ†ๅฎนๅ™จๆžถๆž„ใ€ๆž„ๅปบ้˜ถๆฎตใ€ๆ–‡ไปถ็ป“ๆž„ใ€็ผ“ๅญ˜ๆœบๅˆถๅ’Œไฝฟ็”จๆ–นๆณ•ใ€‚ + +## ๐Ÿ—๏ธ ๅคš้˜ถๆฎตๆž„ๅปบๆžถๆž„ + +ๅฎนๅ™จไฝฟ็”จๅคๆ‚็š„ๅคš้˜ถๆฎต Docker ๆž„ๅปบๆฅไผ˜ๅŒ–ๆž„ๅปบๆ—ถ้—ดๅ’Œ้•œๅƒๅคงๅฐ๏ผŒ้‡‡็”จๅนถ่กŒๆž„ๅปบ็ญ–็•ฅ๏ผš + +### ๆžถๆž„ๆต็จ‹ๅ›พ + +```mermaid +graph TD + A[ๅŸบ็ก€้•œๅƒ ubuntu:24.04] --> B[Python ๅŸบ็ก€็Žฏๅขƒ base-python-environment] + + B --> C[้˜ถๆฎต 1: ๅทฅๅ…ท้“พๆž„ๅปบๅ™จ toolchain-builder] + B --> D[้˜ถๆฎต 2: ไพ่ต–ไธ‹่ฝฝๅ™จ dependencies-downloader] + + C -->|ๅทฅๅ…ท้“พๆž„ๅปบ - ๅ†…้ƒจๅนถ่กŒ| E[้˜ถๆฎต 3: Release ๅŒ…ๅˆ›ๅปบๅ™จ image-packer] + D -->|ไพ่ต–ไธ‹่ฝฝ - ๆ‰น้‡ๅนถ่กŒ| E + + E -->|ๅˆ›ๅปบๅŽ‹็ผฉๅฝ’ๆกฃ| F[้˜ถๆฎต 4: ๆœ€็ปˆๅŒ…้•œๅƒ packed-image] + F -->|่ฟ่กŒๅ‰่‡ชๅŠจๅฑ•ๅผ€| G[้˜ถๆฎต 5: Development ้•œๅƒ expanded-image] + G --> H[ๅผ€ๅ‘ๅฎนๅ™จ] + + subgraph "โšก ๅนถ่กŒๆž„ๅปบ" + C + D + end + + subgraph "๐Ÿ“ฆ ๅŒ…ๅˆ›ๅปบ" + E + F + end + + subgraph "๐Ÿท๏ธ ๅ‘ๅธƒๅˆ†ๅ‘" + I[ไฝ“็งฏๅฐ๏ผŒไพฟไบŽๅˆ†ๅ‘] + F + end + + subgraph "๐Ÿท๏ธ ็”จๆˆท็Žฏๅขƒ" + G + J[ๅŠŸ่ƒฝๅฎŒๆ•ด็š„ๅผ€ๅ‘็Žฏๅขƒ] + end +``` + +### ๆž„ๅปบ้˜ถๆฎต่ฏฆ่งฃ + +#### ๅŸบ็ก€้˜ถๆฎต๏ผšPython ็ŽฏๅขƒๅŸบ็ก€ (`base-python-environment`) +**็›ฎ็š„**๏ผšไธบๆ‰€ๆœ‰้˜ถๆฎตๅปบ็ซ‹ไธ€่‡ด็š„ Python ๅ’Œ uv ็ŽฏๅขƒๅŸบ็ก€ +**ๅŸบ็ก€้•œๅƒ**๏ผš`ubuntu:24.04` + +```dockerfile +FROM ubuntu:24.04 AS base-python-environment +ENV PATH="/root/.local/bin:${PATH}" +ENV UV_CACHE_DIR=${UV_CACHE_DIR} + +# ๅคๅˆถ้กน็›ฎ้…็ฝฎไปฅ็กฎๅฎš Python ็‰ˆๆœฌ +COPY config /clice/config +COPY docker/linux /clice/docker/linux + +RUN --mount=type=cache,target=${APT_CACHE_DIR},sharing=locked \ + --mount=type=cache,target=${APT_STATE_CACHE_DIR},sharing=locked \ + --mount=type=cache,target=${UV_CACHE_DIR},sharing=locked \ + bash -eux - <<'SCRIPT' + apt update + apt install -y --no-install-recommends curl jq ca-certificates + + # ๅฎ‰่ฃ… uv ็”จไบŽ Python ็ฎก็† + curl -LsSf https://astral.sh/uv/install.sh | sh + + # ไปŽ้…็ฝฎ่Žทๅ– Python ็‰ˆๆœฌ + PYTHON_VERSION=$(jq -r .python /clice/config/default-toolchain-version.json) + uv python install "$PYTHON_VERSION" +SCRIPT +``` + +**ๅฎ‰่ฃ…็š„็ป„ไปถ**๏ผš +- `curl`, `jq`, `ca-certificates` - ไธ‹่ฝฝๅ’Œ JSON ๅค„็†ๆ‰€้œ€็š„ๅŸบๆœฌ็ณป็ปŸๅทฅๅ…ท +- `uv` - ็Žฐไปฃ Python ๅŒ…ๅ’Œ้กน็›ฎ็ฎก็†ๅ™จ๏ผŒ็”จไบŽไธ€่‡ด็š„็Žฏๅขƒ็ฎก็† +- **ๅŠจๆ€ Python ็‰ˆๆœฌ** - ่‡ชๅŠจๅฎ‰่ฃ…้…็ฝฎๆ–‡ไปถไธญๆŒ‡ๅฎš็š„ Python ็‰ˆๆœฌ + +#### ้˜ถๆฎต 1๏ผšๅทฅๅ…ท้“พๆž„ๅปบๅ™จ (`toolchain-builder`) - ๅนถ่กŒ +**็›ฎ็š„**๏ผšๆž„ๅปบ่‡ชๅฎšไน‰็ผ–่ฏ‘ๅ™จๅทฅๅ…ท้“พ๏ผˆ็›ฎๅ‰ๆš‚ๆœชๅฎž็Žฐ๏ผ‰ +**ๅนถ่กŒไผ˜ๅŒ–**๏ผšไธŽไพ่ต–ไธ‹่ฝฝๅ™จๅŒๆ—ถ่ฟ่กŒ๏ผŒๅ†…้ƒจไฝฟ็”จๅนถ่กŒๆž„ๅปบ +**ๅŸบ็ก€้•œๅƒ**๏ผš`base-python-environment` + +```dockerfile +FROM base-python-environment AS toolchain-builder +# ็”จไบŽๅนถ่กŒๆ‰ง่กŒ็š„็‹ฌ็ซ‹็ผ“ๅญ˜ๅ‘ฝๅ็ฉบ้—ด +RUN --mount=type=cache,target=${APT_CACHE_DIR},sharing=locked,id=toolchain-builder-apt \ + --mount=type=cache,target=${APT_STATE_CACHE_DIR},sharing=locked,id=toolchain-builder-apt-state \ + --mount=type=cache,target=${CACHE_DIR_ROOT},sharing=locked,id=toolchain-builder-cache \ + --mount=type=cache,target=${UV_CACHE_DIR},sharing=locked,id=toolchain-builder-uv \ + bash -eux - <<'SCRIPT' + uv sync --project 
/clice/docker/linux/utility/pyproject.toml + source /clice/docker/linux/utility/.venv/bin/activate + python docker/linux/utility/build_clice_compiler_toolchain.py +SCRIPT +``` + +**็‰น็‚น**๏ผš +- **็‹ฌ็ซ‹็ผ“ๅญ˜ๅ‘ฝๅ็ฉบ้—ด**๏ผšไฝฟ็”จ `toolchain-builder-*` ็ผ“ๅญ˜ ID ๅฎž็Žฐ็œŸๆญฃ็š„ๅนถ่กŒๆ‰ง่กŒ +- **ๅŸบไบŽ Python ็š„ๆž„ๅปบ็ณป็ปŸ**๏ผšไฝฟ็”จ uv ่ฟ›่กŒไพ่ต–็ฎก็†๏ผŒPython ่„šๆœฌๅค„็†ๆž„ๅปบ้€ป่พ‘ +- **็ป„ไปถๆžถๆž„**๏ผšๅˆฉ็”จ build_config.py ไธญๅŸบไบŽ็ป„ไปถ็š„ๆž„ๅปบ็ณป็ปŸ +- **ๅนถ่กŒๅ†…้ƒจๅค„็†**๏ผšๅฏไปฅๅŒๆ—ถๆž„ๅปบๅคšไธช็ผ–่ฏ‘ๅ™จ็ป„ไปถ +- **้™ๆ€้“พๆŽฅๆ”ฏๆŒ**๏ผšๅฏๆž„ๅปบ้™ๆ€้“พๆŽฅ็š„ libstdc++ ไปฅๅ…ผๅฎนๆ›ดไฝŽ็‰ˆๆœฌ็š„ glibc + +#### ้˜ถๆฎต 2๏ผšไพ่ต–ไธ‹่ฝฝๅ™จ (`dependencies-downloader`) - ๅนถ่กŒ +**็›ฎ็š„**๏ผšไธ‹่ฝฝๆ‰€ๆœ‰ๅผ€ๅ‘ไพ่ต–้กน่€Œไธๅฎ‰่ฃ…ๅฎƒไปฌ +**ๅนถ่กŒไผ˜ๅŒ–**๏ผšไธŽๅทฅๅ…ท้“พๆž„ๅปบๅ™จๅŒๆ—ถ่ฟ่กŒ๏ผŒๅ†…้ƒจๆ‰น้‡ๅนถ่กŒไธ‹่ฝฝ +**ๅŸบ็ก€้•œๅƒ**๏ผš`base-python-environment` + +```dockerfile +FROM base-python-environment AS dependencies-downloader +# ็”จไบŽๅนถ่กŒๆ‰ง่กŒ็š„็‹ฌ็ซ‹็ผ“ๅญ˜ๅ‘ฝๅ็ฉบ้—ด +RUN --mount=type=cache,target=${APT_CACHE_DIR},sharing=locked,id=dependencies-downloader-apt \ + --mount=type=cache,target=${APT_STATE_CACHE_DIR},sharing=locked,id=dependencies-downloader-apt-state \ + --mount=type=cache,target=${CACHE_DIR_ROOT},sharing=locked,id=dependencies-downloader-cache \ + --mount=type=cache,target=${UV_CACHE_DIR},sharing=locked,id=dependencies-downloader-uv \ + bash -eux - <<'SCRIPT' + uv sync --project /clice/docker/linux/utility/pyproject.toml + source /clice/docker/linux/utility/.venv/bin/activate + python docker/linux/utility/download_dependencies.py +SCRIPT +``` + +**ไธ‹่ฝฝ็š„็ป„ไปถ**๏ผš +- **APT ๅŒ…**๏ผšไฝฟ็”จๅŸบไบŽ็ป„ไปถๆžถๆž„่งฃๆž็š„ๅฎŒๆ•ดไพ่ต–ๆ ‘ +- **CMake**๏ผšๅธฆ SHA-256 ้ชŒ่ฏ็š„ไบŒ่ฟ›ๅˆถๅฎ‰่ฃ…็จ‹ๅบ +- **XMake**๏ผšๅนณๅฐ็‰นๅฎš็š„ๅฎ‰่ฃ…ๅŒ… +- **Python ไพ่ต–**๏ผš็”ฑ uv ็ฎก็†็š„ๅผ€ๅ‘ๅทฅๅ…ทๅŒ… + +**ๅนถ่กŒไผ˜ๅŒ–็‰นๆ€ง**๏ผš +- **็‹ฌ็ซ‹็ผ“ๅญ˜ๅ‘ฝๅ็ฉบ้—ด**๏ผšไฝฟ็”จ `dependencies-downloader-*` ็ผ“ๅญ˜ ID +- **aria2c ๅคš่ฟžๆŽฅไธ‹่ฝฝ**๏ผšๅ•ไธชๆ–‡ไปถ็š„้ซ˜้€Ÿๅนถ่กŒไธ‹่ฝฝ +- **ๆ‰นๅค„็†**๏ผšAPT ๅŒ…ๅนถๅ‘ๆ‰น้‡ไธ‹่ฝฝ +- **ๅŸบไบŽ็ป„ไปถ็š„่งฃๆž**๏ผšไฝฟ็”จ ALL_COMPONENTS ๆณจๅ†Œ่กจ่ฟ›่กŒๅŠจๆ€ไพ่ต–็ฎก็† +- **้ข„่งฃๆžไพ่ต–ๆ ‘**๏ผšๅ‡ๅฐ‘ไธ‹่ฝฝๆ—ถ็š„ไพ่ต–ๆŸฅๆ‰พๅผ€้”€ + +**็ผ“ๅญ˜็ป“ๆž„**๏ผš +``` +${RELEASE_PACKAGE_DIR}/ +โ”œโ”€โ”€ apt-unknown/ # APT ็ป„ไปถๅŒ…ๅ’Œๅ…ƒๆ•ฐๆฎ +โ”œโ”€โ”€ uv-unknown/ # UV ็ป„ไปถๅŒ… +โ”œโ”€โ”€ cmake-{version}/ # ๅธฆ็‰ˆๆœฌ็š„ CMake ็ป„ไปถ +โ”œโ”€โ”€ xmake-{version}/ # ๅธฆ็‰ˆๆœฌ็š„ XMake ็ป„ไปถ +โ”œโ”€โ”€ toolchain-unknown/ # ๅทฅๅ…ท้“พ็ป„ไปถๅฎนๅ™จ +โ”‚ โ”œโ”€โ”€ glibc-{version}/ # GNU C ๅบ“ๅญ็ป„ไปถ +โ”‚ โ”œโ”€โ”€ gcc-{version}/ # GNU ็ผ–่ฏ‘ๅ™จ้›†ๅˆๅญ็ป„ไปถ +โ”‚ โ”œโ”€โ”€ llvm-{version}/ # LLVM ้กน็›ฎๅญ็ป„ไปถ +โ”‚ โ””โ”€โ”€ linux-{version}/ # Linux ๅ†…ๆ ธๅคดๆ–‡ไปถๅญ็ป„ไปถ +โ””โ”€โ”€ manifest.json # ๅŒ…ๅซ ALL_COMPONENTS ๆ•ฐๆฎ็š„ๅฎŒๆ•ดไพ่ต–ๆธ…ๅ• + +${PACKED_RELEASE_PACKAGE_PATH} # ๅŽ‹็ผฉๅฝ’ๆกฃ๏ผˆๅฆ‚ /release-pkg.tar.xz๏ผ‰ +``` + +#### ้˜ถๆฎต 3๏ผšRelease ๅŒ…ๅˆ›ๅปบๅ™จ (`image-packer`) +**็›ฎ็š„**๏ผšๅฐ†ๅทฅๅ…ท้“พๅ’Œไพ่ต–ๅˆๅนถไธบ็”จไบŽๅˆ†ๅ‘็š„ๆœ€็ปˆ release ๅŒ… +**็‰น็‚น**๏ผšๆญค้˜ถๆฎตๅˆ›ๅปบๅŽ‹็ผฉ็š„ release ๅŒ…ๅฝ’ๆกฃ +**ๅŸบ็ก€้•œๅƒ**๏ผš`base-python-environment` + +```dockerfile +FROM base-python-environment AS image-packer +# ๅˆๅนถๅนถ่กŒ้˜ถๆฎต็š„่พ“ๅ‡บ +COPY --from=toolchain-builder ${RELEASE_PACKAGE_DIR} ${RELEASE_PACKAGE_DIR} +COPY --from=dependencies-downloader ${RELEASE_PACKAGE_DIR} ${RELEASE_PACKAGE_DIR} + +# ็”จไบŽๅŒ…ๅˆ›ๅปบ็š„็‹ฌ็ซ‹็ผ“ๅญ˜ๅ‘ฝๅ็ฉบ้—ด +RUN 
--mount=type=cache,target=${APT_CACHE_DIR},sharing=locked,id=packed-image-apt \ + --mount=type=cache,target=${APT_STATE_CACHE_DIR},sharing=locked,id=packed-image-apt-state \ + --mount=type=cache,target=${UV_CACHE_DIR},sharing=locked,id=packed-image-uv \ + bash -eux - <<'SCRIPT' + uv sync --project /clice/docker/linux/utility/pyproject.toml + source /clice/docker/linux/utility/.venv/bin/activate + python docker/linux/utility/create_release_package.py +SCRIPT +``` + +**Release ๅŒ…ๅˆ›ๅปบ็‰นๆ€ง**๏ผš +- **็‹ฌ็ซ‹็ผ“ๅญ˜ๅ‘ฝๅ็ฉบ้—ด**๏ผšไฝฟ็”จ `packed-image-*` ็ผ“ๅญ˜ ID ่ฟ›่กŒ้š”็ฆป +- **ๅŸบไบŽ Python ็š„ๅˆๅนถ**๏ผšไฝฟ็”จ create_release_package.py ่ฟ›่กŒๆ™บ่ƒฝ็ป„ไปถๅˆๅนถ +- **็ป„ไปถ้›†ๆˆ**๏ผšไฝฟ็”จ็ป„ไปถๆžถๆž„ๅˆๅนถๅนถ่กŒ้˜ถๆฎต็š„่พ“ๅ‡บ +- **ๆธ…ๅ•็”Ÿๆˆ**๏ผšๅˆ›ๅปบๅŒ…ๅซ ALL_COMPONENTS ๅ…ƒๆ•ฐๆฎ็š„็ปผๅˆ manifest.json +- **ๅนถ่กŒไปปๅŠกๆ‰ง่กŒ**๏ผšไฝฟ็”จ ParallelTaskScheduler ่ฟ›่กŒ้ซ˜ๆ•ˆ็š„ๅŒ…ๅˆ›ๅปบ + +#### ้˜ถๆฎต 4๏ผšๆœ€็ปˆๅŒ…้•œๅƒ (`packed-image`) +**็›ฎ็š„**๏ผšๅˆ›ๅปบๅŒ…ๅซๅŽ‹็ผฉ release ๅŒ…็š„ๆœ€็ปˆๅˆ†ๅ‘้•œๅƒ +**็‰น็‚น**๏ผšๆญค้˜ถๆฎตๅคๅˆถๅŽ‹็ผฉๅฝ’ๆกฃๅ’Œ็Žฏๅขƒ้…็ฝฎ +**ๅŸบ็ก€้•œๅƒ**๏ผš`base-python-environment` + +```dockerfile +FROM base-python-environment AS packed-image +COPY --from=image-packer ${PACKED_RELEASE_PACKAGE_PATH} ${PACKED_RELEASE_PACKAGE_PATH} +COPY --from=image-packer ${ENVIRONMENT_CONFIG_FILE} ${ENVIRONMENT_CONFIG_FILE} +``` + +**ๆœ€็ปˆๅŒ…็‰นๆ€ง**๏ผš +- **ๅŽ‹็ผฉ Release ๅฝ’ๆกฃ**๏ผšๅŒ…ๅซ `${PACKED_RELEASE_PACKAGE_PATH}`๏ผˆๅฆ‚ `/release-pkg.tar.xz`๏ผ‰ +- **็Žฏๅขƒ้…็ฝฎ**๏ผšๅŒ…ๅซ้ข„้…็ฝฎ็š„ shell ็Žฏๅขƒ่ฎพ็ฝฎ +- **ๅˆ†ๅ‘ไผ˜ๅŒ–**๏ผšๆœ€ๅฐๅฐบๅฏธไปฅๅฎž็Žฐ้ซ˜ๆ•ˆๅˆ†ๅ‘ๅ’Œ็ผ“ๅญ˜ + +#### ้˜ถๆฎต 5๏ผšDevelopment ้•œๅƒ (`expanded-image`) - ๆœ€็ปˆไฝฟ็”จ +**็›ฎ็š„**๏ผšๅฎŒๅ…จๅฑ•ๅผ€็š„ๅผ€ๅ‘็Žฏๅขƒ - ๆœ€็ปˆๅฏ็”จ็š„้•œๅƒ +**็‰น็‚น**๏ผšไฝฟ็”จๅŸบไบŽ Python ็š„่ฎพ็ฝฎไปŽ release ๅŒ…่‡ชๅŠจๅฑ•ๅผ€ +**ๅŸบ็ก€้•œๅƒ**๏ผšไฝฟ็”จ `${PACKED_IMAGE_NAME}`๏ผˆๆฅ่‡ชๅ‰ไธ€้˜ถๆฎต็š„ release ้•œๅƒ๏ผ‰ + +```dockerfile +FROM ${PACKED_IMAGE_NAME} AS expanded-image +RUN bash -eux - <<'SCRIPT' + # ไฝฟ็”จ้กน็›ฎ็‰นๅฎš็š„ Python ็Žฏๅขƒ + uv sync --project /clice/pyproject.toml + source /clice/docker/linux/utility/.venv/bin/activate + + # ๅฐ† release ๅŒ…ๅฑ•ๅผ€ไธบๅฎŒๆ•ด็š„ๅผ€ๅ‘็Žฏๅขƒ + python docker/linux/utility/local_setup.py + + # ๆธ…็†ๆž„ๅปบๅทฅไปถไปฅๅ‡ๅฐ‘ๆœ€็ปˆ้•œๅƒๅคงๅฐ + rm -rf /clice +SCRIPT +``` + +**ๅฎ‰่ฃ…็š„็ป„ไปถ**๏ผš +- **็ผ–่ฏ‘ๅ™จ**๏ผšGCC 14ใ€Clang 20๏ผˆๆฅ่‡ชๅฎ˜ๆ–น LLVM PPA๏ผ‰ +- **ๆž„ๅปบ็ณป็ปŸ**๏ผšCMake๏ผˆๆœ€ๆ–ฐ็‰ˆ๏ผ‰ใ€XMake๏ผˆๆœ€ๆ–ฐ็‰ˆ๏ผ‰ +- **ๅผ€ๅ‘ๅทฅๅ…ท**๏ผšๅฎŒๆ•ด็š„ C++ ๅผ€ๅ‘ๆ ˆ๏ผŒๅŒ…ๆ‹ฌ่ฐƒ่ฏ•ๅ™จๅ’Œๅˆ†ๆžๅ™จ +- **่ฟ่กŒๆ—ถๅบ“**๏ผšๆ‰€ๆœ‰ๅฟ…่ฆ็š„่ฟ่กŒๆ—ถไพ่ต– + +**ๅฑ•ๅผ€็‰นๆ€ง**๏ผš +- **ๅŸบไบŽ Python tarfile ็š„ๆๅ–**๏ผšไฝฟ็”จ Python ๅ†…็ฝฎ tarfile ๆจกๅ—่ฟ›่กŒไธ€่‡ด็š„ๅฝ’ๆกฃๅค„็† +- **ๅŸบไบŽ็ป„ไปถ็š„ๅฎ‰่ฃ…**๏ผšไฝฟ็”จ็ป„ไปถๆžถๆž„่ฟ›่กŒ็ณป็ปŸ็š„ๅทฅๅ…ทๅฎ‰่ฃ… +- **ๅคงๅฐไผ˜ๅŒ–**๏ผšๅฑ•ๅผ€ๅŽๅˆ ้™คๆž„ๅปบๅทฅไปถไปฅๆœ€ๅฐๅŒ–ๆœ€็ปˆ้•œๅƒๅคงๅฐ +- **ๆ— ็ผ“ๅญ˜ไพ่ต–**๏ผšๆœ€็ปˆๅฑ•ๅผ€ไธ้œ€่ฆๆž„ๅปบๆ—ถ็ผ“ๅญ˜๏ผŒ้€‚ๅˆๆœ€็ปˆ็”จๆˆท็Žฏๅขƒ + +**Development ๅฎนๅ™จ**๏ผš่ฟ™ๆ˜ฏๆœ€็ปˆๅฑ•ๅผ€็š„ใ€ๅฏ็”จไบŽ็”Ÿไบง็š„ๅผ€ๅ‘็Žฏๅขƒ + +## ๐Ÿ“ ๅฎนๅ™จๆ–‡ไปถ็ป“ๆž„ + +### ่ฟ่กŒๆ—ถๅฎนๅ™จ็ป“ๆž„ +``` +/clice/ # ้กน็›ฎๆ น็›ฎๅฝ•๏ผˆ็”จๆˆทๅทฅไฝœ็ฉบ้—ด๏ผ‰ +โ”œโ”€โ”€ build/ # ๆž„ๅปบ่พ“ๅ‡บ็›ฎๅฝ• +โ”œโ”€โ”€ cmake/ # CMake ้…็ฝฎๆ–‡ไปถ +โ”œโ”€โ”€ config/ # ้›†ไธญ้…็ฝฎ +โ”‚ โ”œโ”€โ”€ build_config.py # ๆž„ๅปบ้…็ฝฎๅธธ้‡ๅ’Œ็ป„ไปถๆžถๆž„ +โ”‚ โ””โ”€โ”€ default-toolchain-version.json # ๅทฅๅ…ท้“พ็‰ˆๆœฌๅฎšไน‰ +โ”œโ”€โ”€ docker/linux/utility/ # ๅฎนๅ™จๅฎž็”จ็จ‹ๅบ่„šๆœฌ +โ”‚ โ”œโ”€โ”€ build_utils.py # 
ๆž„ๅปบๅฎž็”จ็จ‹ๅบๅ’Œๅนถ่กŒ่ฐƒๅบฆๅ™จ +โ”‚ โ”œโ”€โ”€ download_dependencies.py # ไพ่ต–ไธ‹่ฝฝๅ™จ +โ”‚ โ”œโ”€โ”€ create_release_package.py # ReleaseๅŒ…ๅˆ›ๅปบๅ™จ +โ”‚ โ””โ”€โ”€ local_setup.py # ๆœฌๅœฐ็Žฏๅขƒ่ฎพ็ฝฎ +โ”œโ”€โ”€ include/ # C++ ๅคดๆ–‡ไปถ +โ”œโ”€โ”€ src/ # C++ ๆบๆ–‡ไปถ +โ””โ”€โ”€ tests/ # ๆต‹่ฏ•ๆ–‡ไปถ +``` + +### ๆ‰“ๅŒ…็ป“ๆž„ +``` +${RELEASE_PACKAGE_DIR}/ # ็ป„ไปถๅŒ…็›ฎๅฝ•๏ผˆๆž„ๅปบๆ—ถ๏ผ‰ +โ”œโ”€โ”€ apt-unknown/ # APT ็ป„ไปถๅŒ…ๅ’Œๅ…ƒๆ•ฐๆฎ +โ”œโ”€โ”€ uv-unknown/ # UV ็ป„ไปถๅŒ… +โ”œโ”€โ”€ cmake-{version}/ # CMake ็ป„ไปถ๏ผˆๅธฆ็‰ˆๆœฌ๏ผ‰ +โ”œโ”€โ”€ xmake-{version}/ # XMake ็ป„ไปถ๏ผˆๅธฆ็‰ˆๆœฌ๏ผ‰ +โ”œโ”€โ”€ toolchain-unknown/ # ๅทฅๅ…ท้“พ็ป„ไปถๅฎนๅ™จ +โ”‚ โ”œโ”€โ”€ glibc-{version}/ # GNU C ๅบ“ๅญ็ป„ไปถ +โ”‚ โ”œโ”€โ”€ gcc-{version}/ # GNU ็ผ–่ฏ‘ๅ™จ้›†ๅˆๅญ็ป„ไปถ +โ”‚ โ”œโ”€โ”€ llvm-{version}/ # LLVM ้กน็›ฎๅญ็ป„ไปถ +โ”‚ โ””โ”€โ”€ linux-{version}/ # Linux ๅ†…ๆ ธๅคดๆ–‡ไปถๅญ็ป„ไปถ +โ””โ”€โ”€ manifest.json # ๅฎŒๆ•ด็ป„ไปถๅ’Œไพ่ต–ๆธ…ๅ• + +${PACKED_RELEASE_PACKAGE_PATH} # ๅŽ‹็ผฉๅ‘ๅธƒๅŒ…๏ผˆๅฆ‚ /release-pkg.tar.xz๏ผ‰ + +${ENVIRONMENT_CONFIG_FILE} # ็Žฏๅขƒ้…็ฝฎๆ–‡ไปถ๏ผˆๅฆ‚ /root/.bashrc๏ผ‰ +``` + +### ไพ่ต–ๆธ…ๅ•็ป“ๆž„ +```json +{ + "timestamp": 1696723200, + "components": { + "apt-unknown": { + "name": "apt", + "version": "unknown", + "type": "APTComponent", + "package_dir": "${RELEASE_PACKAGE_DIR}/apt-unknown", + "packages": [ + "git", "binutils", "bison", "build-essential", "g++-14", + "gawk", "gcc-14", "gnupg", "libstdc++-14-dev", + "make", "rsync", "software-properties-common", "unzip", "xz-utils", + "aria2", "apt-rdepends", "bzip2", "xz-utils" + ], + "package_count": 125 + }, + "uv-unknown": { + "name": "uv", + "version": "unknown", + "type": "UVComponent", + "package_dir": "${RELEASE_PACKAGE_DIR}/uv-unknown" + }, + "cmake-{version}": { + "name": "cmake", + "version": "3.28.3", + "type": "CMakeComponent", + "package_dir": "${RELEASE_PACKAGE_DIR}/cmake-3.28.3", + "base_url": "https://github.com/Kitware/CMake/releases/download/v{version}", + "tarball_name": "cmake-3.28.3-linux-x86_64.sh", + "verification_name": "cmake-3.28.3-SHA-256.txt" + }, + "xmake-{version}": { + "name": "xmake", + "version": "2.8.5", + "type": "XMakeComponent", + "package_dir": "${RELEASE_PACKAGE_DIR}/xmake-2.8.5", + "base_url": "https://github.com/xmake-io/xmake/releases/download/v{version}", + "tarball_name": "xmake-bundle-v2.8.5.Linux.x86_64" + }, + "toolchain-unknown": { + "name": "toolchain", + "version": "unknown", + "type": "ToolchainComponent", + "package_dir": "${RELEASE_PACKAGE_DIR}/toolchain-unknown", + "sub_components": { + "glibc-{version}": { + "name": "glibc", + "version": "2.39", + "type": "GlibcSubComponent", + "package_dir": "${RELEASE_PACKAGE_DIR}/toolchain-unknown/glibc-2.39", + "base_url": "https://ftpmirror.gnu.org/gnu/glibc", + "tarball_name": "glibc-2.39.tar.xz" + }, + "gcc-{version}": { + "name": "gcc", + "version": "14", + "type": "GccSubComponent", + "package_dir": "${RELEASE_PACKAGE_DIR}/toolchain-unknown/gcc-14", + "base_url": "https://ftpmirror.gnu.org/gnu/gcc/gcc-14", + "tarball_name": "gcc-14.tar.xz" + }, + "llvm-{version}": { + "name": "llvm", + "version": "20.1.5", + "type": "LlvmSubComponent", + "package_dir": "${RELEASE_PACKAGE_DIR}/toolchain-unknown/llvm-20.1.5", + "base_url": "https://github.com/llvm/llvm-project/releases/download/llvmorg-20.1.5", + "tarball_name": "llvm-project-20.1.5.src.tar.xz" + }, + "linux-{version}": { + "name": "linux", + "version": "6.6", + "type": "LinuxSubComponent", + "package_dir": "${RELEASE_PACKAGE_DIR}/toolchain-unknown/linux-6.6", + "base_url": 
"https://github.com/torvalds/linux/archive/refs/tags", + "tarball_name": "v6.6.tar.gz" + } + }, + "sysroot_dir": "${RELEASE_PACKAGE_DIR}/toolchain-unknown/sysroot/x86_64-linux-gnu/x86_64-linux-gnu/glibc2.39-libstdc++14-linux6.6" + } + }, + "build_stages": { + "dependencies_downloader": ["apt-unknown", "uv-unknown", "cmake-{version}", "xmake-{version}"], + "toolchain_builder": ["toolchain-unknown"] + }, + "environment_variables": { + "PATH": "/root/.local/bin:${PATH}", + "XMAKE_ROOT": "y" + } +} +``` + +## ๐Ÿš€ ๆž„ๅปบ่ฟ‡็จ‹ + +### ๆž„ๅปบๅ‘ฝไปค +```bash +# ไฝฟ็”จ้ป˜่ฎค่ฎพ็ฝฎๆž„ๅปบ๏ผˆclang + latest๏ผ‰ +./docker/linux/build.sh + +# ไฝฟ็”จ็‰นๅฎš็ผ–่ฏ‘ๅ™จๅ’Œ็‰ˆๆœฌๆž„ๅปบ +./docker/linux/build.sh --compiler gcc --version v1.2.3 +``` + +### ๆž„ๅปบ่ฟ‡็จ‹ๆต็จ‹ +1. **้˜ถๆฎต 1**๏ผšๅฎ‰่ฃ…ๅŸบๆœฌ็ณป็ปŸๅŒ… +2. **้˜ถๆฎต 2**๏ผšๅฐ†ๆ‰€ๆœ‰ไพ่ต–ไธ‹่ฝฝๅˆฐ็ผ“ๅญ˜ +3. **้˜ถๆฎต 3**๏ผšไปŽ็ผ“ๅญ˜ๅฎ‰่ฃ…ไพ่ต–ๅˆฐๆœ€็ปˆ้•œๅƒ +4. **ๆœ€็ปˆๅŒ–**๏ผš้…็ฝฎ็Žฏๅขƒๅนถๅˆ›ๅปบๅผ€ๅ‘ๅฐฑ็ปชๅฎนๅ™จ + +### ็”Ÿๆˆ็š„้•œๅƒ +**ๆž„ๅปบ้•œๅƒไธๅˆ† dev ๅ’Œ็”Ÿไบง**๏ผŒ็ปŸไธ€็š„้•œๅƒๆžถๆž„๏ผš + +- **้•œๅƒๅ็งฐ**๏ผš`clice-io/clice:linux-{compiler}-{version}` +- **้•œๅƒ็ฑปๅž‹**๏ผš + - **Release ้•œๅƒ**๏ผšไพฟไบŽๅˆ†ๅ‘๏ผŒๅŒ…ๅซๅŽ‹็ผฉๅŒ…ๅ’Œ็ผ“ๅญ˜๏ผŒไธ่ƒฝ็›ดๆŽฅไฝฟ็”จ + - **Development ้•œๅƒ**๏ผšๅฎŒๅ…จๅฑ•ๅผ€็š„ๅผ€ๅ‘็Žฏๅขƒ๏ผŒๆœ€็ปˆไฝฟ็”จ็š„้•œๅƒ +- **็คบไพ‹**๏ผš + - `clice-io/clice:linux-clang-latest` + - `clice-io/clice:linux-gcc-v1.2.3` + +**้‡่ฆ่ฏดๆ˜Ž**๏ผš +- Release ้•œๅƒไธป่ฆไผ˜ๅŠฟๆ˜ฏ้™ไฝŽ็”จๆˆทไธ‹่ฝฝ็š„้•œๅƒๅคงๅฐ +- Development ้•œๅƒๆ˜ฏๆœ€็ปˆๅฑ•ๅผ€็š„ๅฎนๅ™จ๏ผŒ็”จๆˆทๅฎž้™…ไฝฟ็”จ็š„็Žฏๅขƒ +- ๆž„ๅปบ่ฟ‡็จ‹็ปŸไธ€๏ผŒไธๅŒบๅˆ†ๅผ€ๅ‘ๅ’Œ็”Ÿไบง็Žฏๅขƒ + +## ๐Ÿƒ ๅฎนๅ™จไฝฟ็”จ + +### ่ฟ่กŒๅฎนๅ™จ +```bash +# ไฝฟ็”จ้ป˜่ฎค่ฎพ็ฝฎ่ฟ่กŒ +./docker/linux/run.sh + +# ไฝฟ็”จ็‰นๅฎš็ผ–่ฏ‘ๅ™จๅ’Œ็‰ˆๆœฌ่ฟ่กŒ +./docker/linux/run.sh --compiler gcc --version v1.2.3 + +# ้‡็ฝฎๅฎนๅ™จ๏ผˆๅˆ ้™คๅนถ้‡ๆ–ฐๅˆ›ๅปบ๏ผ‰ +./docker/linux/run.sh --reset + +# ๆ›ดๆ–ฐๅฎนๅ™จ้•œๅƒ +./docker/linux/run.sh --update +``` + +### ๅฎนๅ™จ็ฎก็† +- **่‡ชๅŠจๅˆ›ๅปบ**๏ผšๅฆ‚ๆžœๅฎนๅ™จไธๅญ˜ๅœจ๏ผŒไผš่‡ชๅŠจๅˆ›ๅปบ +- **็‰ˆๆœฌๆฃ€ๆŸฅ**๏ผšไฝฟ็”จๅ‰ไผš้ชŒ่ฏๅฎนๅ™จ้•œๅƒ็‰ˆๆœฌ +- **ๅทฅไฝœๅŒบๆŒ‚่ฝฝ**๏ผš้กน็›ฎ็›ฎๅฝ•ๆŒ‚่ฝฝๅˆฐๅฎนๅ™จไธญ็š„ `/clice` +- **ๆŒไน…ๅญ˜ๅ‚จ**๏ผšๅฎนๅ™จๅœจไผš่ฏไน‹้—ดไฟๆŒๆŒไน… + +### ๅผ€ๅ‘ๅทฅไฝœๆต็จ‹ +```bash +# 1. ๆž„ๅปบๅผ€ๅ‘ๅฎนๅ™จ +./docker/linux/build.sh --compiler clang + +# 2. ๅผ€ๅง‹ๅผ€ๅ‘ไผš่ฏ +./docker/linux/run.sh --compiler clang + +# 3. ๅœจๅฎนๅ™จๅ†… - ๆž„ๅปบ้กน็›ฎ +cd /clice +mkdir build && cd build +cmake .. 
-G Ninja -DCMAKE_BUILD_TYPE=Debug +ninja +``` + +## โšก ็ผ“ๅญ˜็ญ–็•ฅ + +### ็‹ฌ็ซ‹็ผ“ๅญ˜ๅ‘ฝๅ็ฉบ้—ด +ๆฏไธชๆž„ๅปบ้˜ถๆฎตไฝฟ็”จ็‹ฌ็ซ‹็š„็ผ“ๅญ˜ ID ไปฅๅฎž็Žฐ็œŸๆญฃ็š„ๅนถ่กŒๆ‰ง่กŒ๏ผš + +#### ้˜ถๆฎต็‰นๅฎš็ผ“ๅญ˜ ID +- **ๅทฅๅ…ท้“พๆž„ๅปบๅ™จ**๏ผš`toolchain-builder-*` + - `toolchain-builder-apt` - APT ๅŒ…็ผ“ๅญ˜ + - `toolchain-builder-apt-state` - APT ็Šถๆ€็ผ“ๅญ˜ + - `toolchain-builder-cache` - ้€š็”จๆž„ๅปบ็ผ“ๅญ˜ + - `toolchain-builder-uv` - UV Python ๅŒ…็ผ“ๅญ˜ + +- **ไพ่ต–ไธ‹่ฝฝๅ™จ**๏ผš`dependencies-downloader-*` + - `dependencies-downloader-apt` - APT ๅŒ…็ผ“ๅญ˜ + - `dependencies-downloader-apt-state` - APT ็Šถๆ€็ผ“ๅญ˜ + - `dependencies-downloader-cache` - ไธ‹่ฝฝ็ผ“ๅญ˜ + - `dependencies-downloader-uv` - UV Python ๅŒ…็ผ“ๅญ˜ + +- **Release ๅŒ…ๅˆ›ๅปบๅ™จ**๏ผš`packed-image-*` + - `packed-image-apt` - APT ๅŒ…็ผ“ๅญ˜ + - `packed-image-apt-state` - APT ็Šถๆ€็ผ“ๅญ˜ + - `packed-image-uv` - UV Python ๅŒ…็ผ“ๅญ˜ + +### Docker ๅฑ‚็ผ“ๅญ˜ +- **Python ๅŸบ็ก€็Žฏๅขƒ**๏ผš็‹ฌ็ซ‹็ผ“ๅญ˜๏ผŒๅœจๆ‰€ๆœ‰้˜ถๆฎต้—ดๅ…ฑไบซ +- **้˜ถๆฎต่พ“ๅ‡บ**๏ผšๆฏไธช้˜ถๆฎต็š„่พ“ๅ‡บไฝœไธบ็‹ฌ็ซ‹็š„ Docker ๅฑ‚็ผ“ๅญ˜ +- **ๅนถ่กŒ้˜ถๆฎต้š”็ฆป**๏ผš็‹ฌ็ซ‹็ผ“ๅญ˜้˜ฒๆญขๅนถ่กŒๆ‰ง่กŒๆœŸ้—ด็š„ๅ†ฒ็ช + +### ็ผ“ๅญ˜ไผ˜ๅŒ–ไผ˜ๅŠฟ +- **็œŸๆญฃ็š„ๅนถ่กŒๆ‰ง่กŒ**๏ผš็‹ฌ็ซ‹็ผ“ๅญ˜ๅ‘ฝๅ็ฉบ้—ดๆถˆ้™คๅ†ฒ็ช +- **ๅ‡ๅฐ‘ๆž„ๅปบๆ—ถ้—ด**๏ผšๆ™บ่ƒฝๅฑ‚็ผ“ๅญ˜ๅ’ŒๅŸบไบŽ็ป„ไปถ็š„ๆž„ๅปบ +- **ๅธฆๅฎฝๆ•ˆ็އ**๏ผšๆฏไธช็ผ“ๅญ˜ๅ‘ฝๅ็ฉบ้—ดไธ‹่ฝฝๅชๅ‘็”Ÿไธ€ๆฌก +- **็ฆป็บฟ่ƒฝๅŠ›**๏ผšๅฎŒๆ•ดไพ่ต–้ข„่งฃๆžไฝฟ็ฆป็บฟ้‡ๅปบๆˆไธบๅฏ่ƒฝ +- **้€‰ๆ‹ฉๆ€งๅคฑๆ•ˆ**๏ผšไธ€ไธช็ป„ไปถ็š„ๆ›ดๆ”นไธไผšไฝฟๅ…ถไป–็ป„ไปถๅคฑๆ•ˆ + +## ๐Ÿ›ก๏ธ ๅฎ‰ๅ…จๅ’Œ้ชŒ่ฏ + +### ๅŒ…้ชŒ่ฏ +- **CMake**๏ผšๅฎ‰่ฃ…็จ‹ๅบ็š„ SHA-256 ๆ ก้ชŒๅ’Œ้ชŒ่ฏ +- **APT ๅŒ…**๏ผšๆ ‡ๅ‡† APT ็ญพๅ้ชŒ่ฏ +- **ไพ่ต–ๆ ‘**๏ผšไฝฟ็”จ `apt-rdepends` ๅฎŒๆ•ดไพ่ต–่งฃๆž + +### ๆž„ๅปบ้š”็ฆป +- **ๅคš้˜ถๆฎต**๏ผšๆฏไธช้˜ถๆฎต้ƒฝๆ˜ฏ้š”็ฆปๅ’Œๅฏ็ผ“ๅญ˜็š„ +- **้ž root ็”จๆˆท**๏ผšๅผ€ๅ‘ๅฐฝๅฏ่ƒฝไปฅ้ž root ็”จๆˆท่ฟ่กŒ +- **ๆธ…ๆด็Žฏๅขƒ**๏ผšๆฏๆฌกๆž„ๅปบ้ƒฝไปŽๆธ…ๆดๅŸบ็ก€ๅผ€ๅง‹ + +## ๐Ÿ”ง ้…็ฝฎ็ฎก็† + +### ้›†ไธญ้…็ฝฎ +ๆ‰€ๆœ‰ๅฎนๅ™จ้…็ฝฎ้€š่ฟ‡ `config/build_config.py` ็ฎก็†๏ผš + +```python +# ็‰ˆๆœฌ็ฎก็† +TOOLCHAIN_VERSIONS = { + "cmake": "3.28.3", + "xmake": "2.8.5", + "gcc": "14", + "llvm": "20" +} + +# ๅŒ…ๅˆ—่กจ +DEV_CONTAINER_BASIC_TOOLS = [ + "software-properties-common", + "gnupg", "git", "xz-utils", "unzip", "make" +] +``` + +### ็Žฏๅขƒๅ˜้‡ +- `PKG_CACHE_DIR=/pkg-cache` - ๅŒ…็ผ“ๅญ˜็›ฎๅฝ• +- `DEBIAN_FRONTEND=noninteractive` - ้žไบคไบ’ๅผๅŒ…ๅฎ‰่ฃ… +- `XMAKE_ROOT=y` - XMake root ๆƒ้™ + +## ๐Ÿš€ ๆ€ง่ƒฝไผ˜ๅŒ– + +### ๅนถ่กŒๅค„็†ๆžถๆž„ +**ๅนถ่กŒไผ˜ๅŒ–ๅœจไธ‰ไธชๅฑ‚้ขๅฎž็Žฐ**๏ผš + +#### Stage ้—ดๅนถ่กŒ๏ผˆDocker ๆž„ๅปบๅฑ‚้ข๏ผ‰ +- **ๅทฅๅ…ท้“พๆž„ๅปบๅ™จ** ๅ’Œ **ไพ่ต–ไธ‹่ฝฝๅ™จ** ้˜ถๆฎตๅนถๅ‘ๆ‰ง่กŒ +- **Release ๅŒ…ๅˆ›ๅปบๅ™จ** ็ญ‰ๅพ…ไธคไธชๅนถ่กŒ้˜ถๆฎตๅฎŒๆˆ +- Docker BuildKit ่‡ชๅŠจ่ฐƒๅบฆๅนถ่กŒ้˜ถๆฎตๆ‰ง่กŒ +- **็‹ฌ็ซ‹็ผ“ๅญ˜ๅ‘ฝๅ็ฉบ้—ด** ้˜ฒๆญขๅนถ่กŒๆ‰ง่กŒๆœŸ้—ด็š„็ผ“ๅญ˜ๅ†ฒ็ช + +#### Stage ๅ†…ๅนถ่กŒ๏ผˆ็ป„ไปถๅฑ‚้ข๏ผ‰ +**ๅทฅๅ…ท้“พๆž„ๅปบๅ™จๅ†…้ƒจๅนถ่กŒ**๏ผš +- ไฝฟ็”จ `ParallelTaskScheduler` ่ฟ›่กŒๆœ€ไผ˜ไฝœไธš่ฐƒๅบฆ +- ไฝฟ็”จ `ProcessPoolExecutor` ๅนถๅ‘ๆž„ๅปบ็ผ–่ฏ‘ๅ™จ็ป„ไปถ +- ๅคšๆ ธ CPU ๅˆฉ็”จ็އ็”จไบŽๅนถ่กŒ็ผ–่ฏ‘ +- ไฝฟ็”จๆ‹“ๆ‰‘ๆŽ’ๅบ่งฃๆž็ป„ไปถไพ่ต– + +**ไพ่ต–ไธ‹่ฝฝๅ™จๅ†…้ƒจๅนถ่กŒ**๏ผš +- `aria2c` ๅคš่ฟžๆŽฅไธ‹่ฝฝๅ•ไธชๆ–‡ไปถ +- ไฝฟ็”จๅนถ่กŒไฝœไธšๆ‰ง่กŒ็š„ APT ๅŒ…ๆ‰นๅค„็† +- ๅŸบไบŽ็ป„ไปถ็š„ๅนถ่กŒไธ‹่ฝฝ๏ผˆAPTใ€ๅทฅๅ…ทใ€Python ๅŒ…ๅŒๆ—ถ่ฟ›่กŒ๏ผ‰ +- ้ข„่งฃๆžไพ่ต–ๆ ‘ๅ‡ๅฐ‘ไธ‹่ฝฝๆ—ถๆŸฅๆ‰พ + +**Release ๅŒ…ๅˆ›ๅปบๅ™จๅนถ่กŒ**๏ผš +- ไฝฟ็”จๅŸบไบŽไฝœไธš็š„ไปปๅŠก่ฐƒๅบฆๅ™จๅนถ่กŒ็ป„ไปถๅˆๅนถ +- ๅนถๅ‘ๆธ…ๅ•็”Ÿๆˆๅ’ŒๅŒ…ๅŽ‹็ผฉ +- ๆœ€็ปˆๆ‰“ๅŒ…้˜ถๆฎต็š„ๆœ€ไผ˜่ต„ๆบๅˆฉ็”จ + +#### 
็ผ“ๅญ˜็‹ฌ็ซ‹ๆžถๆž„ +ๆฏไธช้˜ถๆฎตไฝฟ็”จๅฎŒๅ…จ็‹ฌ็ซ‹็š„็ผ“ๅญ˜ๅ‘ฝๅ็ฉบ้—ดๆ“ไฝœ๏ผš +```dockerfile +# ๅทฅๅ…ท้“พๆž„ๅปบๅ™จ - ็‹ฌ็ซ‹็ผ“ๅญ˜ๅ‘ฝๅ็ฉบ้—ด +--mount=type=cache,target=${APT_CACHE_DIR},sharing=locked,id=toolchain-builder-apt +--mount=type=cache,target=${UV_CACHE_DIR},sharing=locked,id=toolchain-builder-uv + +# ไพ่ต–ไธ‹่ฝฝๅ™จ - ็‹ฌ็ซ‹็ผ“ๅญ˜ๅ‘ฝๅ็ฉบ้—ด +--mount=type=cache,target=${APT_CACHE_DIR},sharing=locked,id=dependencies-downloader-apt +--mount=type=cache,target=${UV_CACHE_DIR},sharing=locked,id=dependencies-downloader-uv + +# Release ๅŒ…ๅˆ›ๅปบๅ™จ - ็‹ฌ็ซ‹็ผ“ๅญ˜ๅ‘ฝๅ็ฉบ้—ด +--mount=type=cache,target=${APT_CACHE_DIR},sharing=locked,id=packed-image-apt +--mount=type=cache,target=${UV_CACHE_DIR},sharing=locked,id=packed-image-uv +``` + +### ๆž„ๅปบไผ˜ๅŒ– +- **ๅฑ‚็ผ“ๅญ˜**๏ผš็งฏๆž็š„ Docker ๅฑ‚็ผ“ๅญ˜็ญ–็•ฅ +- **ๆœ€ๅฐ้‡ๅปบ**๏ผšๅช้‡ๅปบๆ›ดๆ”น็š„็ป„ไปถ +- **ๅคงๅฐไผ˜ๅŒ–**๏ผšๅคš้˜ถๆฎตๆž„ๅปบๆœ€ๅฐๅŒ–ๆœ€็ปˆ้•œๅƒๅคงๅฐ +- **็ผ“ๅญ˜ๅˆ†็ฆป**๏ผšRelease ้•œๅƒไฝœไธบ็ผ“ๅญ˜ๅฑ‚๏ผŒDevelopment ้•œๅƒๅฟซ้€Ÿๅฑ•ๅผ€ + +## ๐Ÿ”„ ็ปดๆŠคๅ’Œๆ›ดๆ–ฐ + +### ็‰ˆๆœฌๆ›ดๆ–ฐ +ๆ›ดๆ–ฐ `config/default-toolchain-version.json` ไธญ็š„็‰ˆๆœฌ๏ผš +```json +{ + "cmake": "3.28.3", + "xmake": "2.8.5", + "gcc": "14", + "llvm": "20" +} +``` + +### ๆทปๅŠ ๆ–ฐไพ่ต– +1. ๆ›ดๆ–ฐ `config/build_config.py` ไธญ็š„ๅŒ…ๅˆ—่กจ +2. ไฝฟ็”จ `./docker/linux/build.sh --rebuild` ้‡ๅปบๅฎนๅ™จ +3. ไฝฟ็”จ `./docker/linux/run.sh --reset` ้ชŒ่ฏ + +### ๅฎนๅ™จๅฅๅบทๆฃ€ๆŸฅ +```bash +# ๆฃ€ๆŸฅๅฎนๅ™จ็Šถๆ€ +docker ps -f name=clice-linux-clang + +# ้ชŒ่ฏๅผ€ๅ‘็Žฏๅขƒ +./docker/linux/run.sh bash -c "cmake --version && xmake --version" + +# ๆฃ€ๆŸฅๅŒ…ๆธ…ๅ• +docker exec clice-linux-clang cat /pkg-cache/manifest.json +``` + +## ๐ŸŽฏ ๆœ€ไฝณๅฎž่ทต + +### ๅผ€ๅ‘ๅทฅไฝœๆต็จ‹ +1. ไฝฟ็”จ็‰ˆๆœฌ็‰นๅฎš็š„ๅฎนๅ™จ่ฟ›่กŒๅฏ้‡็Žฐๆž„ๅปบ +2. ๅœจไธป่ฆ็‰ˆๆœฌไน‹้—ดๅˆ‡ๆขๆ—ถ้‡็ฝฎๅฎนๅ™จ +3. ๅฎšๆœŸไฝฟ็”จ `--update` ๆ‹‰ๅ–ๆœ€ๆ–ฐ้•œๅƒ +4. ไป…ๆŒ‚่ฝฝๅฟ…่ฆ็›ฎๅฝ•ไปฅ้ฟๅ…ๆ€ง่ƒฝ้—ฎ้ข˜ + +### ๅฎนๅ™จ็ฎก็† +1. ไธบๅ‘ๅธƒๆž„ๅปบไฝฟ็”จๆ่ฟฐๆ€ง็‰ˆๆœฌๆ ‡็ญพ +2. ๅฎšๆœŸๆธ…็†ๆœชไฝฟ็”จ็š„ๅฎนๅ™จๅ’Œ้•œๅƒ +3. ็›‘ๆŽงๅฎนๅ™จ่ต„ๆบไฝฟ็”จๆƒ…ๅ†ต +4. 
ๅฐ†ๅฎนๅ™จ้…็ฝฎไฟๆŒๅœจ็‰ˆๆœฌๆŽงๅˆถไธ‹ + +ๆญคๆžถๆž„ไธบ Clice ้กน็›ฎๆไพ›ไบ†ๅผบๅคงใ€้ซ˜ๆ•ˆๅ’Œๅฏ็ปดๆŠค็š„ๅผ€ๅ‘็Žฏๅขƒ๏ผŒๅ…ทๆœ‰ไผ˜ๅŒ–็š„ๆž„ๅปบๆ—ถ้—ดใ€ๅ…จ้ข็š„ๅทฅๅ…ท้“พๆ”ฏๆŒๅ’Œๅ‡บ่‰ฒ็š„ๅผ€ๅ‘่€…ไฝ“้ชŒใ€‚ \ No newline at end of file diff --git a/xmake.lua b/xmake.lua index edc9e5bf..3d52d107 100644 --- a/xmake.lua +++ b/xmake.lua @@ -52,6 +52,8 @@ add_requires("clice-llvm", { alias = "llvm" }) add_rules("mode.release", "mode.debug", "mode.releasedbg") set_languages("c++23") add_rules("clice_build_config") +add_cxflags("--sysroot=/toolchain-build/sysroot/x86_64-linux-gnu/x86_64-linux-gnu/glibc2.39-libstdc++14.3.0-linux6.17 -v -Wl,--verbose", {force = true, tools = {"gcc", "gxx", "clang", "clangxx"}}) +add_ldflags("--verbose -static-libstdc++ -static-libgcc", {force = true}) target("clice-core", function() set_kind("$(kind)") From af868f5cbe857207651d0308856ef87b6d32847c Mon Sep 17 00:00:00 2001 From: sora_mono <849526320@qq.com> Date: Wed, 22 Oct 2025 01:09:11 +0800 Subject: [PATCH 2/4] Optimize and bug fixes --- config/build_config.py | 187 ++++++++-- config/default-toolchain-version.json | 1 + docker/.dockerignore | 1 + docker/linux/Dockerfile | 346 +++++++++++++----- docker/linux/build.sh | 162 ++++---- docker/linux/container-entrypoint.sh | 36 ++ docker/linux/run.sh | 85 ++++- .../utility/build_clice_compiler_toolchain.py | 87 +++-- docker/linux/utility/build_utils.py | 51 ++- docker/linux/utility/common.sh | 66 ++++ .../linux/utility/create_release_package.py | 195 +++++++--- docker/linux/utility/download_dependencies.py | 74 ++-- docker/linux/utility/local_setup.py | 118 ++---- docs/en/dev/build.md | 156 ++++---- docs/en/dev/dev-container-architecture.md | 2 +- docs/zh/dev/dev-container-architecture.md | 2 +- 16 files changed, 1036 insertions(+), 533 deletions(-) create mode 100644 docker/linux/container-entrypoint.sh diff --git a/config/build_config.py b/config/build_config.py index 34ea43fe..72aee885 100644 --- a/config/build_config.py +++ b/config/build_config.py @@ -54,7 +54,9 @@ } # Core project structure definitions -PROJECT_ROOT: str = "/clice" # Root directory of the Clice project +# CLICE_WORKDIR can be customized via environment variable, defaults to /clice +CLICE_WORKDIR: str = os.getenv("CLICE_WORKDIR", "") # Working directory inside Docker container +PROJECT_ROOT: str = CLICE_WORKDIR # Root directory of the Clice project PYPROJECT_PATH: str = os.path.join(PROJECT_ROOT, "pyproject.toml") # Python project configuration file TOOLCHAIN_BUILD_ROOT: str = "/toolchain-build" # Root directory for all toolchain builds TOOLCHAIN_CONFIG_PATH: str = os.path.join(PROJECT_ROOT, "config/default-toolchain-version.json") # Version definitions @@ -70,7 +72,7 @@ ENVIRONMENT_CONFIG_FILE: str = os.getenv("ENVIRONMENT_CONFIG_FILE", "") # Source code cache directory for toolchain build -CACHE_DIR_ROOT: str = os.getenv("CACHE_DIR_ROOT", "") +BUILD_CACHE_DIR: str = os.getenv("BUILD_CACHE_DIR", "") WORKDIR_ROOT: str = "/dev-container-build" # Temporary work directory for builds (not persistent) @@ -137,8 +139,10 @@ class Component: "xz-utils", # Required for extracting .xz archives (toolchain sources) ] + build_prerequisites: List[str] = [] # To be defined by subclasses if needed + # Where the component will be deployed - host_system: str = "Linux" + host_system: str = "linux" host_machine: str = "x86_64" # Where the constructed output (like clice binary) runs on @@ -151,7 +155,7 @@ def __init__(self, name: str, version: str = "unknown"): # Directory structure generation based on name and 
version
         self.package_dir = os.path.join(RELEASE_PACKAGE_DIR, self.versioned_name)
-        self.cache_dir = os.path.join(CACHE_DIR_ROOT, self.versioned_name)
+        self.cache_dir = os.path.join(BUILD_CACHE_DIR, self.versioned_name)
         self.work_dir = os.path.join(WORKDIR_ROOT, self.versioned_name)
 
     @property
@@ -235,7 +239,7 @@ def extracted_dir(self) -> str:
     @property
     def src_dir(self) -> str:
         """Version-specific source directory."""
-        return os.path.join(self.work_dir, self.versioned_name)
+        return os.path.join(self.work_dir, "src")
 
     @property
     def build_dir(self) -> str:
@@ -271,12 +275,65 @@ def all_packages(self) -> List[str]:
 
 
 class UVComponent(Component):
-    """UV Python package manager component (versionless)."""
+    """
+    UV Python package manager component.
+
+    Manages:
+    • UV standalone binary distribution
+    • Python interpreter installation (via UV's python management)
+    • Python packages from pyproject.toml (wheels, dependencies)
+
+    Cache Strategy:
+    • cache_dir/ - UV tarball cache (Docker layer, for build efficiency)
+      - Only caches UV binary tarball downloads
+      - As uv does not provide a way to download Python packages separately, we give up caching the Python binary here
+      - Not mounted as a volume, which keeps Docker layer caching intact
+
+    • package_dir/ - Multi-purpose directory for cross-stage transfer
+      - UV binary for installation
+      - Python installation cache (UV_CACHE_DIR during python install, cached in the same directory as the Python packages)
+      - Python packages cache (pip wheels, venv)
+      - Transferred to the expand stage for installation
+
+    Why this design:
+    • UV tarball cached separately for Docker layer reuse
+    • Python install uses package_dir as UV_CACHE_DIR (no separate download cache)
+    • Python packages cache goes to the standard location for later stages
+    • All needed files end up in package_dir for the expand stage
+    """
+
+    base_url = "https://github.com/astral-sh/uv/releases/download/{version}"
+    tarball_name_pattern = "uv-{machine}-unknown-linux-gnu.tar.gz"
 
     def __init__(self):
-        super().__init__("uv")
-
+        version = TOOLCHAIN_VERSIONS["uv"]
+        super().__init__("uv", version)
+        # Python version managed by this UV instance
+        self.python_version = TOOLCHAIN_VERSIONS["python"]
+
+    @property
+    def tarball_cache_dir(self) -> str:
+        """Directory where the UV binary tarball is cached (Docker layer)."""
+        return os.path.join(self.cache_dir, "tarball")
+
+    @property
+    def tarball_package_dir(self) -> str:
+        """Directory where the UV binary tarball is stored in the package (for cross-stage transfer)."""
+        return os.path.join(self.package_dir, "tarball")
+
+    @property
+    def install_dir(self) -> str:
+        """Directory where the UV binary will be installed."""
+        return "/root/.local/bin"
+
+    @property
+    def packages_package_dir(self) -> str:
+        """
+        UV_CACHE_DIR for the Python installation phase.
+        Points into package_dir to avoid a separate cache layer.
+        """
+        return os.path.join(self.package_dir, "uv-packages")
+
 
 class XMakeComponent(Component):
     """XMake build system component."""
@@ -322,28 +379,6 @@ def __init__(self):
     def sysroot_dir(self) -> str:
         """Sysroot directory with version-specific naming."""
         return f"{self.package_dir}/sysroot/{self.host_triplet}/{self.target_triplet}/glibc{self.glibc.version}-libstdc++{self.gcc.version}-linux{self.linux.version}"
-
-    @property
-    def build_prerequisites(self) -> List[str]:
-        """Comprehensive build environment for toolchain compilation."""
-        return [
-            # Core build tools
-            "make",              # GNU Make build automation
-            "rsync",             # File synchronization (Linux kernel headers)
-            "gawk",              # GNU AWK text processing (glibc requirement)
-            "bison",             # Parser generator (glibc requirement)
-            "binutils",          # Binary utilities (assembler, linker, etc.)
-            "file",              # File type identification (libcc1 requires this tool)
-
-            # GCC toolchain for glibc (requires GCC < 10 to avoid linker conflicts)
-            "gcc-9",             # GNU C compiler version 9
-
-            # Modern GCC toolchain for libstdc++ building
-            "gcc-14",            # Latest GNU C compiler
-            "g++-14",            # Latest GNU C++ compiler
-            "libstdc++-14-dev",  # Latest C++ standard library development files
-        ]
-
 
 # ========================================================================
 # 🧩 Toolchain Sub-Component Classes
@@ -355,6 +390,15 @@ class GlibcSubComponent(ToolchainSubComponent):
     base_url = "https://ftpmirror.gnu.org/gnu/glibc"
     tarball_name_pattern = "glibc-{version}.tar.xz"
    verification_name_pattern = "glibc-{version}.tar.xz.sig"
+    build_prerequisites: List[str] = [
+        "make",      # GNU Make build automation
+        "binutils",  # Binary utilities (assembler, linker, etc.)
+        "gawk",      # Text processing (required by glibc build system)
+        "bison",     # Parser generator (required by glibc build system)
+        "gcc-9",     # GNU C compiler version 9 (for glibc < 2.36)
+
+        *ToolchainSubComponent.build_prerequisites
+    ]
 
     def __init__(self, parent_component: ToolchainComponent):
         super().__init__("glibc", parent_component)
@@ -366,6 +410,17 @@ class GccSubComponent(ToolchainSubComponent):
     base_url = "https://ftpmirror.gnu.org/gnu/gcc/gcc-{version}"
     tarball_name_pattern = "gcc-{version}.tar.xz"
     verification_name_pattern = "gcc-{version}.tar.xz.sig"
+    build_prerequisites: List[str] = [
+        "make",               # GNU Make build automation
+        "binutils",           # Binary utilities (assembler, linker, etc.)
+        "file",               # File type identification (libcc1 requires this tool)
+
+        "gcc-14",             # Latest GNU C compiler (for bootstrapping)
+        "g++-14",             # Latest GNU C++ compiler (for bootstrapping)
+        "libstdc++-14-dev",   # Latest C++ standard library development files
+
+        *ToolchainSubComponent.build_prerequisites
+    ]
 
     def __init__(self, parent_component: ToolchainComponent):
         super().__init__("gcc", parent_component)
@@ -401,9 +456,68 @@ class LinuxSubComponent(ToolchainSubComponent):
     base_url = "https://github.com/torvalds/linux/archive/refs/tags"
     tarball_name_pattern = "v{version}.tar.gz"
     verification_name_pattern = ""  # Linux kernel releases don't include separate signature files
-
+    build_prerequisites: List[str] = [
+        "make",      # GNU Make build automation
+        "binutils",  # Binary utilities (assembler, linker, etc.)
+ "rsync", # File synchronization (Linux kernel headers) + + "gcc-9", # Even though we don't build the kernel, configure requires gcc + + *ToolchainSubComponent.build_prerequisites + ] + def __init__(self, parent_component: ToolchainComponent): - super().__init__("linux", parent_component) + super().__init__("linux", parent_component) + +class CliceSetupScriptsComponent(Component): + """ + Clice setup scripts and configuration component. + + Contains Python scripts and configuration files needed for container setup, + packaged as a complete directory structure: + - config/build_config.py: Configuration definitions + - config/default-toolchain-version.json: Version information + - docker/linux/utility/local_setup.py: Final container setup script + - docker/linux/utility/build_utils.py: Utility functions + + These files are packaged preserving their directory structure and will be + executed in-place during container expansion (no extraction to CLICE_WORKDIR needed). + """ + + def __init__(self): + super().__init__("clice-setup-scripts") + + @property + def files_to_copy(self) -> list[str]: + """List of files to copy with their relative paths (preserving directory structure).""" + return [ + 'config/build_config.py', + 'config/default-toolchain-version.json', + 'docker/linux/utility/local_setup.py', + 'docker/linux/utility/build_utils.py', + ] + +class BashrcComponent(Component): + """ + Bash configuration component. + + Contains the .bashrc file with: + - Environment variables (PATH, etc.) + - Container entrypoint script (auto Python env setup) + """ + + def __init__(self): + super().__init__("bashrc") + + @property + def bashrc_path(self) -> str: + """Path to .bashrc file in package.""" + return os.path.join(self.package_dir, ".bashrc") + + @property + def entrypoint_script_source(self) -> str: + """Path to container-entrypoint.sh source file.""" + return os.path.join(CLICE_WORKDIR, "docker/linux/container-entrypoint.sh") # ======================================================================== # ๐Ÿ—๏ธ Component Instances and Build Stage Organization @@ -415,6 +529,8 @@ def __init__(self, parent_component: ToolchainComponent): XMAKE = XMakeComponent() CMAKE = CMakeComponent() TOOLCHAIN = ToolchainComponent() +CLICE_SETUP_SCRIPTS = CliceSetupScriptsComponent() +BASHRC = BashrcComponent() # ======================================================================== # ๐Ÿ“‹ Build Stage Component Groups @@ -428,6 +544,12 @@ def __init__(self, parent_component: ToolchainComponent): XMAKE, ] +# Image packer stage components (scripts and configs that go into package) +IMAGE_PACKER_STAGE: list[Component] = [ + CLICE_SETUP_SCRIPTS, + BASHRC, +] + # Toolchain builder stage components TOOLCHAIN_BUILDER_STAGE: list[Component] = [ TOOLCHAIN, @@ -436,6 +558,7 @@ def __init__(self, parent_component: ToolchainComponent): # Master component registry ALL_COMPONENTS = [ *DEPENDENCIES_DOWNLOADER_STAGE, + *IMAGE_PACKER_STAGE, *TOOLCHAIN_BUILDER_STAGE, ] diff --git a/config/default-toolchain-version.json b/config/default-toolchain-version.json index 6c88a5b1..8f55df48 100644 --- a/config/default-toolchain-version.json +++ b/config/default-toolchain-version.json @@ -2,6 +2,7 @@ "xmake": "3.0.2", "cmake": "3.31.8", "python": "3.13", + "uv": "0.9.2", "gcc": "14.3.0", "llvm": "20.1.8", "glibc": "2.39", diff --git a/docker/.dockerignore b/docker/.dockerignore index 115b4295..24bef13a 100644 --- a/docker/.dockerignore +++ b/docker/.dockerignore @@ -1,6 +1,7 @@ build/ out/ .cache +__pycache__ .clice/ .llvm*/ diff --git 
a/docker/linux/Dockerfile b/docker/linux/Dockerfile index 86e0d7ff..a3e2c3fe 100644 --- a/docker/linux/Dockerfile +++ b/docker/linux/Dockerfile @@ -1,3 +1,5 @@ +# check=skip=InvalidDefaultArgInFrom,experimental=all + # ======================================================================== # ๐Ÿš€ Clice Dev Container Multi-Stage Build System # ======================================================================== @@ -11,166 +13,287 @@ # Arguments passed from docker image build system ARG COMPILER ARG PACKED_IMAGE_NAME +ARG CLICE_WORKDIR=/clice # Global config shared in multi-stage builds -ARG RELEASE_PACKAGE_DIR="/clice-dev-container-package" -ARG PACKED_RELEASE_PACKAGE_PATH="/release-pkg.tar.xz" -ARG ENVIRONMENT_CONFIG_FILE="/root/.bashrc" -ARG CACHE_DIR_ROOT="/var/cache/clice-dev-container" +ARG RELEASE_PACKAGE_DIR=/clice-dev-container-package +ARG PACKED_RELEASE_PACKAGE_PATH=/release-pkg.tar.xz +ARG ENVIRONMENT_CONFIG_FILE=/root/.bashrc +ARG BUILD_CACHE_DIR=/var/cache/clice-dev-container # APT system paths configuration -ARG APT_CACHE_DIR="/var/cache/apt" -ARG APT_STATE_CACHE_DIR="/var/lib/apt" +ARG APT_CACHE_DIR=/var/cache/apt +ARG APT_STATE_CACHE_DIR=/var/lib/apt + +# UV docker layer cache configuration +ARG UV_PACKAGE_DIR_NAME=uv-package +ARG UV_TARBALL_DIR_NAME=tarball -# UV cache configuration -ARG UV_CACHE_DIR="/var/cache/uv" +# Python build scripts communicate via these environment variables +ARG PYTHON_BUILD_SCRIPT_BASE_ENV_VARIABLES="\ +CLICE_WORKDIR=${CLICE_WORKDIR} \ +RELEASE_PACKAGE_DIR=${RELEASE_PACKAGE_DIR} \ +PACKED_RELEASE_PACKAGE_PATH=${PACKED_RELEASE_PACKAGE_PATH} \ +ENVIRONMENT_CONFIG_FILE=${ENVIRONMENT_CONFIG_FILE} \ +BUILD_CACHE_DIR=${BUILD_CACHE_DIR}" # ======================================================================== # ๐Ÿ Base Stage: Python Environment Foundation # ======================================================================== -FROM ubuntu:24.04 AS base-python-environment -LABEL description="Base image with consistent Python and uv environment for all stages" +FROM ubuntu:24.04 AS base-python-environment-for-build +LABEL description="Base image with consistent Python and uv environment for builder stages" + +ARG CLICE_WORKDIR +ARG APT_CACHE_DIR +ARG APT_STATE_CACHE_DIR +ARG RELEASE_PACKAGE_DIR +ARG BUILD_CACHE_DIR +ARG UV_PACKAGE_DIR_NAME +ARG UV_TARBALL_DIR_NAME # Environment setup ENV PATH="/root/.local/bin:${PATH}" -ENV UV_CACHE_DIR=${UV_CACHE_DIR} +ENV DEBIAN_FRONTEND=noninteractive -COPY config /clice/config -COPY docker/linux /clice/docker/linux +# Do NOT copy all config at once, or all stages would be rebuilt when any file changes +# Only copy what is needed for this stage +COPY docker/linux/utility/pyproject.toml ${CLICE_WORKDIR}/docker/linux/utility/pyproject.toml +COPY config/default-toolchain-version.json ${CLICE_WORKDIR}/config/default-toolchain-version.json # Install minimal system dependencies and uv RUN --mount=type=cache,target=${APT_CACHE_DIR},sharing=locked \ --mount=type=cache,target=${APT_STATE_CACHE_DIR},sharing=locked \ - --mount=type=cache,target=${UV_CACHE_DIR},sharing=locked \ + --mount=type=cache,target=${BUILD_CACHE_DIR},sharing=locked \ bash -eux - <<'SCRIPT' - apt update - apt install -y --no-install-recommends curl jq ca-certificates + set -e + + # Disable auto cleanup to keep apt cache + # This option would override Binary::apt::APT::Keep-Downloaded-Packages + rm -f /etc/apt/apt.conf.d/docker-clean + # It is strange that apt will accept APT::Keep-Downloaded-Packages in commandline, + # but 
Binary::apt::APT::Keep-Downloaded-Packages in config file + echo 'APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/99keepcache + echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' >> /etc/apt/apt.conf.d/99keepcache + + apt update -o DPkg::Lock::Timeout=-1 + apt install -y --no-install-recommends -o DPkg::Lock::Timeout=-1 curl jq ca-certificates + + # Get uv version from configuration + UV_VERSION=$(jq -r .uv ${CLICE_WORKDIR}/config/default-toolchain-version.json) + echo "๐Ÿ“ฆ Installing uv version: $UV_VERSION" + + # Determine architecture for uv standalone build + ARCH=$(uname -m) + case "$ARCH" in + x86_64) + UV_PLATFORM="x86_64-unknown-linux-gnu" + ;; + *) + echo "Unsupported architecture: $ARCH" + exit 1 + ;; + esac + + # Download uv standalone build from GitHub releases to package directory + UV_TARBALL_NAME="uv-${UV_PLATFORM}.tar.gz" + UV_URL="https://github.com/astral-sh/uv/releases/download/${UV_VERSION}/${UV_TARBALL_NAME}" + + # Create cache directory for UV tarball (Docker layer cache) + UV_BUILD_CACHE_DIR="${BUILD_CACHE_DIR}/uv-${UV_VERSION}" + UV_BUILD_CACHE_PACKAGE_DIR="${UV_BUILD_CACHE_DIR}/${UV_PACKAGE_DIR_NAME}" + UV_BUILD_CACHE_TARBALL_DIR="${UV_BUILD_CACHE_DIR}/${UV_TARBALL_DIR_NAME}" + UV_BUILD_CACHE_TARBALL_FILE="${UV_BUILD_CACHE_TARBALL_DIR}/${UV_TARBALL_NAME}" + mkdir -p "${UV_BUILD_CACHE_TARBALL_DIR}" + + # Create package directory for UV + UV_PACKAGE_ROOT="${RELEASE_PACKAGE_DIR}/uv-${UV_VERSION}" + UV_PACKAGE_CACHE_DIR="${UV_PACKAGE_ROOT}/${UV_PACKAGE_DIR_NAME}" + UV_PACKAGE_TARBALL_DIR="${UV_PACKAGE_ROOT}/${UV_TARBALL_DIR_NAME}" + UV_PACKAGE_TARBALL_FILE="${UV_PACKAGE_TARBALL_DIR}/${UV_TARBALL_NAME}" + mkdir -p "${UV_PACKAGE_CACHE_DIR}" "${UV_PACKAGE_TARBALL_DIR}" - # Install uv for Python management - curl -LsSf https://astral.sh/uv/install.sh | sh + echo "๐ŸŒ Downloading uv from: $UV_URL" + echo "๐Ÿ’พ Cache location: ${UV_BUILD_CACHE_TARBALL_FILE}" + echo "๐Ÿ“ฆ Package location: ${UV_PACKAGE_TARBALL_FILE}" + + # Download to cache + echo "โฌ‡๏ธ Downloading uv to cache..." + curl -fsSL "$UV_URL" -o "$UV_BUILD_CACHE_TARBALL_FILE" + + # Copy to package directory for later packaging + echo "๐Ÿ“‹ Copying to package directory..." + cp "$UV_BUILD_CACHE_TARBALL_FILE" "$UV_PACKAGE_TARBALL_FILE" + + # Extract and install uv from package + echo "๐Ÿ”ง Installing uv..." + mkdir -p /root/.local/bin + tar -xzf "$UV_PACKAGE_TARBALL_FILE" -C /root/.local/bin --strip-components=1 + + # Verify installation + echo "โœ… UV installed successfully:" + uv --version + + # Save UV version to a file for expanded-image stage to read + # This allows expanded-image to find UV package directory without globbing + echo "$UV_VERSION" > "${RELEASE_PACKAGE_DIR}/.uv-version" + echo "๐Ÿ“ Saved UV version to: ${RELEASE_PACKAGE_DIR}/.uv-version" # Get Python version from configuration - PYTHON_VERSION=$(jq -r .python /clice/config/default-toolchain-version.json) - echo "Installing Python version: $PYTHON_VERSION" + PYTHON_VERSION=$(jq -r .python ${CLICE_WORKDIR}/config/default-toolchain-version.json) + echo "๐Ÿ Installing Python version: $PYTHON_VERSION" - # Install specified Python version - uv python install "$PYTHON_VERSION" + # Install Python using UV with package_dir as cache + echo "๐Ÿ”ง Installing Python ${PYTHON_VERSION} to package directory..." 
+ # This creates Python installation cache in RELEASE_PACKAGE_DIR for expand-stage + # So we use UV_PACKAGE_CACHE_DIR here + UV_CACHE_DIR=${UV_PACKAGE_CACHE_DIR} uv python install "$PYTHON_VERSION" --default + + # Save Python version to a file for expanded-image stage to read + # This allows expanded-image to know which Python version to install without jq + echo "$PYTHON_VERSION" > "${UV_PACKAGE_ROOT}/.python-version" + echo "๐Ÿ“ Saved Python version to: ${UV_PACKAGE_ROOT}/.python-version" + + echo "โœ… Python installation cached to: ${UV_PACKAGE_CACHE_DIR}" + + # Setup Python project environment + echo "๐Ÿ”ง Setting up Python project environment..." + # cache to build cache, here we use docker/linux/utility pyproject.toml, only for build + UV_CACHE_DIR=${UV_BUILD_CACHE_PACKAGE_DIR} uv sync --project ${CLICE_WORKDIR}/docker/linux/utility/pyproject.toml + echo "โœ… Base Python environment setup complete!" SCRIPT -WORKDIR /clice +WORKDIR ${CLICE_WORKDIR} # ======================================================================== # ๐Ÿ—๏ธ Stage 1: Compiler Toolchain Builder # ======================================================================== -FROM base-python-environment AS toolchain-builder +FROM base-python-environment-for-build AS toolchain-builder LABEL description="Builds custom compiler toolchain with static libstdc++ for glibc compatibility" +ARG CLICE_WORKDIR +ARG APT_CACHE_DIR +ARG APT_STATE_CACHE_DIR +ARG BUILD_CACHE_DIR +ARG PYTHON_BUILD_SCRIPT_BASE_ENV_VARIABLES -# For build_config.py -ENV CACHE_DIR_ROOT=${CACHE_DIR_ROOT} -ENV RELEASE_PACKAGE_DIR=${RELEASE_PACKAGE_DIR} - -# uv auto reads from this env variable -ENV UV_CACHE_DIR=${UV_CACHE_DIR} - -ENV DEBIAN_FRONTEND=noninteractive - -# Copy additional project structure (config already copied in base) -COPY docker/linux /clice/docker/linux +COPY config/build_config.py ${CLICE_WORKDIR}/config/build_config.py +COPY config/default-toolchain-version.json ${CLICE_WORKDIR}/config/default-toolchain-version.json +COPY docker/linux/utility/build_utils.py ${CLICE_WORKDIR}/docker/linux/utility/build_utils.py +COPY docker/linux/utility/build_clice_compiler_toolchain.py ${CLICE_WORKDIR}/docker/linux/utility/build_clice_compiler_toolchain.py # Build the custom toolchain (Python script handles all dependencies) RUN --mount=type=cache,target=${APT_CACHE_DIR},sharing=locked,id=toolchain-builder-apt \ --mount=type=cache,target=${APT_STATE_CACHE_DIR},sharing=locked,id=toolchain-builder-apt-state \ - --mount=type=cache,target=${CACHE_DIR_ROOT},sharing=locked,id=toolchain-builder-cache \ - --mount=type=cache,target=${UV_CACHE_DIR},sharing=locked,id=toolchain-builder-uv \ + --mount=type=cache,target=${BUILD_CACHE_DIR},sharing=locked,id=toolchain-builder-cache \ bash -eux - <<'SCRIPT' - # Setup Python project environment - uv sync --project /clice/docker/linux/utility/pyproject.toml - # Activate Python environment and build toolchain - source /clice/docker/linux/utility/.venv/bin/activate + # Activate Python environment + echo "๐Ÿ Activating Python environment..." + source ${CLICE_WORKDIR}/docker/linux/utility/.venv/bin/activate + + echo "๐Ÿ”จ Building custom compiler toolchain..." + eval ${PYTHON_BUILD_SCRIPT_BASE_ENV_VARIABLES} \ python docker/linux/utility/build_clice_compiler_toolchain.py + echo "โœ… Toolchain build complete!" 
SCRIPT # ======================================================================== # ๐Ÿ—๏ธ Stage 2: Dependencies Downloader (Parallel to Stage 1) # ======================================================================== -FROM base-python-environment AS dependencies-downloader +FROM base-python-environment-for-build AS dependencies-downloader LABEL description="Downloads dev-container dependencies for cache optimization" -ENV CACHE_DIR_ROOT=${CACHE_DIR_ROOT} -ENV RELEASE_PACKAGE_DIR=${RELEASE_PACKAGE_DIR} +ARG CLICE_WORKDIR +ARG APT_CACHE_DIR +ARG APT_STATE_CACHE_DIR +ARG BUILD_CACHE_DIR +ARG PYTHON_BUILD_SCRIPT_BASE_ENV_VARIABLES -# uv auto reads from this env variable -ENV UV_CACHE_DIR=${UV_CACHE_DIR} - -ENV DEBIAN_FRONTEND=noninteractive +COPY config/build_config.py ${CLICE_WORKDIR}/config/build_config.py +COPY config/default-toolchain-version.json ${CLICE_WORKDIR}/config/default-toolchain-version.json +COPY docker/linux/utility/build_utils.py ${CLICE_WORKDIR}/docker/linux/utility/build_utils.py +COPY docker/linux/utility/download_dependencies.py ${CLICE_WORKDIR}/docker/linux/utility/download_dependencies.py -# Copy additional project structure (config already copied in base) -COPY docker /clice/docker +# for download python dependencies +COPY pyproject.toml ${CLICE_WORKDIR}/pyproject.toml # Setup Python project environment and download dependencies RUN --mount=type=cache,target=${APT_CACHE_DIR},sharing=locked,id=dependencies-downloader-apt \ --mount=type=cache,target=${APT_STATE_CACHE_DIR},sharing=locked,id=dependencies-downloader-apt-state \ - --mount=type=cache,target=${CACHE_DIR_ROOT},sharing=locked,id=dependencies-downloader-cache \ - --mount=type=cache,target=${UV_CACHE_DIR},sharing=locked,id=dependencies-downloader-uv \ + --mount=type=cache,target=${BUILD_CACHE_DIR},sharing=locked,id=dependencies-downloader-cache \ bash -eux - <<'SCRIPT' - # Setup Python environment - uv sync --project /clice/docker/linux/utility/pyproject.toml - # Download dependencies - source /clice/docker/linux/utility/.venv/bin/activate + # Activate Python environment + echo "๐Ÿ Activating Python environment..." + source ${CLICE_WORKDIR}/docker/linux/utility/.venv/bin/activate + + echo "๐Ÿ“ฅ Downloading dependencies..." + eval ${PYTHON_BUILD_SCRIPT_BASE_ENV_VARIABLES} \ python docker/linux/utility/download_dependencies.py + echo "โœ… Dependencies download complete!" 
SCRIPT # ======================================================================== # ๐Ÿ—๏ธ Stage 3: Release Package Creator # ======================================================================== -FROM base-python-environment AS image-packer +FROM base-python-environment-for-build AS image-packer LABEL description="Merges toolchain and dependencies into final release package" -# For build_config.py -ENV RELEASE_PACKAGE_DIR=${RELEASE_PACKAGE_DIR} -ENV PACKED_RELEASE_PACKAGE_PATH=${PACKED_RELEASE_PACKAGE_PATH} -ENV ENVIRONMENT_CONFIG_FILE=${ENVIRONMENT_CONFIG_FILE} +ARG CLICE_WORKDIR +ARG RELEASE_PACKAGE_DIR +ARG APT_CACHE_DIR +ARG APT_STATE_CACHE_DIR +ARG PYTHON_BUILD_SCRIPT_BASE_ENV_VARIABLES -# uv auto reads from this env variable -ENV UV_CACHE_DIR=${UV_CACHE_DIR} +# For execution in this layer and for package in final image +COPY config/build_config.py ${CLICE_WORKDIR}/config/build_config.py +COPY config/default-toolchain-version.json ${CLICE_WORKDIR}/config/default-toolchain-version.json +COPY docker/linux/utility/build_utils.py ${CLICE_WORKDIR}/docker/linux/utility/build_utils.py -ENV DEBIAN_FRONTEND=noninteractive +# For execution in this layer only +COPY docker/linux/utility/create_release_package.py ${CLICE_WORKDIR}/docker/linux/utility/create_release_package.py +COPY docker/linux/container-entrypoint.sh ${CLICE_WORKDIR}/docker/linux/container-entrypoint.sh + +# For final packaging only +COPY docker/linux/utility/local_setup.py ${CLICE_WORKDIR}/docker/linux/utility/local_setup.py # Copy outputs from previous stages # Merge by RELEASE_PACKAGE_DIR structure, each component has its own directory # No need to manually copy individual files + +# UV tarball and python +COPY --from=base-python-environment-for-build ${RELEASE_PACKAGE_DIR} ${RELEASE_PACKAGE_DIR} +# static libstdc++ toolchain COPY --from=toolchain-builder ${RELEASE_PACKAGE_DIR} ${RELEASE_PACKAGE_DIR} +# other dependencies COPY --from=dependencies-downloader ${RELEASE_PACKAGE_DIR} ${RELEASE_PACKAGE_DIR} # Setup Python project environment and create final release package RUN --mount=type=cache,target=${APT_CACHE_DIR},sharing=locked,id=packed-image-apt \ --mount=type=cache,target=${APT_STATE_CACHE_DIR},sharing=locked,id=packed-image-apt-state \ - --mount=type=cache,target=${UV_CACHE_DIR},sharing=locked,id=packed-image-uv \ bash -eux - <<'SCRIPT' - # Setup Python environment - uv sync --project /clice/docker/linux/utility/pyproject.toml - # Create final release package by merging stage outputs - source /clice/docker/linux/utility/.venv/bin/activate + # Activate Python environment + echo "๐Ÿ Activating Python environment..." + source ${CLICE_WORKDIR}/docker/linux/utility/.venv/bin/activate + + # Create final release package + echo "๐Ÿ“ฆ Creating release package..." + eval ${PYTHON_BUILD_SCRIPT_BASE_ENV_VARIABLES} \ python docker/linux/utility/create_release_package.py + echo "โœ… Release package created successfully!" 
SCRIPT # ======================================================================== -# ๐Ÿ—๏ธ Stage 4: Release Package Creator +# ๐Ÿ—๏ธ Stage 4: Release Package # ======================================================================== -FROM base-python-environment AS packed-image +FROM ubuntu:24.04 AS packed-image -# Copy project configuration to determine Python version -# these two COPYs must be scheduled before expand image -# so the config and scripts could keep the same with docker image build environment -COPY config /clice/config -COPY docker/linux /clice/docker/linux +ARG PACKED_RELEASE_PACKAGE_PATH +# Copy only the packed release package +# All scripts, configs, and .bashrc are already inside the package COPY --from=image-packer ${PACKED_RELEASE_PACKAGE_PATH} ${PACKED_RELEASE_PACKAGE_PATH} -COPY --from=image-packer ${ENVIRONMENT_CONFIG_FILE} ${ENVIRONMENT_CONFIG_FILE} # ======================================================================== # ๐Ÿ—๏ธ Stage 5: Development Image (Expanded) @@ -178,29 +301,66 @@ COPY --from=image-packer ${ENVIRONMENT_CONFIG_FILE} ${ENVIRONMENT_CONFIG_FILE} FROM ${PACKED_IMAGE_NAME} AS expanded-image LABEL description="Fully expanded development image with all tools installed" -# We should NOT copy project structure for local setup -# Local config and other scripts may be different from pack environment -# The two COPYs below are copyed from pack environment, not from local -# /clice/config -# /clice/docker/linux +ARG CLICE_WORKDIR +ARG RELEASE_PACKAGE_DIR +ARG PACKED_RELEASE_PACKAGE_PATH +ARG PYTHON_BUILD_SCRIPT_BASE_ENV_VARIABLES +ARG UV_PACKAGE_DIR_NAME +ARG UV_TARBALL_DIR_NAME + +ENV PATH="/root/.local/bin:${PATH}" # Expand the release image into a full development environment # We don't mark here with --mount=type=cache because this is executed on clice developer environment # clice developer do not have the cache from previous stages RUN bash -eux - <<'SCRIPT' + # Extract the release package first + echo "๐Ÿ“ฆ Extracting release package..." + mkdir -p "${RELEASE_PACKAGE_DIR}" + tar -xJf "${PACKED_RELEASE_PACKAGE_PATH}" -C "${RELEASE_PACKAGE_DIR}" + echo "โœ… Release package extracted!" - # Setup Python project environment - uv sync --project /clice/pyproject.toml + # Install UV and Python from packaged files (offline installation) + echo "๐Ÿ“ฆ Installing UV from package..." + UV_VERSION_FILE="${RELEASE_PACKAGE_DIR}/.uv-version" + UV_VERSION=$(cat "${UV_VERSION_FILE}") + echo "๐Ÿ“‹ UV version: ${UV_VERSION}" - # Run local setup to expand everything - source /clice/docker/linux/utility/.venv/bin/activate - python docker/linux/utility/local_setup.py - - # cleanup project structure to reduce image size - # User could refer to ${RELEASE_PACKAGE_DIR}/manifest.json if needed - rm -rf /clice + UV_PACKAGE_ROOT="${RELEASE_PACKAGE_DIR}/uv-${UV_VERSION}" + UV_PACKAGE_CACHE_DIR="${UV_PACKAGE_ROOT}/${UV_PACKAGE_DIR_NAME}" + UV_TARBALL_PATH="${UV_PACKAGE_ROOT}/${UV_TARBALL_DIR_NAME}/uv-*.tar.gz" + UV_INSTALL_DIR="/root/.local/bin" + mkdir -p "${UV_INSTALL_DIR}" + + echo "๐Ÿ”ง Extracting UV tarball..." + tar -xzf ${UV_TARBALL_PATH} -C ${UV_INSTALL_DIR} --strip-components=1 + echo "โœ… UV installed successfully!" + + # Install Python + PYTHON_VERSION_FILE="${UV_PACKAGE_ROOT}/.python-version" + PYTHON_VERSION=$(cat "${PYTHON_VERSION_FILE}") + echo "๐Ÿ“‹ Python version: ${PYTHON_VERSION}" + + echo "๐Ÿ Installing Python ${PYTHON_VERSION}..." 
+    UV_CACHE_DIR=${UV_PACKAGE_CACHE_DIR} uv python install "${PYTHON_VERSION}" --default
+    echo "✅ Python ${PYTHON_VERSION} installed successfully!"
+
+    # Run local setup directly from packaged scripts (no extraction needed)
+    echo "🚀 Running local setup to expand environment..."
+
+    # Get path to local_setup.py in package
+    CLICE_SETUP_SCRIPTS_DIR="${RELEASE_PACKAGE_DIR}/clice-setup-scripts-unknown"
+    LOCAL_SETUP_SCRIPT="${CLICE_SETUP_SCRIPTS_DIR}/docker/linux/utility/local_setup.py"
+
+    # Run local setup directly from package (no venv needed, using system Python)
+    eval ${PYTHON_BUILD_SCRIPT_BASE_ENV_VARIABLES} \
+    python "${LOCAL_SETUP_SCRIPT}"
+    echo "✅ Environment expansion complete!"
+
+    # Cleanup
+    echo "🧹 Cleaning up temporary files..."
+    rm -f "${PACKED_RELEASE_PACKAGE_PATH}"
+    echo "✅ Cleanup complete! Dev container ready! 🎉"
 SCRIPT

-WORKDIR /clice
-
 CMD ["/bin/bash"]
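The expanded-image stage above pins its tools through tiny marker files shipped inside the release package (.uv-version, .python-version) rather than through build args, so the expand step needs no network and no knowledge of which versions were packed. A minimal Python sketch of that convention; the helper name and the /release-package path are illustrative, not part of this patch:

    import os

    def read_pinned_version(package_root: str, marker: str) -> str:
        # Marker files hold a single line, e.g. the contents of .uv-version
        with open(os.path.join(package_root, marker)) as f:
            return f.read().strip()

    # e.g. uv_version = read_pinned_version("/release-package", ".uv-version")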
diff --git a/docker/linux/build.sh b/docker/linux/build.sh
index 71454ec0..d525a87c 100644
--- a/docker/linux/build.sh
+++ b/docker/linux/build.sh
@@ -16,6 +16,9 @@ set -e
 # ========================================================================
 # 🔧 Environment Setup
 # ========================================================================

+# Source common utilities
+source "$(dirname "${BASH_SOURCE[0]}")/utility/common.sh"
+
 # Save original working directory and switch to project root
 ORIG_PWD="$(pwd)"
 SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
@@ -28,13 +31,13 @@ trap 'cd "${ORIG_PWD}"' EXIT
 # ⚙️ Default Configuration
 # ========================================================================

-COMPILER="clang"
-DOCKERFILE_PATH="docker/linux/Dockerfile"
-BUILD_STAGE="expanded-image"  # Always build development image (auto-expand from release if needed)
+COMPILER="${DEFAULT_COMPILER}"
+BUILD_STAGE="${DEFAULT_BUILD_STAGE}"
 CACHE_FROM=""
 CACHE_TO=""
-VERSION="latest"  # Will be replaced with actual clice version in releases
+VERSION="${DEFAULT_VERSION}"
 REBUILD="false"
+DEBUG="false"

 # ========================================================================
 # 📚 Usage Information
 # ========================================================================
@@ -53,15 +56,17 @@ OPTIONS:
     --version       Set version tag (default: ${VERSION})
     --stage         Build specific stage (packed-image or expanded-image)
     --rebuild       Force rebuild even if image exists
+    --debug         Enable interactive debug mode (requires Docker 23.0+)
    --help, -h      Show this help message

EXAMPLES:
    $0                          Build development container with clang
    $0 --compiler gcc           Build development container with gcc
-    $0 --stage packed-image    Build only the release image
-    $0 --stage expanded-image  Expand release image to development image
+    $0 --stage packed-image     Build only the release image
+    $0 --stage expanded-image   Expand release image to development image
    $0 --version v1.0.0         Build versioned container (v1.0.0)
    $0 --rebuild                Force rebuild existing image
+    $0 --debug                  Build with interactive debug mode
    $0 --cache-from clice-io/clice-dev:cache    Use cache from existing image
    $0 --cache-to type=registry,ref=myregistry/myimage:cache    Push cache

@@ -77,6 +82,16 @@ BUILD MODES:
  • Single-stage build: Builds only the specified stage
  • Auto-expansion: Development image can build from existing release image

+DEBUG MODE:
+    --debug enables interactive debugging with docker buildx debug build
+    • Requires Docker 23.0+ with BuildKit experimental features
+    • Automatically sets BUILDX_EXPERIMENTAL=1
+    • On build failure, you can use debug commands to inspect the build state
+    • Example debug commands:
+        - docker buildx debug ps       List debug sessions
+        - docker buildx debug exec     Execute commands in failed build
+        - docker buildx debug shell    Open interactive shell in failed build
+
 The container includes:
  • Custom toolchain (fully installed and ready)
  • All development dependencies
@@ -103,6 +118,8 @@ while [ "$#" -gt 0 ]; do
             BUILD_STAGE="$2"; shift 2;;
         --rebuild)
             REBUILD="true"; shift 1;;
+        --debug)
+            DEBUG="true"; shift 1;;
         -h|--help)
             usage; exit 0;;
         *)
@@ -114,15 +131,18 @@ done
 # 🏷️ Image Naming
 # ========================================================================

-# Container image tag with version
-IMAGE_TAG="linux-${COMPILER}-${VERSION}"
-PACKED_IMAGE_NAME="clice-io/clice:${IMAGE_TAG}"
+IMAGE_TAG=$(get_image_tag "${COMPILER}" "${VERSION}")
+PACKED_IMAGE_NAME=$(get_packed_image_name "${COMPILER}" "${VERSION}")
+EXPANDED_IMAGE_NAME=$(get_expanded_image_name "${COMPILER}" "${VERSION}")

 # Set the target image name based on build stage
 if [ "$BUILD_STAGE" = "packed-image" ]; then
     TARGET_IMAGE_NAME="$PACKED_IMAGE_NAME"
+elif [ "$BUILD_STAGE" = "expanded-image" ]; then
+    TARGET_IMAGE_NAME="$EXPANDED_IMAGE_NAME"
 else
-    TARGET_IMAGE_NAME="clice-io/clice:${IMAGE_TAG}-expanded"
+    TARGET_IMAGE_NAME="clice-dev_container-debug_build-$BUILD_STAGE"
+    echo "🔧 Debug Building Intermediate Stage: $BUILD_STAGE" >&2; usage;
 fi

 # ========================================================================
@@ -147,98 +167,72 @@ fi
 echo "========================================================================="

 # ========================================================================
-# 🛠️ Docker Build Arguments
+# 🔄 Auto-Expansion Logic (Release → Development)
 # ========================================================================
+
+# Build the target image
+echo "🔍 Checking for target image: ${TARGET_IMAGE_NAME}"
+
+# Handle REBUILD flag - clean up existing images
+if [ "$REBUILD" = "true" ]; then
+    echo "🔄 Force rebuild requested - cleaning up existing images..."
+
+    # Clean up target image
+    if docker image inspect "${TARGET_IMAGE_NAME}" >/dev/null 2>&1; then
+        echo "🧹 Removing existing target image: ${TARGET_IMAGE_NAME}"
+        docker rmi "${TARGET_IMAGE_NAME}" || true
+    fi
+fi
+
+# Rebuild BUILD_ARGS with correct release base image
 BUILD_ARGS=(
     "--progress=plain"
-    "--target=${BUILD_STAGE}"
-    "--build-arg=COMPILER=${COMPILER}"
-    "--build-arg=PACKED_IMAGE_NAME=${PACKED_IMAGE_NAME}"
-    "--build-arg=BUILDKIT_INLINE_CACHE=1"  # Enable inline cache
+    "--target"
+    "${BUILD_STAGE}"
+    "--build-arg"
+    "COMPILER=${COMPILER}"
+    "--build-arg"
+    "VERSION=${VERSION}"
+    "--build-arg"
+    "PACKED_IMAGE_NAME=${PACKED_IMAGE_NAME}"
+    "--build-arg"
+    "CLICE_DIR=${CLICE_DIR}"
+    "--build-arg"
+    "BUILDKIT_INLINE_CACHE=1"
 )

-# Add cache configuration with logging
+# Add cache configuration
 if [ -n "$CACHE_FROM" ]; then
-    echo "💾 Configuring cache source: ${CACHE_FROM}"
     BUILD_ARGS+=("--cache-from=${CACHE_FROM}")
 fi

 if [ -n "$CACHE_TO" ]; then
-    echo "💾 Configuring cache destination: ${CACHE_TO}"
     BUILD_ARGS+=("--cache-to=${CACHE_TO}")
-    # Log cache operations
-    echo "📝 Cache operations will be logged during build"
+    echo "📝 Starting build with cache-to logging enabled..."
 fi

-# ========================================================================
-# 🏗️ Execute Build
-# ========================================================================
+# Add final arguments to complete the build command
+BUILD_ARGS+=("-t" "${TARGET_IMAGE_NAME}" "-f" "${DOCKERFILE_PATH}" ".")

-echo "🏗️ Starting Docker build process with parallel optimization..."
-echo "๐Ÿ”จ Build command: docker buildx build ${BUILD_ARGS[*]} -t ${TARGET_IMAGE_NAME} -f ${DOCKERFILE_PATH} ." +# Execute build with or without debug mode +if [ "$DEBUG" = "true" ]; then + # Enable BuildKit experimental features for debug mode + echo "๐Ÿ› Debug mode enabled (BUILDX_EXPERIMENTAL=1)" -# ======================================================================== -# ๐Ÿ”„ Auto-Expansion Logic (Release โ†’ Development) -# ======================================================================== + export BUILDX_EXPERIMENTAL=1 + BUILD_COMMAND="docker buildx debug --invoke /bin/bash build" +else + BUILD_COMMAND="docker buildx build" +fi -# Build the target image -echo "๐Ÿ” Checking for target image: ${TARGET_IMAGE_NAME}" +echo "๐Ÿ”จ Build command: ${BUILD_COMMAND} ${BUILD_ARGS[*]}" +${BUILD_COMMAND} "${BUILD_ARGS[@]}" -if [ "$REBUILD" = "true" ] || ! docker image inspect "${TARGET_IMAGE_NAME}" >/dev/null 2>&1; then - if [ "$REBUILD" = "true" ]; then - echo "๐Ÿ”„ Force rebuilding ${BUILD_STAGE}..." - else - echo "๐Ÿ” Target image not found, building ${BUILD_STAGE}..." - fi - - # Set up build arguments based on the target stage - if [ "$BUILD_STAGE" = "expanded-image" ]; then - # For development image, check if we can build from existing release image - if docker image inspect "${PACKED_IMAGE_NAME}" >/dev/null 2>&1; then - echo "๐Ÿ“ฆ Found existing release image: ${PACKED_IMAGE_NAME}" - echo "๐Ÿ—๏ธ Building development image from existing release image..." - ACTUAL_RELEASE_BASE="${PACKED_IMAGE_NAME}" - else - echo "๐Ÿ” Release image not found: ${PACKED_IMAGE_NAME}" - echo "๐Ÿ—๏ธ Building full multi-stage build (release + development)..." - ACTUAL_RELEASE_BASE="packed-image" - fi - else - # For release image or other stages, use default stage reference - ACTUAL_RELEASE_BASE="packed-image" - fi - - # Rebuild BUILD_ARGS with correct release base image - BUILD_ARGS=( - "--progress=plain" - "--target=${BUILD_STAGE}" - "--build-arg=COMPILER=${COMPILER}" - "--build-arg=VERSION=${VERSION}" - "--build-arg=RELEASE_BASE_IMAGE=${ACTUAL_RELEASE_BASE}" - "--build-arg=BUILDKIT_INLINE_CACHE=1" - ) - - # Add cache configuration - if [ -n "$CACHE_FROM" ]; then - BUILD_ARGS+=("--cache-from=${CACHE_FROM}") - fi - - if [ -n "$CACHE_TO" ]; then - BUILD_ARGS+=("--cache-to=${CACHE_TO}") - echo "๐Ÿ“ Starting build with cache-to logging enabled..." - fi +BUILD_SUCCESS=$? - echo "๐Ÿ—๏ธ Building ${BUILD_STAGE} with auto-expansion support..." - docker buildx build "${BUILD_ARGS[@]}" -t "${TARGET_IMAGE_NAME}" -f "${DOCKERFILE_PATH}" . - - # Log cache operations if cache-to was used - if [ -n "$CACHE_TO" ]; then - echo "๐Ÿ’พ Cache operations completed. Cache pushed to: ${CACHE_TO}" - fi -else - echo "โœ… Target image already exists: ${TARGET_IMAGE_NAME}" - echo "โ„น๏ธ Use --rebuild to force rebuild" +# Log cache operations if cache-to was used +if [ -n "$CACHE_TO" ]; then + echo "๐Ÿ’พ Cache operations completed. 
Cache pushed to: ${CACHE_TO}" fi # ======================================================================== @@ -265,7 +259,7 @@ if [ $BUILD_SUCCESS -eq 0 ]; then echo "" echo "๐Ÿš€ NEXT STEPS:" echo " โ€ข Run container: ./docker/linux/run.sh --compiler ${COMPILER}" - echo " โ€ข Test container: docker run --rm -it ${TARGET_IMAGE_NAME} /bin/bash" + echo " โ€ข Use container: docker run --rm -it ${TARGET_IMAGE_NAME} /bin/bash" echo " โ€ข Development environment is ready to use immediately" fi diff --git a/docker/linux/container-entrypoint.sh b/docker/linux/container-entrypoint.sh new file mode 100644 index 00000000..c30c347f --- /dev/null +++ b/docker/linux/container-entrypoint.sh @@ -0,0 +1,36 @@ +# ======================================================================== +# ๐Ÿš€ Clice Dev Container Shell Initialization +# ======================================================================== +# File: docker/linux/container-entrypoint.sh +# Purpose: Bash initialization script for Clice dev container +# +# This script is sourced by .bashrc and performs: +# 1. Runs uv sync to create/update virtual environment if needed +# 2. Auto-activates virtual environment for interactive shells +# +# Usage: This file will be appended to /root/.bashrc during image build +# ======================================================================== + +# Only run in interactive shells to avoid breaking non-interactive scripts +if [[ $- == *i* ]]; then + # Check if .venv exists, if not, run uv sync + + # Read UV version to set cache directory + UV_VERSION_FILE="${RELEASE_PACKAGE_DIR}/.uv-version" + UV_VERSION=$(cat "${UV_VERSION_FILE}") + UV_PACKAGE_ROOT="${RELEASE_PACKAGE_DIR}/uv-${UV_VERSION}" + UV_PACKAGE_CACHE_DIR="${UV_PACKAGE_ROOT}/${UV_PACKAGE_DIR_NAME}" + + echo "๐Ÿ“ฆ Running uv sync..." 
+
+    if UV_CACHE_DIR="${UV_PACKAGE_CACHE_DIR}" uv sync --project "${CLICE_WORKDIR}/pyproject.toml"; then
+        echo "✅ Python environment ready at ${CLICE_WORKDIR}/.venv"
+    else
+        echo "⚠️ Failed to sync Python environment (pyproject.toml might not exist)"
+    fi
+
+    # Auto-activate virtual environment if it exists
+    if [ -f "${CLICE_WORKDIR}/.venv/bin/activate" ]; then
+        source "${CLICE_WORKDIR}/.venv/bin/activate"
+    fi
+fi
diff --git a/docker/linux/run.sh b/docker/linux/run.sh
index e53d02c9..53213146 100644
--- a/docker/linux/run.sh
+++ b/docker/linux/run.sh
@@ -19,6 +19,9 @@ set -e
 # ========================================================================
 # 🔧 Environment Setup
 # ========================================================================

+# Source common utilities
+source "$(dirname "${BASH_SOURCE[0]}")/utility/common.sh"
+
 # Save original working directory and switch to project root
 ORIG_PWD="$(pwd)"
 SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
@@ -31,12 +34,12 @@ trap 'cd "${ORIG_PWD}"' EXIT
 # ⚙️ Default Configuration
 # ========================================================================

-COMPILER="clang"
+COMPILER="${DEFAULT_COMPILER}"
 RESET="false"
 UPDATE="false"
-VERSION="latest"
+VERSION="${DEFAULT_VERSION}"
 COMMAND=""
-CONTAINER_WORKDIR="/clice"
+CONTAINER_WORKDIR="${DEFAULT_CONTAINER_WORKDIR}"

 # ========================================================================
 # 📚 Usage Information
@@ -98,10 +101,9 @@ done
 # 🏷️ Container and Image Naming
 # ========================================================================

-IMAGE_TAG="linux-${COMPILER}-v${VERSION}"
-PACKED_IMAGE_NAME="clice-io/clice:${IMAGE_TAG}"
-EXPANDED_IMAGE_NAME="${PACKED_IMAGE_NAME}-expanded"
-CONTAINER_NAME="clice_dev-linux-${COMPILER}-v${VERSION}"
+PACKED_IMAGE_NAME=$(get_packed_image_name "${COMPILER}" "${VERSION}")
+EXPANDED_IMAGE_NAME=$(get_expanded_image_name "${COMPILER}" "${VERSION}")
+CONTAINER_NAME=$(get_container_name "${COMPILER}" "${VERSION}")

 # ========================================================================
 # 🚀 Main Execution
@@ -139,10 +141,19 @@ fi
 # 🏗️ Image Management
 # ========================================================================

-# Handle --update: pull latest images and exit
-if [ "$UPDATE" = "true" ] || ! docker image inspect "${PACKED_IMAGE_NAME}" >/dev/null 2>&1; then
-    echo "🔄 Force updating image..."
-
+# Check if we need to update/pull the packed image
+UPDATE_REASON=""
+if [ "$UPDATE" = "true" ]; then
+    UPDATE_REASON="🔄 Force updating image..."
+elif ! docker image inspect "${PACKED_IMAGE_NAME}" >/dev/null 2>&1; then
+    UPDATE_REASON="🔄 Packed image ${PACKED_IMAGE_NAME} not found locally, pulling..."
+fi
+
+# Handle image update if needed
+if [ -n "$UPDATE_REASON" ]; then
+
+    echo "${UPDATE_REASON}"
+
     # Try to remove existing expanded image before pulling
     if docker image inspect "${EXPANDED_IMAGE_NAME}" >/dev/null 2>&1; then
         echo "🧹 Cleaning existing expanded image: ${EXPANDED_IMAGE_NAME}..."
@@ -164,17 +175,55 @@ if [ "$UPDATE" = "true" ] || ! docker image inspect "${PACKED_IMAGE_NAME}" >/dev
         echo "💡 Please check if the image exists in the registry"
         exit 1
     fi
+
+    echo "🏁 Update completed."
+fi
+
+# ========================================================================
+# 🏗️ Auto-Expand Packed Image to Development Image
+# ========================================================================
+
+# At this point, the packed image is guaranteed to exist (either pulled or already present)
+# Check if the expanded development image exists; if not, expand it from the packed image
+if ! docker image inspect "${EXPANDED_IMAGE_NAME}" >/dev/null 2>&1; then
+    echo "========================================================================="
+    echo "🏗️ EXPANDING PACKED IMAGE TO DEVELOPMENT IMAGE"
+    echo "========================================================================="
+    echo "📦 Source (Packed):   ${PACKED_IMAGE_NAME}"
+    echo "🎯 Target (Expanded): ${EXPANDED_IMAGE_NAME}"
+    echo "========================================================================="

-    # Expand the packed image to development image using build.sh
-    echo "🏗️ Expanding packed image to development image..."
-    if "${SCRIPT_DIR}/build.sh" --compiler "${COMPILER}" --version "${VERSION}" --stage expanded-image; then
-        echo "✅ Successfully created development image: ${EXPANDED_IMAGE_NAME}"
+    # Run the packed image container and execute its internal build.sh for expansion
+    # Why use the container's build.sh instead of the local one:
+    #   1. The container's build.sh is the same version as the packed image
+    #   2. The container has all the correct tools and environment
+    #   3. The local build.sh might be from a different branch/version
+    #   4. This ensures consistent expansion regardless of the host environment
+    #
+    # Mounts:
+    #   • /var/run/docker.sock - Allow the container to build images on the host Docker daemon
+    if docker run --rm \
+        -v /var/run/docker.sock:/var/run/docker.sock \
+        "${PACKED_IMAGE_NAME}" \
+        /bin/bash -c "cd ${CLICE_DIR} && ./docker/linux/build.sh --stage expanded-image --compiler ${COMPILER} --version ${VERSION}"; then
+        echo "========================================================================="
+        echo "✅ EXPANSION COMPLETED SUCCESSFULLY"
+        echo "========================================================================="
+        echo "🎉 Development image created: ${EXPANDED_IMAGE_NAME}"
+        echo "📦 Ready for container creation"
+        echo "========================================================================="
     else
-        echo "❌ Failed to expand packed image to development image"
+        echo "========================================================================="
+        echo "❌ EXPANSION FAILED"
+        echo "========================================================================="
+        echo "💡 Troubleshooting tips:"
+        echo "   • Check the packed image is valid: docker run --rm ${PACKED_IMAGE_NAME} ls -la"
+        echo "   • Review the expansion logs above for the specific error"
+        echo "========================================================================="
         exit 1
     fi
-
-    echo "🏁 Update completed."
+else
+    echo "✅ Development image already exists: ${EXPANDED_IMAGE_NAME}"
 fi

 # Check if the container exists and is using the current development image
diff --git a/docker/linux/utility/build_clice_compiler_toolchain.py b/docker/linux/utility/build_clice_compiler_toolchain.py
index 009ff255..e3323473 100644
--- a/docker/linux/utility/build_clice_compiler_toolchain.py
+++ b/docker/linux/utility/build_clice_compiler_toolchain.py
@@ -56,8 +56,6 @@
 # ========================================================================
 # 📚 Standard Library Imports
 # ========================================================================
-import shutil   # High-level file operations
-import tarfile  # Archive extraction capabilities
 from typing import Dict, Set  # Type hints for better code clarity

 # ========================================================================
@@ -66,10 +64,7 @@
 from build_utils import (
     Job,                    # Individual build task representation
     ParallelTaskScheduler,  # High-performance parallel execution engine
-    download_file,          # Accelerated file download with aria2c
     run_command,            # Shell command execution with environment control
-    verify_signature,       # GPG signature verification
-    # Generic component build utilities
     install_download_prerequisites,  # Download prerequisite installation
     install_extract_prerequisites,   # Extract prerequisite installation
     download_and_verify,             # Component source download and verification
@@ -80,11 +75,11 @@
 # ⚙️ Configuration Constants
 # ========================================================================
 from config.build_config import (
-    TOOLCHAIN_BUILD_ROOT,      # Build root directory
-    GPG_KEY_SERVER,            # GPG keyserver list
     TOOLCHAIN_BUILD_ENV_VARS,  # Build environment variables
     # Import component instances for structured access
-    TOOLCHAIN
+    TOOLCHAIN,
+    Component,
+    ToolchainSubComponent
 )

 # ========================================================================
@@ -110,34 +105,49 @@ def update_apt():
     installation can proceed safely.
     """
     print("🔄 [SETUP] Refreshing APT package database...")
-    run_command("apt update")
+    run_command("apt update -o DPkg::Lock::Timeout=-1")

-
-
-def install_build_prerequisites():
+def install_build_prerequisites(component):
     """
     🔨 Install Build Stage Prerequisites

-    Installs the complete build environment including:
+    Installs the build prerequisites for all sub-components of the given component.
+    This collects and deduplicates all build_prerequisites from sub-components,
+    then installs them in a single batch operation.
+
+    For the toolchain component, this includes:
     • Core build tools (make, binutils, rsync)
     • Text processing tools (gawk, bison) for glibc
     • GCC 9 toolchain for glibc compilation
     • GCC 14 toolchain for libstdc++ compilation

+    Args:
+        component: The parent component (e.g., TOOLCHAIN) whose sub-components'
+                   build prerequisites should be installed
+
     Note: We maintain multiple GCC versions because glibc requires GCC < 10
     to avoid linker symbol conflicts, while modern libstdc++ benefits from
     the latest compiler features.
""" - print("๐Ÿ”จ [SETUP] Installing comprehensive build environment...") - print(" ๐Ÿ“‹ Components: make, binutils, gawk, bison, gcc-9, gcc-14") - build_prerequisites = TOOLCHAIN.build_prerequisites - pkg_list = " ".join(build_prerequisites) - run_command(f"apt install -y --no-install-recommends -o APT::Keep-Downloaded-Packages=true {pkg_list}") - # linux headers install requires gcc, even though we won't use it in linux header install + # Collect all build prerequisites from sub-components + all_prerequisites = set() + if hasattr(component, 'sub_components'): + for sub_component in component.sub_components: + all_prerequisites.update(sub_component.build_prerequisites) + + if not all_prerequisites: + print(f"โ„น๏ธ [SETUP] No build prerequisites for {component.name}") + return + + print(f"๐Ÿ”จ [SETUP] Installing build prerequisites for {component.name}...") + print(f" ๐Ÿ“‹ Packages: {', '.join(sorted(all_prerequisites))}") + pkg_list = " ".join(sorted(all_prerequisites)) + run_command(f"apt install -y --no-install-recommends=true -o DPkg::Lock::Timeout=-1 {pkg_list}") + + # Setup GCC alternatives after installation + # Linux headers install requires gcc, even though we won't use it in linux header install run_command("update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-9 90") - print("โœ… [SETUP] Build environment ready") - - + print(f"โœ… [SETUP] Build prerequisites for {component.name} installed") # ======================================================================== # ๐Ÿ“š GNU C Library (glibc) Tasks @@ -201,7 +211,7 @@ def fix_glibc_paths(): print(f"โœ… [POST-PROCESS] Path fixing complete ({files_processed} files processed)") -def build_and_install_glibc(component): +def build_and_install_glibc(glibc_component: ToolchainSubComponent, linux_component: ToolchainSubComponent): """ ๐Ÿ—๏ธ Build and Install GNU C Library (glibc) @@ -219,35 +229,34 @@ def build_and_install_glibc(component): Note: glibc is built out-of-tree in a separate build directory to maintain clean separation between source and build artifacts. 
""" - print(f"๐Ÿ—๏ธ [BUILD] Starting {component.name} compilation...") + print(f"๐Ÿ—๏ธ [BUILD] Starting {glibc_component.name} compilation...") print(f" ๐Ÿ“‹ Using GCC 9 (required for glibc compatibility)") print(f" ๐ŸŽฏ Target: {TOOLCHAIN.host_triplet} ({TOOLCHAIN.host_machine})") print(f" ๐Ÿ“ Install: {TOOLCHAIN.sysroot_dir}/usr") # Prepare out-of-tree build directory - os.makedirs(component.build_dir, exist_ok=True) + os.makedirs(glibc_component.build_dir, exist_ok=True) # Configure build environment with GCC 9 compiler_env = { 'CC': 'gcc-9', # GNU C compiler version 9 (full path) - 'CXX': 'g++-9', # GNU C++ compiler version 9 (full path) 'CPP': 'cpp-9', # C preprocessor (explicit) } compiler_env.update(TOOLCHAIN_BUILD_ENV_VARS) # Configure glibc build print(f"โš™๏ธ [CONFIG] Configuring glibc build...") - configure_script = os.path.join(component.src_dir, "configure") - configure_command = f"{configure_script} --host={TOOLCHAIN.host_triplet} --prefix={TOOLCHAIN.sysroot_dir}/usr --disable-werror --disable-lib32 --enable-lib64" - run_command(configure_command, cwd=component.build_dir, env=compiler_env) + configure_script = os.path.join(glibc_component.src_dir, "configure") + configure_command = f"{configure_script} --host={glibc_component.host_triplet} --prefix={TOOLCHAIN.sysroot_dir}/usr --with-headers={TOOLCHAIN.sysroot_dir}/usr/include --enable-kernel={linux_component.version} --disable-werror --disable-lib32 --enable-lib64" + run_command(configure_command, cwd=glibc_component.build_dir, env=compiler_env) # Compile glibc print(f"๐Ÿ”จ [COMPILE] Building glibc (this may take several minutes)...") - run_command("make -j", cwd=component.build_dir, env=compiler_env) + run_command("make -j", cwd=glibc_component.build_dir, env=compiler_env) # Install glibc to sysroot print(f"๐Ÿ“ฆ [INSTALL] Installing glibc to sysroot...") - run_command(f"make install -j", cwd=component.build_dir, env=compiler_env) + run_command(f"make install -j", cwd=glibc_component.build_dir, env=compiler_env) # Post-process to fix hardcoded paths fix_glibc_paths() @@ -445,14 +454,14 @@ def main(): all_jobs: Dict[str, Job] = { # ๐Ÿ“ฆ System Setup Tasks "update_apt": Job("update_apt", update_apt), - "install_download_prerequisites": Job("install_download_prerequisites", install_download_prerequisites), - "install_extract_prerequisites": Job("install_extract_prerequisites", install_extract_prerequisites), - "install_build_prerequisites": Job("install_build_prerequisites", install_build_prerequisites), + "install_download_prerequisites": Job("install_download_prerequisites", install_download_prerequisites, (TOOLCHAIN,)), + "install_extract_prerequisites": Job("install_extract_prerequisites", install_extract_prerequisites, (TOOLCHAIN,)), + "install_build_prerequisites": Job("install_build_prerequisites", install_build_prerequisites, (TOOLCHAIN,)), # ๐Ÿ“š GNU C Library (glibc) Pipeline "download_glibc": Job("download_glibc", download_and_verify, (TOOLCHAIN.glibc,)), "extract_glibc": Job("extract_glibc", extract_source, (TOOLCHAIN.glibc,)), - "build_and_install_glibc": Job("build_and_install_glibc", build_and_install_glibc, (TOOLCHAIN.glibc,)), + "build_and_install_glibc": Job("build_and_install_glibc", build_and_install_glibc, (TOOLCHAIN.glibc, TOOLCHAIN.linux)), # ๐Ÿง Linux Kernel Headers Pipeline "download_linux": Job("download_linux", download_and_verify, (TOOLCHAIN.linux,)), @@ -487,9 +496,13 @@ def main(): # ๐Ÿ“š glibc Build Pipeline "download_glibc": {"install_download_prerequisites"}, "extract_glibc": 
{"download_glibc", "install_extract_prerequisites"}, - "build_and_install_glibc": {"extract_glibc", "install_build_prerequisites"}, + "build_and_install_glibc": { + "extract_glibc", + "install_build_prerequisites", + "install_linux_headers" # glibc requires Linux headers for compilation + }, - # ๐Ÿง Linux Headers Pipeline (can run parallel with glibc download/extract) + # ๐Ÿง Linux Headers Pipeline (must complete before glibc build) "download_linux": {"install_download_prerequisites"}, "extract_linux": {"download_linux", "install_extract_prerequisites"}, "install_linux_headers": {"extract_linux", "install_build_prerequisites"}, diff --git a/docker/linux/utility/build_utils.py b/docker/linux/utility/build_utils.py index 0a11610f..8f61079d 100644 --- a/docker/linux/utility/build_utils.py +++ b/docker/linux/utility/build_utils.py @@ -3,7 +3,7 @@ import os import tarfile -from config.build_config import TOOLCHAIN_VERSIONS, Component +from config.build_config import Component # Add project root to the Python path to allow importing 'config' module project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..')) @@ -14,8 +14,6 @@ import hashlib import concurrent.futures import time -import traceback -import json from typing import Dict, Set, Tuple, Optional, List, Callable from graphlib import TopologicalSorter from collections import defaultdict @@ -55,6 +53,9 @@ def run_command(command: str, cwd: str = os.getcwd(), env: Dict[str, str] = {}) """ Executes a shell command, directing its output to the current shell. Sets DEBIAN_FRONTEND to noninteractive to prevent interactive prompts. + + Output is streamed in real-time to stdout/stderr for better visibility + in both direct execution and parallel task scenarios. """ print(f"--- Running command: {{{command}}} in {cwd or os.getcwd()} ---") @@ -64,14 +65,18 @@ def run_command(command: str, cwd: str = os.getcwd(), env: Dict[str, str] = {}) if env: process_env.update(env) - # By not setting stdout/stderr, they are inherited from the parent process, - # which means the output will go directly to the user's terminal. 
diff --git a/docker/linux/utility/build_utils.py b/docker/linux/utility/build_utils.py
index 0a11610f..8f61079d 100644
--- a/docker/linux/utility/build_utils.py
+++ b/docker/linux/utility/build_utils.py
@@ -3,7 +3,7 @@
 import os
 import tarfile

-from config.build_config import TOOLCHAIN_VERSIONS, Component
+from config.build_config import Component

 # Add project root to the Python path to allow importing 'config' module
 project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..'))
@@ -14,8 +14,6 @@
 import hashlib
 import concurrent.futures
 import time
-import traceback
-import json
 from typing import Dict, Set, Tuple, Optional, List, Callable
 from graphlib import TopologicalSorter
 from collections import defaultdict
@@ -55,6 +53,9 @@ def run_command(command: str, cwd: str = os.getcwd(), env: Dict[str, str] = {}):
     """
     Executes a shell command, directing its output to the current shell.
     Sets DEBIAN_FRONTEND to noninteractive to prevent interactive prompts.
+
+    Output is streamed in real-time to stdout/stderr for better visibility
+    in both direct execution and parallel task scenarios.
     """
     print(f"--- Running command: {{{command}}} in {cwd or os.getcwd()} ---")

@@ -64,14 +65,18 @@ def run_command(command: str, cwd: str = os.getcwd(), env: Dict[str, str] = {}):
     if env:
         process_env.update(env)

-    # By not setting stdout/stderr, they are inherited from the parent process,
-    # which means the output will go directly to the user's terminal.
+    # Explicitly set stdout and stderr to sys.stdout/sys.stderr for real-time output
+    # This ensures output is visible even when running in ProcessPoolExecutor
     process = subprocess.Popen(
         command,
         shell=True,
         cwd=cwd,
         env=process_env,
-        executable="/bin/bash"
+        executable="/bin/bash",
+        stdout=sys.stdout,
+        stderr=sys.stderr,
+        bufsize=1,  # Line buffered for real-time output
+        universal_newlines=True
     )

     process.wait()
@@ -361,7 +366,7 @@ def install_download_prerequisites(component: Component):
     print("⬇️ [SETUP] Installing download prerequisites (aria2c, gnupg)...")
     download_prerequisites = component.download_prerequisites
     pkg_list = " ".join(download_prerequisites)
-    run_command(f"apt install -y --no-install-recommends -o APT::Keep-Downloaded-Packages=true {pkg_list}")
+    run_command(f"apt install -y --no-install-recommends=true -o DPkg::Lock::Timeout=-1 {pkg_list}")
     print("✅ [SETUP] Download tools ready")

 def install_extract_prerequisites(component: Component):
@@ -377,7 +382,7 @@ def install_extract_prerequisites(component: Component):
     print("📂 [SETUP] Installing archive extraction tools...")
     extract_prerequisites = component.extract_prerequisites
     pkg_list = " ".join(extract_prerequisites)
-    run_command(f"apt install -y --no-install-recommends -o APT::Keep-Downloaded-Packages=true {pkg_list}")
+    run_command(f"apt install -y --no-install-recommends=true -o DPkg::Lock::Timeout=-1 {pkg_list}")
     print("✅ [SETUP] Extraction tools ready")

@@ -437,12 +442,19 @@ def extract_source(component):
     📂 Extract Component Source Archive

     Extracts the downloaded source tarball to the appropriate directory
-    structure, automatically detecting compression format.
+    structure, automatically detecting compression format and stripping
+    the top-level directory.

     Supports multiple archive formats:
     • .tar.xz (LZMA compression) - Used by most GNU projects
     • .tar.gz (Gzip compression) - Used by Linux kernel

+    The function automatically handles archives with a top-level directory:
+    1. Extracts directly to the target directory
+    2. Detects if there is a single top-level directory wrapper
+    3. Moves all contents up one level
+    4. Removes the empty wrapper directory
+
     Args:
         component: Component instance (glibc, gcc, llvm, or linux)
     """
@@ -459,8 +471,27 @@ def extract_source(component):
     print(f"   📁 Source: {tarball_path}")
     print(f"   📁 Target: {component.extracted_dir}")

-    # Auto-detect compression format and extract
+    # Auto-detect compression format and extract directly
     mode = "r:xz" if tarball_path.endswith(".tar.xz") else "r:gz"
     with tarfile.open(tarball_path, mode) as tar:
         tar.extractall(path=component.extracted_dir, filter='data')
+
+    # Check if we need to strip a top-level directory
+    extracted_items = os.listdir(component.extracted_dir)
+
+    if len(extracted_items) == 1 and os.path.isdir(os.path.join(component.extracted_dir, extracted_items[0])):
+        # Single top-level directory found - strip it
+        top_dir_name = extracted_items[0]
+        top_dir_path = os.path.join(component.extracted_dir, top_dir_name)
+        print(f"   🔄 Stripping top-level directory: {top_dir_name}")
+
+        # Move all contents from top_dir to parent (extracted_dir)
+        for item in os.listdir(top_dir_path):
+            src = os.path.join(top_dir_path, item)
+            dst = os.path.join(component.extracted_dir, item)
+            shutil.move(src, dst)
+
+        # Remove the now-empty top-level directory
+        os.rmdir(top_dir_path)

     print(f"✅ [EXTRACT] {component.name} extraction complete")
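A self-contained demonstration of the wrapper-stripping behavior implemented above. The archive name and contents are invented for the demo; filter="data" mirrors the extract_source() call and requires a Python version with tarfile extraction filters:

    import os, shutil, tarfile, tempfile

    work = tempfile.mkdtemp()
    wrapper = os.path.join(work, "glibc-x.y")  # hypothetical top-level wrapper
    os.makedirs(wrapper)
    open(os.path.join(wrapper, "configure"), "w").close()

    tarball = os.path.join(work, "src.tar.gz")
    with tarfile.open(tarball, "w:gz") as tar:
        tar.add(wrapper, arcname="glibc-x.y")

    target = os.path.join(work, "extracted")
    os.makedirs(target)
    with tarfile.open(tarball, "r:gz") as tar:
        tar.extractall(path=target, filter="data")

    # Same strip logic as extract_source(): lift contents out of the wrapper
    items = os.listdir(target)
    if len(items) == 1 and os.path.isdir(os.path.join(target, items[0])):
        top = os.path.join(target, items[0])
        for item in os.listdir(top):
            shutil.move(os.path.join(top, item), os.path.join(target, item))
        os.rmdir(top)

    print(os.listdir(target))  # ['configure'] -- wrapper stripped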
diff --git a/docker/linux/utility/common.sh b/docker/linux/utility/common.sh
index e69de29b..1244fde8 100644
--- a/docker/linux/utility/common.sh
+++ b/docker/linux/utility/common.sh
@@ -0,0 +1,66 @@
+#!/bin/bash
+# ========================================================================
+# 🚀 Clice Development Container Common Variables
+# ========================================================================
+# File: docker/linux/utility/common.sh
+# Purpose: Defines shared variables and functions for build and run scripts.
+#
+# This script is sourced by other scripts to ensure consistency in image
+# and container naming conventions.
+# ========================================================================
+
+set -e
+
+# ========================================================================
+# ⚙️ Default Configuration
+# ========================================================================
+
+# These are the default values that can be overridden by command-line arguments
+DEFAULT_COMPILER="clang"
+DEFAULT_VERSION="latest"
+DEFAULT_BUILD_STAGE="packed-image"
+# Where clice is located inside the docker container
+CLICE_DIR="/clice"
+# pwd inside the container when you open a shell
+DEFAULT_CONTAINER_WORKDIR="${CLICE_DIR}"
+
+# Dockerfile and build configuration
+DOCKERFILE_PATH="docker/linux/Dockerfile"
+
+# ========================================================================
+# 🏷️ Naming Convention Functions
+# ========================================================================
+
+# Generates the base image tag.
+# Usage: get_image_tag <compiler> <version>
+get_image_tag() {
+    local compiler="$1"
+    local version="$2"
+    echo "linux-${compiler}-v${version}"
+}
+
+# Generates the full name for the packed (release) image.
+# Usage: get_packed_image_name <compiler> <version>
+get_packed_image_name() {
+    local compiler="$1"
+    local version="$2"
+    local image_tag
+    image_tag=$(get_image_tag "$compiler" "$version")
+    echo "clice.io/clice:${image_tag}"
+}
+
+# Generates the full name for the expanded (development) image.
+# Usage: get_expanded_image_name <compiler> <version>
+get_expanded_image_name() {
+    local packed_image_name
+    packed_image_name=$(get_packed_image_name "$1" "$2")
+    echo "${packed_image_name}-expanded"
+}
+
+# Generates the name for the development container.
+# Usage: get_container_name <compiler> <version>
+get_container_name() {
+    local compiler="$1"
+    local version="$2"
+    echo "clice_dev-linux-${compiler}-v${version}"
+}
diff --git a/docker/linux/utility/create_release_package.py b/docker/linux/utility/create_release_package.py
index 1986ebfb..65f4f0c0 100644
--- a/docker/linux/utility/create_release_package.py
+++ b/docker/linux/utility/create_release_package.py
@@ -21,6 +21,7 @@
 import sys
 import tarfile
 import json
+import shutil

 # Add project root to Python path
 project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..'))
@@ -30,9 +31,14 @@
 from config.build_config import (
     PACKED_RELEASE_PACKAGE_PATH,
     RELEASE_PACKAGE_DIR,
+    CLICE_WORKDIR,
+    DEVELOPMENT_SHELL_VARS,
     ALL_COMPONENTS,
     # Component instances for structured access
-    TOOLCHAIN
+    TOOLCHAIN,
+    CLICE_SETUP_SCRIPTS,
+    BASHRC,
+    UV
 )

 # Import build utilities for parallel execution
@@ -41,6 +47,84 @@
     ParallelTaskScheduler
 )

+# ========================================================================
+# 🌍 Environment Setup Functions
+# ========================================================================
+
+def setup_environment_variables_and_entrypoint():
+    """
+    Setup .bashrc with environment variables and the container entrypoint script.
+
+    This function creates a complete .bashrc file that:
+    1. Exports environment variables from DEVELOPMENT_SHELL_VARS for persistent shell use
+    2. Sets internal variables (CLICE_WORKDIR, etc.) without export for script-only use
+    3. Embeds the container entrypoint script for automatic Python environment setup
+    """
+    print("🌍 Setting up .bashrc with environment variables and entrypoint script...")
+
+    # Read container entrypoint script from BashrcComponent
+    entrypoint_script_path = BASHRC.entrypoint_script_source
+
+    with open(entrypoint_script_path, 'r') as f:
+        entrypoint_content = f.read()
+
+    # Create .bashrc in BASHRC component package directory
+    bashrc_path = BASHRC.bashrc_path
+    os.makedirs(os.path.dirname(bashrc_path), exist_ok=True)
+
+    # Write complete .bashrc
+    with open(bashrc_path, 'w') as f:
+        f.write("# ========================================================================\n")
+        f.write("# 🚀 Clice Dev Container - Bash Configuration\n")
+        f.write("# ========================================================================\n")
+        f.write("# This file is auto-generated during image packaging.\n")
+        f.write("# It contains:\n")
+        f.write("#   1. Exported environment variables from DEVELOPMENT_SHELL_VARS\n")
+        f.write("#   2. Internal variables for container entrypoint (not exported)\n")
+        f.write("#   3. Container entrypoint script (auto Python environment setup)\n")
+        f.write("# ========================================================================\n\n")
+
+        # Export environment variables from DEVELOPMENT_SHELL_VARS
+        f.write("# Exported environment variables (from DEVELOPMENT_SHELL_VARS)\n")
+        for key, value in DEVELOPMENT_SHELL_VARS.items():
+            f.write(f'export {key}="{value}"\n')
+        f.write("\n")
+
+        # Set internal variables for container entrypoint (not exported)
+        f.write("# Internal variables for container entrypoint (not exported to user environment)\n")
+        f.write(f'CLICE_WORKDIR="{CLICE_WORKDIR}"\n')
+        f.write(f'RELEASE_PACKAGE_DIR="{RELEASE_PACKAGE_DIR}"\n')
+        f.write('UV_PACKAGE_DIR_NAME="uv-packages"\n')
+        f.write("\n")
+
+        # Write container entrypoint script
+        f.write("# ========================================================================\n")
+        f.write("# Container Entrypoint Script - Auto Python Environment Setup\n")
+        f.write("# ========================================================================\n")
+        f.write(entrypoint_content)
+        f.write("\n")
+
+    print(f"✅ .bashrc created at {bashrc_path}")
+    print(f"   📝 Exported variables: {len(DEVELOPMENT_SHELL_VARS)} from DEVELOPMENT_SHELL_VARS")
+    for key in DEVELOPMENT_SHELL_VARS.keys():
+        print(f"      • {key}")
+    print("   📝 Internal variables: CLICE_WORKDIR, RELEASE_PACKAGE_DIR, UV_PACKAGE_DIR_NAME")
+    print("   📝 Container entrypoint script embedded")
+
+def copy_setup_scripts():
+    """Copy setup scripts and configuration files as a complete directory structure."""
+    print("📋 Copying setup scripts and configuration files...")
+
+    # Get files to copy from the component definition
+    for src_rel in CLICE_SETUP_SCRIPTS.files_to_copy:
+        src = os.path.join(CLICE_WORKDIR, src_rel)
+        dst = os.path.join(CLICE_SETUP_SCRIPTS.package_dir, src_rel)
+        os.makedirs(os.path.dirname(dst), exist_ok=True)
+        shutil.copy2(src, dst)
+        print(f"   ✅ Copied: {src} -> {dst}")
+
+    print(f"✅ Setup scripts and configs copied to {CLICE_SETUP_SCRIPTS.package_dir}")
+
 # ========================================================================
 # 📋 Manifest Creation Functions
 # ========================================================================
@@ -73,7 +157,7 @@ def create_comprehensive_manifest():

     # Process each component from ALL_COMPONENTS
     for component in ALL_COMPONENTS:
-        component_dir = os.path.join(RELEASE_PACKAGE_DIR, component.name)
+        package_dir = component.package_dir

         component_info = {
             "name": component.name,
@@ -84,10 +168,10 @@ def create_comprehensive_manifest():
         }

         # Calculate component statistics
-        file_count = sum(len(files) for _, _, files in os.walk(component_dir))
+        file_count = sum(len(files) for _, _, files in os.walk(package_dir))
         dir_size = sum(
             os.path.getsize(os.path.join(dirpath, filename))
-            for dirpath, _, filenames in os.walk(component_dir)
+            for dirpath, _, filenames in os.walk(package_dir)
             for filename in filenames
         ) / (1024 * 1024)  # Convert to MB

@@ -99,7 +183,7 @@ def create_comprehensive_manifest():
             case "apt":
                 # Count APT packages
                 apt_packages = []
-                for file in os.listdir(component_dir):
+                for file in os.listdir(package_dir):
                     if file.endswith('.deb'):
                         pkg_name = file.split('_')[0]
                         if pkg_name not in apt_packages:
@@ -107,16 +191,12 @@ def create_comprehensive_manifest():
                 component_info["packages"] = sorted(apt_packages)
                 component_info["package_count"] = len(apt_packages)

-            case "python":
-                # Count Python packages
-                python_packages = []
-                for file in os.listdir(component_dir):
-                    if file.endswith('.whl'):
-                        pkg_name = file.split('-')[0]
-                        if pkg_name not in python_packages:
-                            python_packages.append(pkg_name)
-                component_info["packages"] = sorted(python_packages)
-                component_info["package_count"] = len(python_packages)
+            case "uv":
+                # UV and Python version information
+                component_info["uv_details"] = {
+                    "uv_version": UV.version,
+                    "python_version": UV.python_version,
+                }

             case "toolchain":
                 # Toolchain specific information
@@ -126,6 +206,17 @@ def create_comprehensive_manifest():
                     "linux_version": TOOLCHAIN.linux.version,
                     "llvm_version": TOOLCHAIN.llvm.version,
                 }
+
+            case "clice-setup-scripts":
+                # Setup scripts information - executed in-place
+                component_info["note"] = "Executed in-place during expansion, not extracted to CLICE_WORKDIR"
+                component_info["structure"] = "Complete directory tree (config/, docker/linux/utility/)"
+
+            case "bashrc":
+                # Bashrc information
+                bashrc_file = os.path.join(package_dir, ".bashrc")
+                component_info["bashrc_path"] = bashrc_file
+                component_info["bashrc_size_kb"] = round(os.path.getsize(bashrc_file) / 1024, 2)

         manifest["components"][component.name] = component_info
         manifest["summary"]["total_components"] += 1
@@ -174,34 +265,32 @@ def create_final_release_package():
     print(f"   📁 Source: {RELEASE_PACKAGE_DIR}")
     print(f"   📁 Target: {PACKED_RELEASE_PACKAGE_PATH}")

-    try:
-        with tarfile.open(PACKED_RELEASE_PACKAGE_PATH, 'w:xz', preset=9) as tar:
-            # Add all subdirectories and files, preserving original directory structure
-            for item in os.listdir(RELEASE_PACKAGE_DIR):
-                item_path = os.path.join(RELEASE_PACKAGE_DIR, item)
-                print(f"   📦 Adding: {item}")
-                tar.add(item_path, arcname=item)
-
-        # Report package statistics
-        package_size_mb = os.path.getsize(PACKED_RELEASE_PACKAGE_PATH) / (1024 * 1024)
-
-        # Calculate source directory size for compression ratio
-        source_size_mb = sum(
-            os.path.getsize(os.path.join(dirpath, filename))
-            for dirpath, _, filenames in os.walk(RELEASE_PACKAGE_DIR)
-            for filename in filenames
-        ) / (1024 * 1024)
-
-        compression_ratio = (source_size_mb - package_size_mb) / source_size_mb * 100 if source_size_mb > 0 else 0
-
-        print(f"✅ Final release package created: {PACKED_RELEASE_PACKAGE_PATH}")
-        print(f"📊 Source size: {source_size_mb:.1f} MB")
-        print(f"📊 Package size: {package_size_mb:.1f} MB")
-        print(f"📊 Compression ratio: {compression_ratio:.1f}%")
-
-    except Exception as e:
-        print(f"❌ Failed to create release package: {e}")
-        raise
+    # LZMA could be parallelized with multithreading, but that reduces the compression ratio,
+    # and with a higher preset the multithreading benefits diminish. Ref: https://github.com/python/cpython/pull/114954
+    # So we choose single-threaded for best compression
+    with tarfile.open(PACKED_RELEASE_PACKAGE_PATH, 'w:xz', preset=9) as tar:
+        # Add all subdirectories and files, preserving original directory structure
+        for item in os.listdir(RELEASE_PACKAGE_DIR):
+            item_path = os.path.join(RELEASE_PACKAGE_DIR, item)
+            print(f"   📦 Adding: {item}")
+            tar.add(item_path, arcname=item)
+
+    # Report package statistics
+    package_size_mb = os.path.getsize(PACKED_RELEASE_PACKAGE_PATH) / (1024 * 1024)
+
+    # Calculate source directory size for compression ratio
+    source_size_mb = sum(
+        os.path.getsize(os.path.join(dirpath, filename))
+        for dirpath, _, filenames in os.walk(RELEASE_PACKAGE_DIR)
+        for filename in filenames
+    ) / (1024 * 1024)
+
+    compression_ratio = (source_size_mb - package_size_mb) / source_size_mb * 100 if source_size_mb > 0 else 0
+
+    print(f"✅ Final release package created: {PACKED_RELEASE_PACKAGE_PATH}")
+    print(f"📊 Source size: {source_size_mb:.1f} MB")
+    print(f"📊 Package size: {package_size_mb:.1f} MB")
+    print(f"📊 Compression ratio: {compression_ratio:.1f}%")
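The preset trade-off referenced in the comment above can be observed directly with the lzma module (synthetic input; absolute numbers will vary):

    import lzma
    import time

    data = b"example payload " * 250_000  # ~4 MB of compressible data

    # Higher preset = smaller output, slower compression
    for preset in (1, 6, 9):
        start = time.perf_counter()
        size = len(lzma.compress(data, preset=preset))
        print(f"preset={preset}: {size} bytes, {time.perf_counter() - start:.2f}s")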
""" @@ -226,13 +316,20 @@ def main(): # Define packaging jobs with proper dependency management jobs = { + "setup_bashrc": Job("setup_bashrc", setup_environment_variables_and_entrypoint, ()), + "copy_setup_scripts": Job("copy_setup_scripts", copy_setup_scripts, ()), "create_manifest": Job("create_manifest", create_comprehensive_manifest, ()), "create_package": Job("create_package", create_final_release_package, ()), } - # Define dependencies - package creation depends on manifest + # Define dependencies + # - bashrc setup and script copy can run in parallel + # - Manifest creation depends on bashrc and scripts being ready + # - Package creation depends on manifest dependencies = { - "create_manifest": set(), + "setup_bashrc": set(), + "copy_setup_scripts": set(), + "create_manifest": {"setup_bashrc", "copy_setup_scripts"}, "create_package": {"create_manifest"}, } @@ -245,6 +342,8 @@ def main(): print("๐ŸŽ‰ ========================================================================") print(f"โœ… Final release package: {PACKED_RELEASE_PACKAGE_PATH}") print(f"โœ… Manifest: {RELEASE_PACKAGE_DIR}/manifest.json") + print(f"โœ… Bashrc: {BASHRC.bashrc_path}") + print(f"โœ… Setup scripts: {CLICE_SETUP_SCRIPTS.package_dir}") print("๐ŸŽ‰ ========================================================================") if __name__ == "__main__": diff --git a/docker/linux/utility/download_dependencies.py b/docker/linux/utility/download_dependencies.py index c4f8876b..21776212 100644 --- a/docker/linux/utility/download_dependencies.py +++ b/docker/linux/utility/download_dependencies.py @@ -48,7 +48,6 @@ from config.build_config import ( RELEASE_PACKAGE_DIR, PYPROJECT_PATH, - # Component instances for structured access APT, UV, CMAKE, XMAKE ) @@ -61,11 +60,11 @@ def install_download_prerequisites() -> None: print("๐Ÿ“ฆ Installing dependencies download prerequisites...") # Update package lists first - run_command("apt update") + run_command("apt update -o DPkg::Lock::Timeout=-1") # Install all download prerequisites (universal + APT-specific) download_prerequisites = APT.download_prerequisites - run_command(f"apt install -y --no-install-recommends -o APT::Keep-Downloaded-Packages=true {' '.join(download_prerequisites)}") + run_command(f"apt install -y --no-install-recommends=true -o DPkg::Lock::Timeout=-1 {' '.join(download_prerequisites)}") print(f"โœ… Installed {len(download_prerequisites)} download prerequisites") @@ -296,39 +295,31 @@ def download_xmake() -> None: print(f"๐Ÿ“ฆ XMake copied to package: {xmake_package_file}") def download_python_packages() -> None: - """Download Python packages using uv.""" - print("๐Ÿ Downloading Python packages...") - - # Create both cache and package directories - os.makedirs(UV.cache_dir, exist_ok=True) - os.makedirs(UV.package_dir, exist_ok=True) - - # Download packages specified in pyproject.toml to cache directory first - if os.path.exists(PYPROJECT_PATH): - print(f"๐Ÿ“ฅ Downloading Python packages to cache: {UV.cache_dir}") - # Use uv to download packages to cache directory - run_command(f"uv sync --cache-dir {UV.cache_dir}") - - # Copy only wheel files from cache to package directory - print("๐Ÿ“ฆ Copying wheel files from cache to package directory...") - copied_count = 0 - - # Find all .whl files in cache directory recursively - for root, dirs, files in os.walk(UV.cache_dir): - for file in files: - if file.endswith('.whl'): - src = os.path.join(root, file) - dst = os.path.join(UV.package_dir, file) - # Only copy if not already exists to avoid duplicates - if not 
-                        shutil.copy2(src, dst)
-                        copied_count += 1
-
-        print(f"📊 Copied {copied_count} wheel files to package directory")
-        print(f"✅ Python packages cached to {UV.cache_dir}")
-        print(f"📦 Python packages ready in {UV.package_dir}")
-    else:
-        print(f"⚠️ pyproject.toml not found at {PYPROJECT_PATH}")
+    """
+    Download Python packages from pyproject.toml using uv sync.
+
+    Uses uv sync to download all dependencies to UV's packages cache directory.
+    """
+    print("🐍 Downloading Python packages from pyproject.toml...")
+
+    # Create cache directory for packages
+    os.makedirs(UV.packages_package_dir, exist_ok=True)
+
+    # Set UV_CACHE_DIR to the packages cache directory
+    print(f"📥 Downloading package wheels to UV packages package dir: {UV.packages_package_dir}")
+    print(f"📋 Using pyproject.toml from: {PYPROJECT_PATH}")
+
+    # Run uv sync with the project root as the working directory
+    # UV will automatically find pyproject.toml in the project root
+    project_root = os.path.dirname(PYPROJECT_PATH)
+
+    run_command(
+        f"UV_CACHE_DIR={UV.packages_package_dir} uv sync --no-install-project --no-editable",
+        cwd=project_root
+    )
+
+    print(f"✅ Package wheels cached to: {UV.packages_package_dir}")
+    print(f"📁 Packages cache will be available to later stages via cache mount")

 # LLVM downloading removed as per requirements

@@ -344,21 +335,24 @@ def main():
     os.makedirs(RELEASE_PACKAGE_DIR, exist_ok=True)

     # Define download jobs with proper dependency management
+    # Note: Python installation is now done in the Dockerfile, not here
     jobs = {
         "install_download_prerequisites": Job("install_download_prerequisites", install_download_prerequisites, ()),
         "download_apt_packages": Job("download_apt_packages", download_apt_packages, ()),
+        "download_python_packages": Job("download_python_packages", download_python_packages, ()),
         "download_cmake": Job("download_cmake", download_cmake, ()),
         "download_xmake": Job("download_xmake", download_xmake, ()),
-        "download_python_packages": Job("download_python_packages", download_python_packages, ()),
     }

-    # Define dependencies - all downloads depend on prerequisites installation
+    # Define dependencies
+    # UV and package downloads need install_download_prerequisites
+    # Python installation is handled in the Dockerfile base stage
     dependencies = {
         "install_download_prerequisites": set(),
         "download_apt_packages": {"install_download_prerequisites"},
+        "download_python_packages": {"install_download_prerequisites"},
         "download_cmake": {"install_download_prerequisites"},
         "download_xmake": {"install_download_prerequisites"},
-        "download_python_packages": {"install_download_prerequisites"},
     }

     # Execute downloads in parallel where possible
@@ -369,4 +363,4 @@ def main():
     print(f"📁 Cache directory: {RELEASE_PACKAGE_DIR}")

 if __name__ == "__main__":
-    main()
\ No newline at end of file
+    main()
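On the consuming side of this cache, a later stage could perform a fully offline sync against the same directory. The helper below is hypothetical and not part of this patch; it assumes only uv's standard UV_CACHE_DIR variable and its global --offline flag:

    from build_utils import run_command  # same helper used throughout these scripts

    def offline_sync(packages_package_dir: str, project_root: str) -> None:
        # --offline forbids network access, so the sync must resolve from the cache
        run_command(
            f"UV_CACHE_DIR={packages_package_dir} uv sync --offline",
            cwd=project_root,
        )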
diff --git a/docker/linux/utility/local_setup.py b/docker/linux/utility/local_setup.py
index 5734c2e3..7f5cd021 100644
--- a/docker/linux/utility/local_setup.py
+++ b/docker/linux/utility/local_setup.py
@@ -26,21 +26,19 @@
 import os
 import sys
 import tarfile
+import shutil

 # Ensure utility directory is in Python path for imports
-utility_path = os.path.dirname(os.path.abspath(__file__))
-if utility_path not in sys.path:
-    sys.path.insert(0, utility_path)
+project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..'))
+if project_root not in sys.path:
+    sys.path.insert(0, project_root)

 # Import all configuration from build_config using new component structure
 from config.build_config import (
-    ENVIRONMENT_CONFIG_FILE,
     RELEASE_PACKAGE_DIR,
-    PACKED_RELEASE_PACKAGE_PATH,
-    DEVELOPMENT_SHELL_VARS,
-    PROJECT_ROOT,
+    CLICE_WORKDIR,
     # Import component instances for structured access
-    APT, UV, CMAKE, XMAKE, TOOLCHAIN
+    APT, UV, CMAKE, XMAKE, TOOLCHAIN, BASHRC
 )

 # Import build_utils for run_command and other utilities
@@ -54,33 +52,21 @@
 # 🌍 Environment Setup Functions
 # ========================================================================

-def setup_environment_variables():
-    """Write environment variables to /root/.bashrc for persistent shell environment."""
-    print("🌍 Setting up environment variables...")
+def deploy_bashrc():
+    """Deploy .bashrc from the package to /root/.bashrc."""
+    print("🌍 Deploying .bashrc configuration...")

-    bashrc_path = ENVIRONMENT_CONFIG_FILE
+    source_bashrc = BASHRC.bashrc_path
+    target_bashrc = "/root/.bashrc"

-    # Ensure .bashrc exists
-    if not os.path.exists(bashrc_path):
-        with open(bashrc_path, 'w') as f:
-            f.write("# Generated by Clice local setup\n")
-
-    # Read existing content to avoid duplicates
-    existing_content = ""
-    with open(bashrc_path, 'r') as f:
-        existing_content = f.read()
+    if not os.path.exists(source_bashrc):
+        print(f"⚠️ Warning: .bashrc not found at {source_bashrc}")
+        return

-    # Add environment variables from build config
-    for key, value in DEVELOPMENT_SHELL_VARS.items():
-        env_line = f"export {key}=\"{value}\"\n"
-        if f'export {key}=' not in existing_content:
-            with open(bashrc_path, 'a') as f:
-                f.write(env_line)
-            print(f"   ✅ Added {key}={value}")
-        else:
-            print(f"   ✅ {key} already set")
+    # Copy .bashrc to the target location
+    shutil.copy2(source_bashrc, target_bashrc)

-    print("✅ Environment variables configured")
+    print(f"✅ .bashrc deployed to {target_bashrc}")

 # ========================================================================
 # 📦 Package Installation Functions
@@ -90,11 +76,6 @@ def install_apt_packages(apt_component):
     """Install APT development packages from downloaded .deb files."""
     print("📦 Installing APT development packages...")

-    if not os.path.exists(apt_component.package_dir):
-        print(f"⚠️ APT package directory not found: {apt_component.package_dir}")
-        print("Downloading packages first...")
-        return
-
     # Install all .deb files found in the package directory
     deb_files = [f for f in os.listdir(apt_component.package_dir) if f.endswith('.deb')]

@@ -109,14 +90,7 @@ def install_toolchain(toolchain_component):
     """Install the custom toolchain."""
     print("🔧 Installing custom toolchain...")

-    # Toolchain is already installed at toolchain package directory within RELEASE_PACKAGE_DIR
-    if not os.path.exists(toolchain_component.package_dir):
-        print("⚠️ Toolchain package directory not found")
-        return
-
     print(f"✅ Toolchain available at: {toolchain_component.package_dir}")
-
-    print(f"✅ Toolchain setup completed")

 def install_cmake(cmake_component):
     """Install CMake from pre-downloaded installer."""
@@ -125,10 +99,6 @@ def install_cmake(cmake_component):
     cmake_installer_filename = cmake_component.tarball_name
     cmake_installer_path = os.path.join(cmake_component.package_dir, cmake_installer_filename)

-    if not os.path.exists(cmake_installer_path):
-        print(f"⚠️ CMake installer not found: {cmake_installer_path}")
-        return
-
     # Make installer executable and run it
     run_command(f"chmod +x {cmake_installer_path}")

@@ -157,10 +127,6 @@ def install_xmake(xmake_component):
     xmake_filename = xmake_component.tarball_name
     xmake_path = os.path.join(xmake_component.package_dir, xmake_filename)
-    if not os.path.exists(xmake_path):
-        print(f"⚠️ XMake package not found: {xmake_path}")
-        return
-
     # Make XMake bundle executable
     run_command(f"chmod +x {xmake_path}")

@@ -173,10 +139,6 @@ def install_python_packages(uv_component):
     """Install Python packages from uv cache."""
     print("🐍 Installing Python packages...")

-    if not os.path.exists(uv_component.package_dir):
-        print(f"⚠️ Python package directory not found: {uv_component.package_dir}")
-        return
-
     # Install wheel files found in the UV package directory
     wheel_files = [f for f in os.listdir(uv_component.package_dir) if f.endswith('.whl')]

@@ -196,55 +158,34 @@ def setup_git_safe_directory():
     """Configure git to treat the workspace as safe."""
     print("🔧 Configuring git safe directory...")

-    run_command(f"git config --global --add safe.directory {PROJECT_ROOT}")
+    run_command(f"git config --global --add safe.directory {CLICE_WORKDIR}")

     print("✅ Git safe directory configured")

-def extract_release_archive():
-    """Extract the release archive to setup the environment."""
-    print("📦 Extracting release archive...")
-
-    if not os.path.exists(PACKED_RELEASE_PACKAGE_PATH):
-        print("⚠️ Packed release archive not found")
-        return
-
-    # Extract the release archive to the appropriate location
-    release_parent_dir = os.path.dirname(RELEASE_PACKAGE_DIR)
-    os.makedirs(release_parent_dir, exist_ok=True)
-
-    # Use Python tarfile module for extraction (reverse of packaging)
-    print(f"   📁 Source: {PACKED_RELEASE_PACKAGE_PATH}")
-    print(f"   📁 Target: {release_parent_dir}")
-
-    with tarfile.open(PACKED_RELEASE_PACKAGE_PATH, 'r:xz') as tar:
-        tar.extractall(path=release_parent_dir, filter='data')
-    print(f"✅ Release archive extracted to: {RELEASE_PACKAGE_DIR}")
-
 def main():
     """Main setup orchestration function with parallel task scheduling."""
-    print("🚀 Unpacking Clice Dev Container...")
+    print("🚀 Setting up Clice Dev Container...")

     # Define setup jobs with proper dependency management
+    # Note: The release archive is already extracted by the Dockerfile, so we start with installations
     jobs = {
-        "extract_release_archive": Job("extract_release_archive", extract_release_archive, ()),
-        "setup_environment_variables": Job("setup_environment_variables", setup_environment_variables, ()),
         "setup_git_safe_directory": Job("setup_git_safe_directory", setup_git_safe_directory, ()),
         "install_apt_packages": Job("install_apt_packages", install_apt_packages, (APT,)),
         "install_toolchain": Job("install_toolchain", install_toolchain, (TOOLCHAIN,)),
         "install_cmake": Job("install_cmake", install_cmake, (CMAKE,)),
         "install_xmake": Job("install_xmake", install_xmake, (XMAKE,)),
         "install_python_packages": Job("install_python_packages", install_python_packages, (UV,)),
+        "deploy_bashrc": Job("deploy_bashrc", deploy_bashrc, ()),
     }

-    # Define dependencies - some installations can run in parallel after archive extraction
+    # Define dependencies - git setup depends on apt packages
     dependencies = {
-        "extract_release_archive": set(),
-        "setup_environment_variables": set(),
-        "setup_git_safe_directory": set(),
-        "install_apt_packages": {"extract_release_archive"},
-        "install_toolchain": {"extract_release_archive"},
-        "install_cmake": {"extract_release_archive"},
-        "install_xmake": {"extract_release_archive"},
-        "install_python_packages": {"extract_release_archive"},
+        "install_apt_packages": set(),
+        "setup_git_safe_directory": {"install_apt_packages"},
+        "install_toolchain": set(),
+        "install_cmake": set(),
set(), + "install_xmake": set(), + "install_python_packages": set(), + "deploy_bashrc": set(), } # Execute setup tasks in parallel where possible @@ -253,6 +194,7 @@ def main(): print("โœ… Clice development environment setup completed successfully!") print(f"๐Ÿ“ฆ Components installed from: {RELEASE_PACKAGE_DIR}") + print(f"๐Ÿ“ Bashrc deployed to: /root/.bashrc") if __name__ == "__main__": main() \ No newline at end of file diff --git a/docs/en/dev/build.md b/docs/en/dev/build.md index 474a4b86..de2564ed 100644 --- a/docs/en/dev/build.md +++ b/docs/en/dev/build.md @@ -126,131 +126,125 @@ We provide a complete Docker development container solution with pre-configured ### ๐Ÿš€ Quick Start -#### Build Development Container -```bash -# Build default container (clang + latest version) -./docker/linux/build.sh - -# Build container with specific compiler and version -./docker/linux/build.sh --compiler gcc --version v1.2.3 -``` - #### Run Development Container ```bash # Run default container ./docker/linux/run.sh # Run container with specific compiler -./docker/linux/run.sh --compiler clang ./docker/linux/run.sh --compiler gcc # Run container with specific version -./docker/linux/run.sh --compiler clang --version v1.2.3 +./docker/linux/run.sh --version v1.2.3 ``` #### Container Management ```bash -# Reset container (remove existing container) +# Reset container (remove and recreate) ./docker/linux/run.sh --reset # Update container image (pull latest version) ./docker/linux/run.sh --update - -# Rebuild container image -./docker/linux/run.sh --rebuild ``` -#### Multi-version Testing +### ๐Ÿ—๏ธ Development Workflow + +#### Complete Development Flow Example ```bash -# Test different compilers -./docker/linux/run.sh --compiler gcc +# 1. Start development session ./docker/linux/run.sh --compiler clang -# Test specific versions -./docker/linux/run.sh --version v1.0.0 -./docker/linux/run.sh --version latest -``` +# 2. Build project inside container (project directory auto-mounted to /clice) +cd /clice +mkdir build && cd build -### ๐Ÿ“‹ Container Configuration +# Build with CMake +cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Debug -DLLVM_INSTALL_PATH="/usr/local/llvm" +ninja -#### Supported Parameters -| Parameter | Description | Default | -|-----------|-------------|---------| -| `--compiler` | Compiler type (gcc/clang) | `clang` | -| `--version` | Version tag | `latest` | -| `--reset` | Reset container | - | -| `--rebuild` | Force rebuild image | - | -| `--update` | Pull latest image | - | - -#### Generated Image Naming -- Format: `clice-io/clice:linux-{compiler}-{version}` -- Examples: - - `clice-io/clice:linux-clang-latest` - - `clice-io/clice:linux-gcc-v1.2.3` - -### ๐Ÿ”ง Advanced Usage - -#### Execute Custom Commands -```bash -# Execute specific command in container -./docker/linux/run.sh "cmake --version && xmake --version" - -# Run tests -./docker/linux/run.sh "cd /clice/build && ctest" - -# Interactive debugging -./docker/linux/run.sh "gdb ./build/clice" +# Or build with XMake +xmake f --mode=debug --toolchain=clang +xmake build --all ``` -## Building Docker Image +### ๐Ÿ“ฆ Container Features -Use the following command to build docker image: +#### Pre-installed Tools and Environment +- **Compilers**: GCC 14, Clang 20 (from official LLVM PPA) +- **Build Systems**: CMake 3.28+, XMake 2.8+ +- **Development Tools**: Complete C++ development stack including debuggers, profilers, etc. 
+- **LLVM Libraries**: Pre-configured LLVM 20.x development libraries and headers +- **Python Environment**: Consistent Python environment managed by uv -```bash -$ docker build -t clice . -``` +#### Automation Features +- **Environment Isolation**: Independent containers per compiler and version +- **Persistence**: Container state persists across sessions +- **Auto-mount**: Project directory auto-mounted to `/clice` +- **Version Awareness**: Supports creating a dev environment from existing release images -Run docker image by running the following command: +### ๐ŸŽฏ Use Cases +#### Daily Development ```bash -$ docker run --rm -it clice --help -OVERVIEW: clice is a new generation of language server for C/C++ -... +# Start development environment (auto-build if image doesn't exist) +./docker/linux/run.sh + +# Container will automatically: +# - Check and start existing container, or create new one +# - Mount project directory to /clice +# - Provide complete development environment ``` -The directory structure of the docker image is as follows: +#### Multi-compiler Testing +```bash +# Test different compilers +./docker/linux/run.sh --compiler gcc +./docker/linux/run.sh --compiler clang +# Each compiler has an independent container and environment ``` -/opt/clice -โ”œโ”€โ”€ bin -โ”‚ โ”œโ”€โ”€ clice -> /usr/local/bin/clice -โ”œโ”€โ”€ include -โ”œโ”€โ”€ lib -โ”œโ”€โ”€ LICENSE -โ”œโ”€โ”€ README.md -``` - -Hint: launch clice in the docker container by running the following command: +#### Version Management ```bash -$ docker run --rm -it --entrypoint bash clice +# Use specific version +./docker/linux/run.sh --version v1.0.0 + +# Update to the latest version (can be combined with --version, but has no effect on released versions since their images cannot be updated) +./docker/linux/run.sh --update ``` -## Development Container +### ๐Ÿ“‹ Detailed Parameters -We provide Docker images as a pre-configured environment to streamline the setup process. You can use the following scripts to manage the development container. These scripts can be run from the project root directory. +#### run.sh Parameters +| Parameter | Description | Default | +|-----------|-------------|---------| +| `--compiler <compiler>` | Compiler type | `clang` | +| `--version <version>` | Version tag | `latest` | +| `--reset` | Remove and recreate container | - | +| `--update` | Pull latest image and update | - | + +#### Generated Image Naming Convention +- **Release image**: `clice-io/clice:linux-{compiler}-{version}` +- **Development image**: `clice-io/clice:linux-{compiler}-{version}-expanded` +- Examples: - `clice-io/clice:linux-clang-latest` - `clice-io/clice:linux-clang-latest-expanded` - `clice-io/clice:linux-gcc-v1.2.3` -```bash -# Build the development image -./docker/linux/build.sh +### ๐Ÿ”ง Advanced Usage -# Run the container with the clang toolchain -./docker/linux/run.sh --compiler clang +#### Execute Custom Commands +```bash +# Execute specific command in container (use -- separator) +./docker/linux/run.sh -- cmake --version -# Run the container with the gcc toolchain -./docker/linux/run.sh --compiler gcc +# Execute multiple commands +./docker/linux/run.sh -- "cd /clice/build && cmake .."
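+ +# The -- separator forwards everything after it to a shell inside the container, +# so several build steps can be chained in one invocation (illustrative example) +./docker/linux/run.sh -- "cd /clice/build && ninja && ctest"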
+``` -# Reset the container (stops and removes the existing one) +#### Container Lifecycle Management +```bash +# Complete cleanup and rebuild ./docker/linux/run.sh --reset ``` diff --git a/docs/en/dev/dev-container-architecture.md b/docs/en/dev/dev-container-architecture.md index 559e7502..fd2cba9a 100644 --- a/docs/en/dev/dev-container-architecture.md +++ b/docs/en/dev/dev-container-architecture.md @@ -579,7 +579,7 @@ Update versions in `config/default-toolchain-version.json`: ### Adding New Dependencies 1. Update package lists in `config/build_config.py` 2. Rebuild container with `./docker/linux/build.sh --rebuild` -3. Verify with `./docker/linux/run.sh --reset` +3. Verify with `./docker/linux/run.sh --update` ### Container Health Checks ```bash diff --git a/docs/zh/dev/dev-container-architecture.md b/docs/zh/dev/dev-container-architecture.md index 8d764602..a45ce9e0 100644 --- a/docs/zh/dev/dev-container-architecture.md +++ b/docs/zh/dev/dev-container-architecture.md @@ -579,7 +579,7 @@ DEV_CONTAINER_BASIC_TOOLS = [ ### ๆทปๅŠ ๆ–ฐไพ่ต– 1. ๆ›ดๆ–ฐ `config/build_config.py` ไธญ็š„ๅŒ…ๅˆ—่กจ 2. ไฝฟ็”จ `./docker/linux/build.sh --rebuild` ้‡ๅปบๅฎนๅ™จ -3. ไฝฟ็”จ `./docker/linux/run.sh --reset` ้ชŒ่ฏ +3. ไฝฟ็”จ `./docker/linux/run.sh --update` ้ชŒ่ฏ ### ๅฎนๅ™จๅฅๅบทๆฃ€ๆŸฅ ```bash From 4f4b5adb7926ee5445457d9f536dfcf12c408c09 Mon Sep 17 00:00:00 2001 From: sora_mono <849526320@qq.com> Date: Sun, 26 Oct 2025 04:01:15 +0800 Subject: [PATCH 3/4] Optimized release image size Removed useless comment --- config/build_config.py | 324 ++++++------------ config/default-toolchain-version.json | 2 +- docker/.dockerignore | 10 - docker/linux/Dockerfile | 112 +++--- docker/linux/build.sh | 52 +-- docker/linux/container-entrypoint.sh | 11 +- docker/linux/run.sh | 43 +-- .../utility/build_clice_compiler_toolchain.py | 268 ++------------- docker/linux/utility/build_utils.py | 222 ++++-------- docker/linux/utility/common.sh | 9 +- .../linux/utility/create_release_package.py | 155 +++------ docker/linux/utility/download_dependencies.py | 57 +-- docker/linux/utility/local_setup.py | 52 +-- 13 files changed, 352 insertions(+), 965 deletions(-) delete mode 100644 docker/.dockerignore diff --git a/config/build_config.py b/config/build_config.py index 72aee885..b8d2f42c 100644 --- a/config/build_config.py +++ b/config/build_config.py @@ -1,36 +1,4 @@ -# ======================================================================== -# ๐Ÿ”ง Clice Toolchain Build Configuration -# ======================================================================== -# File: config/build_config.py -# Purpose: Centralized configuration constants for the toolchain build system -# -# This module provides a comprehensive configuration framework for building -# the complete Clice compiler toolchain, including glibc, GCC, LLVM, and -# Linux kernel headers. -# ======================================================================== - -""" -๐Ÿš€ Clice Toolchain Build Configuration - -This module serves as the central configuration hub for the Clice toolchain -build process. It defines all necessary constants, paths, and component -configurations required to build a complete development environment. 
- -Key Features: - โ€ข Centralized toolchain component definitions - โ€ข Environment variable management - โ€ข Build dependency specifications - โ€ข Cross-platform compatibility settings - โ€ข Automated version management - -Supported Components: - โ€ข glibc (GNU C Library) - โ€ข GCC (GNU Compiler Collection) - โ€ข LLVM (Low Level Virtual Machine) - โ€ข Linux Kernel Headers - โ€ข CMake Build System - โ€ข XMake Build System -""" +"""Centralized configuration constants for the toolchain build system.""" import json import os @@ -40,48 +8,42 @@ # ๐ŸŒ Environment Variables and Core Paths # ======================================================================== -# Global environment variables that will be written to /root/.bashrc -# and utilized by the run_command execution framework +# Global environment variables that will be used in dev-container shells DEVELOPMENT_SHELL_VARS: Dict[str, str] = { "PATH": "/root/.local/bin:${PATH}", - "XMAKE_ROOT": "y" # Enable XMake auto-updating and self-management system + "XMAKE_ROOT": "y" } -# Specialized environment variables for toolchain build processes -# These variables control linking behavior and runtime path resolution +# Environment variables specifically for toolchain build processes +# NOT used in dev-container shells TOOLCHAIN_BUILD_ENV_VARS: Dict[str, str] = { - "ORIGIN": "$ORIGIN" # Enable relative rpath for portable binary distribution + "ORIGIN": "$ORIGIN" # Enable relative rpath for portable binary distribution } -# Core project structure definitions -# CLICE_WORKDIR can be customized via environment variable, defaults to /clice -CLICE_WORKDIR: str = os.getenv("CLICE_WORKDIR", "") # Working directory inside Docker container -PROJECT_ROOT: str = CLICE_WORKDIR # Root directory of the Clice project -PYPROJECT_PATH: str = os.path.join(PROJECT_ROOT, "pyproject.toml") # Python project configuration file -TOOLCHAIN_BUILD_ROOT: str = "/toolchain-build" # Root directory for all toolchain builds -TOOLCHAIN_CONFIG_PATH: str = os.path.join(PROJECT_ROOT, "config/default-toolchain-version.json") # Version definitions +# CLICE_WORKDIR is distinct from PROJECT_ROOT +# CLICE_WORKDIR is where the Clice project is mounted inside the container CLICE_WORKDIR: str = os.getenv("CLICE_WORKDIR", "") +# PROJECT_ROOT is the root directory of the Clice project +# It can differ from CLICE_WORKDIR when build_config.py is executed in the expand stage +PROJECT_ROOT: str = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) +PYPROJECT_PATH: str = os.path.join(PROJECT_ROOT, "pyproject.toml") +TOOLCHAIN_CONFIG_PATH: str = os.path.join(PROJECT_ROOT, "config/default-toolchain-version.json") # ======================================================================== # ๐Ÿ“ฆ Release Package Configuration (Cross-Stage Variables) # ======================================================================== -# These variables are designed to be passed across Docker build stages -# RELEASE_PACKAGE_DIR is the main package directory, PACKED_RELEASE_PACKAGE_PATH is the compressed archive +# Main package directory RELEASE_PACKAGE_DIR: str = os.getenv("RELEASE_PACKAGE_DIR", "") +# Compressed package archive PACKED_RELEASE_PACKAGE_PATH: str = os.getenv("PACKED_RELEASE_PACKAGE_PATH", "") +# Bashrc environment configuration file ENVIRONMENT_CONFIG_FILE: str = os.getenv("ENVIRONMENT_CONFIG_FILE", "") - -# Source code cache directory for toolchain build +# File cache directory for toolchain builds +# Only cache files that never change, e.g.
source code tarballs BUILD_CACHE_DIR: str = os.getenv("BUILD_CACHE_DIR", "") - WORKDIR_ROOT: str = "/dev-container-build" # Temporary work directory for builds (not persistent) -# ======================================================================== -# ๐Ÿ—๏ธ Dynamic Configuration Loading -# ======================================================================== - -# Dynamic version loading from configuration file -# This enables centralized version management across all build components TOOLCHAIN_VERSIONS: Dict[str, Any] = {} with open(TOOLCHAIN_CONFIG_PATH, "r") as f: TOOLCHAIN_VERSIONS = json.load(f) @@ -90,11 +52,9 @@ # ๐Ÿ” GPG Verification Configuration # ======================================================================== -# Trusted GPG key servers for source code verification -# Multiple servers provide redundancy for package signature validation GPG_KEY_SERVER: List[str] = [ - "keys.openpgp.org", # Primary OpenPGP keyserver - "keyserver.ubuntu.com" # Ubuntu's reliable keyserver mirror + "keys.openpgp.org", + "keyserver.ubuntu.com" ] # ======================================================================== @@ -102,26 +62,6 @@ # ======================================================================== class Component: - """ - ๐Ÿ”ง Base Component Configuration Class - - Provides a unified interface for all build components including APT packages, - development tools, and toolchain components. Each component manages its own - directory structure and download configuration. - - Attributes: - name: Component identifier (unique across all components) - version: Component version from TOOLCHAIN_VERSIONS (None for versionless components) - package_dir: Directory for final packaged files in RELEASE_PACKAGE_DIR - cache_dir: Directory for persistent caches (downloads, APT packages, etc.) 
- work_dir: Directory for temporary build files (not persistent) - base_url: Base URL pattern for downloads (class-level, optional) - tarball_name_pattern: Tarball filename pattern (class-level, optional) - verification_name_pattern: Verification filename pattern (class-level, optional) - download_prerequisites: Tools required for downloading source code (class-level) - extract_prerequisites: Tools required for extracting archives (class-level) - """ - # Class-level URL patterns (overridden by subclasses) base_url: str = "" tarball_name_pattern: str = "" @@ -130,8 +70,7 @@ class Component: # Class-level prerequisites configuration (overridden by subclasses) download_prerequisites: List[str] = [ "aria2", # High-speed multi-connection downloader - "gnupg", # GPG signature verification system - "git", # Required for git clone llvm + "gnupg", # For GPG signature verification, python-gnupg is just a wrapper ] extract_prerequisites: List[str] = [ @@ -152,20 +91,25 @@ class Component: def __init__(self, name: str, version: str = "unknown"): self.name = name self.version = version - - # Directory structure generation based on name and version - self.package_dir = os.path.join(RELEASE_PACKAGE_DIR, self.versioned_name) - self.cache_dir = os.path.join(BUILD_CACHE_DIR, self.versioned_name) - self.work_dir = os.path.join(WORKDIR_ROOT, self.versioned_name) + + @property + def package_dir(self) -> str: + return os.path.join(RELEASE_PACKAGE_DIR, self.versioned_name) + + @property + def cache_dir(self) -> str: + return os.path.join(BUILD_CACHE_DIR, self.versioned_name) + + @property + def work_dir(self) -> str: + return os.path.join(WORKDIR_ROOT, self.versioned_name) @property def versioned_name(self) -> str: - """Generate folder name from component name and version.""" return f"{self.name}-{self.version}" @property def tarball_name(self) -> str: - """Complete tarball filename.""" if not self.tarball_name_pattern: raise ValueError(f"Component '{self.name}' missing required tarball_name_pattern") @@ -173,7 +117,6 @@ def tarball_name(self) -> str: @property def tarball_url(self) -> str: - """Complete download URL (requires base_url and tarball_name_pattern).""" if not self.base_url: raise ValueError(f"Component '{self.name}' missing required base_url") if not self.tarball_name_pattern: @@ -184,7 +127,6 @@ def tarball_url(self) -> str: @property def verification_name(self) -> str: - """Complete verification filename.""" if not self.verification_name_pattern: raise ValueError(f"Component '{self.name}' missing required verification_name_pattern") @@ -192,58 +134,46 @@ def verification_name(self) -> str: @property def verification_url(self) -> str: - """Verification file download URL.""" formatted_base_url = self.base_url.format(version=self.version, system=self.host_system, machine=self.host_machine) return f"{formatted_base_url}/{self.verification_name}" @property def host_triplet(self) -> str: - """Host system triplet (e.g., x86_64-linux-gnu).""" return f"{self.host_machine}-{self.host_system}-gnu" @property def target_triplet(self) -> str: - """Target system triplet (e.g., x86_64-linux-gnu).""" return f"{self.target_machine}-{self.target_system}-gnu" class ToolchainSubComponent(Component): - """ - ๐Ÿ”ง Toolchain Sub-Component Configuration Class - - Specialized component for toolchain elements (glibc, gcc, llvm, linux). - Creates subdirectories under the main toolchain component structure. 
- - Additional Attributes: - parent_component: Reference to parent toolchain component - extracted_dir: Directory for extracted source codwe - src_dir: Version-specific source directory - build_dir: Out-of-tree build directory - """ - def __init__(self, name: str, parent_component: Component): version = TOOLCHAIN_VERSIONS[name] super().__init__(name, version) self.parent_component = parent_component - - # Override directory structure to be under parent toolchain - self.package_dir = os.path.join(self.parent_component.package_dir, self.versioned_name) - self.cache_dir = os.path.join(self.parent_component.cache_dir, self.versioned_name) - self.work_dir = os.path.join(self.parent_component.work_dir, self.versioned_name) + @property + def package_dir(self) -> str: + return os.path.join(self.parent_component.package_dir, self.versioned_name) + + @property + def cache_dir(self) -> str: + return os.path.join(self.parent_component.cache_dir, self.versioned_name) + + @property + def work_dir(self) -> str: + return os.path.join(self.parent_component.work_dir, self.versioned_name) + @property def extracted_dir(self) -> str: - """Directory for extracted source code.""" return self.src_dir @property def src_dir(self) -> str: - """Version-specific source directory.""" return os.path.join(self.work_dir, "src") @property def build_dir(self) -> str: - """Out-of-tree build directory.""" return os.path.join(self.src_dir, "build") # ======================================================================== @@ -251,92 +181,51 @@ def build_dir(self) -> str: # ======================================================================== class APTComponent(Component): - """APT packages management component (versionless).""" - # APT-specific prerequisites with specialization download_prerequisites: List[str] = Component.download_prerequisites + [ - "apt-rdepends", # Advanced dependency resolution and analysis tool + "apt-rdepends", # For resolving APT package dependencies ] def __init__(self): super().__init__("apt") - + @property - def basic_tools(self) -> List[str]: - """Basic tools from Dockerfile basic-tools stage.""" + def all_packages(self) -> List[str]: return [ - "git", # For xmake initialize + "git", # For XMake initialization + "ca-certificates", # For SSL verification (needed by git) + "ninja", # For the CMake and XMake build systems + "curl", # For XMake initialization + "make" # For XMake initialization ] - - @property - def all_packages(self) -> List[str]: - """Complete package list for development container.""" - return self.basic_tools class UVComponent(Component): - """ - UV Python package manager component.
- - Manages: - โ€ข UV standalone binary distribution - โ€ข Python interpreter installation (via UV's python management) - โ€ข Python packages from pyproject.toml (wheels, dependencies) - - Cache Strategy: - โ€ข cache_dir/ - UV tarball cache (Docker layer, for build efficiency) - - Only caches UV binary tarball downloads - - As uv do not provide download python packages separately, we have to give up caching python binary - - Not mounted as volume, ensures Docker layer caching - - โ€ข package_dir/ - Multi-purpose directory for cross-stage transfer - - UV binary for installation - - Python installation cache (UV_CACHE_DIR during python install, cached the same directory with python packages) - - Python packages cache (pip wheels, venv) - - Transferred to expand-stage for installation - - Why this design: - โ€ข UV tarball cached separately for Docker layer reuse - โ€ข Python install uses package_dir as UV_CACHE_DIR (no separate download cache) - โ€ข Python packages cache goes to standard location for later stages - โ€ข All needed files in package_dir for expand-stage - """ - base_url = "https://github.com/astral-sh/uv/releases/download/{version}" tarball_name_pattern = "uv-{machine}-unknown-linux-gnu.tar.gz" def __init__(self): version = TOOLCHAIN_VERSIONS["uv"] super().__init__("uv", version) - # Python version managed by this UV instance self.python_version = TOOLCHAIN_VERSIONS["python"] @property def tarball_cache_dir(self) -> str: - """Directory where UV binary tarball is cached (Docker layer).""" return os.path.join(self.cache_dir, "tarball") @property def tarball_package_dir(self) -> str: - """Directory where UV binary tarball is cached (Docker layer).""" return os.path.join(self.package_dir, "tarball") @property def install_dir(self) -> str: - """Directory where UV binary will be installed.""" return "/root/.local/bin" @property def packages_package_dir(self) -> str: - """ - UV_CACHE_DIR for Python installation phase. - Points to package_dir to avoid separate cache layer. 
- """ return os.path.join(self.package_dir, "uv-packages") class XMakeComponent(Component): - """XMake build system component.""" - base_url = "https://github.com/xmake-io/xmake/releases/download/v{version}" tarball_name_pattern = "xmake-bundle-v{version}.{system}.{machine}" @@ -345,8 +234,6 @@ def __init__(self): super().__init__("xmake", version) class CMakeComponent(Component): - """CMake build system component.""" - base_url = "https://github.com/Kitware/CMake/releases/download/v{version}" tarball_name_pattern = "cmake-{version}-linux-x86_64.sh" verification_name_pattern = "cmake-{version}-SHA-256.txt" @@ -355,19 +242,40 @@ def __init__(self): version = TOOLCHAIN_VERSIONS["cmake"] super().__init__("cmake", version) +class P7ZipComponent(Component): + build_prerequisites: List[str] = [ + "p7zip-full", + ] + + compression_level: str = "9" # Maximum compression (0-9) + + def __init__(self): + # p7zip doesn't have a version we track, using "system" as placeholder + super().__init__("p7zip", "system") + + @property + def compression_options(self) -> List[str]: + return [ + f"-t7z", # Archive type + f"-mx={self.compression_level}", # Compression level + "-mmt=on", # Use all available CPU cores + "-ms=on" # Better compression for similar files + ] + + @property + def sfx_option(self) -> str: + return f"-sfx7zCon.sfx" + class ToolchainComponent(Component): - """Main toolchain component containing glibc, gcc, llvm, linux sub-components.""" def __init__(self): - super().__init__("toolchain") + super().__init__("toolchain", "custom") - # Create sub-components self.glibc: GlibcSubComponent = GlibcSubComponent(self) self.gcc: GccSubComponent = GccSubComponent(self) self.llvm: LlvmSubComponent = LlvmSubComponent(self) self.linux: LinuxSubComponent = LinuxSubComponent(self) - # Sub-component registry self.sub_components: list[ToolchainSubComponent] = [ self.glibc, self.gcc, @@ -377,7 +285,6 @@ def __init__(self): @property def sysroot_dir(self) -> str: - """Sysroot directory with version-specific naming.""" return f"{self.package_dir}/sysroot/{self.host_triplet}/{self.target_triplet}/glibc{self.glibc.version}-libstdc++{self.gcc.version}-linux{self.linux.version}" # ======================================================================== @@ -385,14 +292,13 @@ def sysroot_dir(self) -> str: # ======================================================================== class GlibcSubComponent(ToolchainSubComponent): - """GNU C Library sub-component.""" base_url = "https://ftpmirror.gnu.org/gnu/glibc" tarball_name_pattern = "glibc-{version}.tar.xz" verification_name_pattern = "glibc-{version}.tar.xz.sig" build_prerequisites: List[str] = [ - "make", # GNU Make build automation - "binutils", # Binary utilities (assembler, linker, etc.) + "make", + "binutils", "gawk", # Text processing (required by glibc build system) "bison", # Parser generator (required by glibc build system) "gcc-9", # GNU C compiler version 9 (for glibc < 2.36) @@ -405,19 +311,18 @@ def __init__(self, parent_component: ToolchainComponent): class GccSubComponent(ToolchainSubComponent): - """GNU Compiler Collection sub-component.""" base_url = "https://ftpmirror.gnu.org/gnu/gcc/gcc-{version}" tarball_name_pattern = "gcc-{version}.tar.xz" verification_name_pattern = "gcc-{version}.tar.xz.sig" build_prerequisites: List[str] = [ - "make", # GNU Make build automation - "binutils", # Binary utilities (assembler, linker, etc.) 
- "file", # File type identification (libcc1 requires this tool) + "make", + "binutils", + "file", # File type identification (libcc1 requires this tool) - "gcc-14", # Latest GNU C compiler (for bootstrapping) - "g++-14", # Latest GNU C++ compiler (for bootstrapping) - "libstdc++-14-dev", # Multiple Precision Floating-Point Reliable Library + "gcc-14", + "g++-14", + "libstdc++-14-dev", *ToolchainSubComponent.build_prerequisites ] @@ -440,25 +345,25 @@ def target_libs(self) -> List[str]: class LlvmSubComponent(ToolchainSubComponent): - """LLVM Project sub-component.""" - base_url = "https://github.com/llvm/llvm-project/releases/download/llvmorg-{version}" tarball_name_pattern = "llvm-project-{version}.src.tar.xz" verification_name_pattern = "llvm-project-{version}.src.tar.xz.sig" + download_prerequisites = ToolchainSubComponent.download_prerequisites + [ + "git", # Required for git clone llvm + ] + def __init__(self, parent_component: ToolchainComponent): super().__init__("llvm", parent_component) class LinuxSubComponent(ToolchainSubComponent): - """Linux Kernel Headers sub-component.""" - base_url = "https://github.com/torvalds/linux/archive/refs/tags" tarball_name_pattern = "v{version}.tar.gz" - verification_name_pattern = "" # Linux kernel releases don't include separate signature files + verification_name_pattern = "" build_prerequisites: List[str] = [ - "make", # GNU Make build automation - "binutils", # Binary utilities (assembler, linker, etc.) + "make", + "binutils", "rsync", # File synchronization (Linux kernel headers) "gcc-9", # Even though we don't build the kernel, configure requires gcc @@ -470,26 +375,11 @@ def __init__(self, parent_component: ToolchainComponent): super().__init__("linux", parent_component) class CliceSetupScriptsComponent(Component): - """ - Clice setup scripts and configuration component. - - Contains Python scripts and configuration files needed for container setup, - packaged as a complete directory structure: - - config/build_config.py: Configuration definitions - - config/default-toolchain-version.json: Version information - - docker/linux/utility/local_setup.py: Final container setup script - - docker/linux/utility/build_utils.py: Utility functions - - These files are packaged preserving their directory structure and will be - executed in-place during container expansion (no extraction to CLICE_WORKDIR needed). - """ - def __init__(self): - super().__init__("clice-setup-scripts") + super().__init__("clice-setup-scripts", "project") @property def files_to_copy(self) -> list[str]: - """List of files to copy with their relative paths (preserving directory structure).""" return [ 'config/build_config.py', 'config/default-toolchain-version.json', @@ -498,45 +388,35 @@ def files_to_copy(self) -> list[str]: ] class BashrcComponent(Component): - """ - Bash configuration component. - - Contains the .bashrc file with: - - Environment variables (PATH, etc.) 
- - Container entrypoint script (auto Python env setup) - """ + """Bash configuration with environment variables and container entrypoint.""" def __init__(self): - super().__init__("bashrc") + super().__init__("bashrc", "project") @property def bashrc_path(self) -> str: - """Path to .bashrc file in package.""" return os.path.join(self.package_dir, ".bashrc") @property def entrypoint_script_source(self) -> str: - """Path to container-entrypoint.sh source file.""" - return os.path.join(CLICE_WORKDIR, "docker/linux/container-entrypoint.sh") + return os.path.join(PROJECT_ROOT, "docker/linux/container-entrypoint.sh") # ======================================================================== # ๐Ÿ—๏ธ Component Instances and Build Stage Organization # ======================================================================== -# Create component instances APT = APTComponent() UV = UVComponent() XMAKE = XMakeComponent() CMAKE = CMakeComponent() +P7ZIP = P7ZipComponent() TOOLCHAIN = ToolchainComponent() -CLICE_SETUP_SCRIPTS = CliceSetupScriptsComponent() BASHRC = BashrcComponent() # ======================================================================== # ๐Ÿ“‹ Build Stage Component Groups # ======================================================================== -# Dependencies downloader stage components DEPENDENCIES_DOWNLOADER_STAGE: list[Component] = [ APT, UV, @@ -544,22 +424,16 @@ def entrypoint_script_source(self) -> str: XMAKE, ] -# Image packer stage components (scripts and configs that go into package) IMAGE_PACKER_STAGE: list[Component] = [ - CLICE_SETUP_SCRIPTS, BASHRC, ] -# Toolchain builder stage components TOOLCHAIN_BUILDER_STAGE: list[Component] = [ TOOLCHAIN, ] -# Master component registry ALL_COMPONENTS = [ *DEPENDENCIES_DOWNLOADER_STAGE, *IMAGE_PACKER_STAGE, *TOOLCHAIN_BUILDER_STAGE, ] - - diff --git a/config/default-toolchain-version.json b/config/default-toolchain-version.json index 8f55df48..4913612e 100644 --- a/config/default-toolchain-version.json +++ b/config/default-toolchain-version.json @@ -1,5 +1,5 @@ { - "xmake": "3.0.2", + "xmake": "3.0.3", "cmake": "3.31.8", "python": "3.13", "uv": "0.9.2", diff --git a/docker/.dockerignore b/docker/.dockerignore deleted file mode 100644 index 24bef13a..00000000 --- a/docker/.dockerignore +++ /dev/null @@ -1,10 +0,0 @@ -build/ -out/ -.cache -__pycache__ - -.clice/ -.llvm*/ -.xmake/ -.vscode/ -.vs/ diff --git a/docker/linux/Dockerfile b/docker/linux/Dockerfile index a3e2c3fe..62b0b7ff 100644 --- a/docker/linux/Dockerfile +++ b/docker/linux/Dockerfile @@ -1,32 +1,28 @@ # check=skip=InvalidDefaultArgInFrom,experimental=all # ======================================================================== -# ๐Ÿš€ Clice Dev Container Multi-Stage Build System -# ======================================================================== -# File: docker/linux/Dockerfile -# Purpose: Optimized multi-stage build for Clice development container -# -# This Dockerfile implements Python-first build approach with minimal -# system dependencies, letting Python scripts handle dependency installation. 
+# Clice Dev Container Multi-Stage Build System # ======================================================================== # Arguments passed from docker image build system ARG COMPILER ARG PACKED_IMAGE_NAME -ARG CLICE_WORKDIR=/clice # Global config shared in multi-stage builds +ARG CLICE_DEV_CONTAINER_BASE_IMAGE=ubuntu:24.04 +ARG CLICE_WORKDIR=/clice ARG RELEASE_PACKAGE_DIR=/clice-dev-container-package -ARG PACKED_RELEASE_PACKAGE_PATH=/release-pkg.tar.xz +ARG PACKED_RELEASE_PACKAGE_PATH=/release-pkg.7z.run ARG ENVIRONMENT_CONFIG_FILE=/root/.bashrc ARG BUILD_CACHE_DIR=/var/cache/clice-dev-container +ARG SETUP_SCRIPTS_DIR=${RELEASE_PACKAGE_DIR}/setup_scripts-unknown # APT system paths configuration ARG APT_CACHE_DIR=/var/cache/apt ARG APT_STATE_CACHE_DIR=/var/lib/apt # UV docker layer cache configuration -ARG UV_PACKAGE_DIR_NAME=uv-package +ARG UV_PACKAGE_DIR_NAME=uv-packages ARG UV_TARBALL_DIR_NAME=tarball # Python build scripts communicate via these environment variables @@ -40,8 +36,8 @@ BUILD_CACHE_DIR=${BUILD_CACHE_DIR}" # ======================================================================== # ๐Ÿ Base Stage: Python Environment Foundation # ======================================================================== -FROM ubuntu:24.04 AS base-python-environment-for-build -LABEL description="Base image with consistent Python and uv environment for builder stages" +FROM ${CLICE_DEV_CONTAINER_BASE_IMAGE} AS base-python-environment-for-build +LABEL description="Base image with Python and uv environment for builder stages" ARG CLICE_WORKDIR ARG APT_CACHE_DIR @@ -60,14 +56,14 @@ ENV DEBIAN_FRONTEND=noninteractive COPY docker/linux/utility/pyproject.toml ${CLICE_WORKDIR}/docker/linux/utility/pyproject.toml COPY config/default-toolchain-version.json ${CLICE_WORKDIR}/config/default-toolchain-version.json -# Install minimal system dependencies and uv -RUN --mount=type=cache,target=${APT_CACHE_DIR},sharing=locked \ - --mount=type=cache,target=${APT_STATE_CACHE_DIR},sharing=locked \ - --mount=type=cache,target=${BUILD_CACHE_DIR},sharing=locked \ +# Install minimal system dependencies, uv, and Python +RUN --mount=type=cache,target=${APT_CACHE_DIR},sharing=locked,id=python-build-env-apt \ + --mount=type=cache,target=${APT_STATE_CACHE_DIR},sharing=locked,id=python-build-env-apt-state \ + --mount=type=cache,target=${BUILD_CACHE_DIR},sharing=locked,id=python-build-env-cache \ bash -eux - <<'SCRIPT' set -e - # Disable auto cleanup to keep apt cache + # Disable APT auto cleanup to keep apt cache # This option would override Binary::apt::APT::Keep-Downloaded-Packages rm -f /etc/apt/apt.conf.d/docker-clean # It is strange that apt will accept APT::Keep-Downloaded-Packages in commandline, @@ -105,7 +101,7 @@ RUN --mount=type=cache,target=${APT_CACHE_DIR},sharing=locked \ UV_BUILD_CACHE_TARBALL_FILE="${UV_BUILD_CACHE_TARBALL_DIR}/${UV_TARBALL_NAME}" mkdir -p "${UV_BUILD_CACHE_TARBALL_DIR}" - # Create package directory for UV + # Create package directory for UV, to be copied to release package UV_PACKAGE_ROOT="${RELEASE_PACKAGE_DIR}/uv-${UV_VERSION}" UV_PACKAGE_CACHE_DIR="${UV_PACKAGE_ROOT}/${UV_PACKAGE_DIR_NAME}" UV_PACKAGE_TARBALL_DIR="${UV_PACKAGE_ROOT}/${UV_TARBALL_DIR_NAME}" @@ -133,11 +129,6 @@ RUN --mount=type=cache,target=${APT_CACHE_DIR},sharing=locked \ echo "โœ… UV installed successfully:" uv --version - # Save UV version to a file for expanded-image stage to read - # This allows expanded-image to find UV package directory without globbing - echo "$UV_VERSION" > "${RELEASE_PACKAGE_DIR}/.uv-version" 
- echo "๐Ÿ“ Saved UV version to: ${RELEASE_PACKAGE_DIR}/.uv-version" - # Get Python version from configuration PYTHON_VERSION=$(jq -r .python ${CLICE_WORKDIR}/config/default-toolchain-version.json) echo "๐Ÿ Installing Python version: $PYTHON_VERSION" @@ -168,7 +159,7 @@ WORKDIR ${CLICE_WORKDIR} # ๐Ÿ—๏ธ Stage 1: Compiler Toolchain Builder # ======================================================================== FROM base-python-environment-for-build AS toolchain-builder -LABEL description="Builds custom compiler toolchain with static libstdc++ for glibc compatibility" +LABEL description="Builds custom compiler toolchain" ARG CLICE_WORKDIR ARG APT_CACHE_DIR @@ -201,7 +192,7 @@ SCRIPT # ๐Ÿ—๏ธ Stage 2: Dependencies Downloader (Parallel to Stage 1) # ======================================================================== FROM base-python-environment-for-build AS dependencies-downloader -LABEL description="Downloads dev-container dependencies for cache optimization" +LABEL description="Downloads dev-container dependencies" ARG CLICE_WORKDIR ARG APT_CACHE_DIR @@ -243,19 +234,22 @@ ARG CLICE_WORKDIR ARG RELEASE_PACKAGE_DIR ARG APT_CACHE_DIR ARG APT_STATE_CACHE_DIR +ARG BUILD_CACHE_DIR ARG PYTHON_BUILD_SCRIPT_BASE_ENV_VARIABLES +ARG SETUP_SCRIPTS_DIR -# For execution in this layer and for package in final image +# For execution in this layer only COPY config/build_config.py ${CLICE_WORKDIR}/config/build_config.py COPY config/default-toolchain-version.json ${CLICE_WORKDIR}/config/default-toolchain-version.json +COPY docker/linux/container-entrypoint.sh ${CLICE_WORKDIR}/docker/linux/container-entrypoint.sh COPY docker/linux/utility/build_utils.py ${CLICE_WORKDIR}/docker/linux/utility/build_utils.py - -# For execution in this layer only COPY docker/linux/utility/create_release_package.py ${CLICE_WORKDIR}/docker/linux/utility/create_release_package.py -COPY docker/linux/container-entrypoint.sh ${CLICE_WORKDIR}/docker/linux/container-entrypoint.sh -# For final packaging only -COPY docker/linux/utility/local_setup.py ${CLICE_WORKDIR}/docker/linux/utility/local_setup.py +# For package +COPY config/build_config.py ${SETUP_SCRIPTS_DIR}/config/build_config.py +COPY config/default-toolchain-version.json ${SETUP_SCRIPTS_DIR}/config/default-toolchain-version.json +COPY docker/linux/utility/local_setup.py ${SETUP_SCRIPTS_DIR}/docker/linux/utility/local_setup.py +COPY docker/linux/utility/build_utils.py ${SETUP_SCRIPTS_DIR}/docker/linux/utility/build_utils.py # Copy outputs from previous stages # Merge by RELEASE_PACKAGE_DIR structure, each component has its own directory @@ -269,8 +263,9 @@ COPY --from=toolchain-builder ${RELEASE_PACKAGE_DIR} ${RELEASE_PACKAGE_DIR} COPY --from=dependencies-downloader ${RELEASE_PACKAGE_DIR} ${RELEASE_PACKAGE_DIR} # Setup Python project environment and create final release package -RUN --mount=type=cache,target=${APT_CACHE_DIR},sharing=locked,id=packed-image-apt \ - --mount=type=cache,target=${APT_STATE_CACHE_DIR},sharing=locked,id=packed-image-apt-state \ +RUN --mount=type=cache,target=${APT_CACHE_DIR},sharing=locked,id=image-packer-apt \ + --mount=type=cache,target=${APT_STATE_CACHE_DIR},sharing=locked,id=image-packer-apt-state \ + --mount=type=cache,target=${BUILD_CACHE_DIR},sharing=locked,id=dependencies-downloader-cache \ bash -eux - <<'SCRIPT' # Activate Python environment @@ -287,19 +282,29 @@ SCRIPT # ======================================================================== # ๐Ÿ—๏ธ Stage 4: Release Package # 
======================================================================== -FROM ubuntu:24.04 AS packed-image +FROM ${CLICE_DEV_CONTAINER_BASE_IMAGE} AS packed-image +ARG CLICE_WORKDIR ARG PACKED_RELEASE_PACKAGE_PATH # Copy only the packed release package # All scripts, configs, and .bashrc are already inside the package COPY --from=image-packer ${PACKED_RELEASE_PACKAGE_PATH} ${PACKED_RELEASE_PACKAGE_PATH} +# Copy build scripts and Dockerfile +# These are needed by the final image +# Instead of using the local build.sh and Dockerfile, we use the versions packed here, +# so we can make breaking changes to the build scripts without breaking released images +COPY docker/linux/utility/common.sh ${CLICE_WORKDIR}/docker/linux/utility/common.sh +COPY docker/linux/build.sh ${CLICE_WORKDIR}/docker/linux/build.sh +COPY docker/linux/Dockerfile ${CLICE_WORKDIR}/docker/linux/Dockerfile + # ======================================================================== # ๐Ÿ—๏ธ Stage 5: Development Image (Expanded) # ======================================================================== FROM ${PACKED_IMAGE_NAME} AS expanded-image -LABEL description="Fully expanded development image with all tools installed" +LABEL description="Fully expanded development image" + ARG CLICE_WORKDIR ARG RELEASE_PACKAGE_DIR @@ -307,26 +312,39 @@ ARG PACKED_RELEASE_PACKAGE_PATH ARG PYTHON_BUILD_SCRIPT_BASE_ENV_VARIABLES ARG UV_PACKAGE_DIR_NAME ARG UV_TARBALL_DIR_NAME +ARG SETUP_SCRIPTS_DIR ENV PATH="/root/.local/bin:${PATH}" # Expand the release image into a full development environment -# We don't mark here with --mount=type=cache because this is executed on clice developer environment +# We don't use --mount=type=cache here because this stage is executed in the clice developer's environment # clice developer do not have the cache from previous stages RUN bash -eux - <<'SCRIPT' - # Extract the release package first - echo "๐Ÿ“ฆ Extracting release package..." + # Extract self-extracting 7z SFX package (no p7zip-full required) + echo "๐Ÿ“ฆ Extracting self-extracting release package..." mkdir -p "${RELEASE_PACKAGE_DIR}" - tar -xJf "${PACKED_RELEASE_PACKAGE_PATH}" -C "${RELEASE_PACKAGE_DIR}" + + # Make the SFX archive executable and run it + chmod +x "${PACKED_RELEASE_PACKAGE_PATH}" + + # Run SFX to extract to RELEASE_PACKAGE_DIR + # -o: output directory + # -y: assume yes for all prompts + "${PACKED_RELEASE_PACKAGE_PATH}" -o"${RELEASE_PACKAGE_DIR}" -y + echo "โœ… Release package extracted!" # Install UV and Python from packaged files (offline installation) echo "๐Ÿ“ฆ Installing UV from package..." + + # Find UV version from directory name (uv-*) + UV_PACKAGE_ROOT=$(find "${RELEASE_PACKAGE_DIR}" -maxdepth 1 -type d -name "uv-*" | head -n 1) + + # Extract version from directory name (e.g., uv-0.9.2 -> 0.9.2) + UV_VERSION=$(basename "$UV_PACKAGE_ROOT" | sed 's/^uv-//') echo "๐Ÿ“‹ UV version: ${UV_VERSION}" + echo "๐Ÿ“ UV package root: ${UV_PACKAGE_ROOT}" - UV_PACKAGE_ROOT="${RELEASE_PACKAGE_DIR}/uv-${UV_VERSION}" UV_PACKAGE_CACHE_DIR="${UV_PACKAGE_ROOT}/${UV_PACKAGE_DIR_NAME}" UV_TARBALL_PATH="${UV_PACKAGE_ROOT}/${UV_TARBALL_DIR_NAME}/uv-*.tar.gz" UV_INSTALL_DIR="/root/.local/bin" @@ -336,21 +354,19 @@ RUN bash -eux - <<'SCRIPT' tar -xzf ${UV_TARBALL_PATH} -C ${UV_INSTALL_DIR} --strip-components=1 echo "โœ… UV installed successfully!"
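+ # Note: UV_TARBALL_PATH contains a shell glob (uv-*.tar.gz); it is deliberately + # left unquoted in the tar command above so the shell expands it to the actual tarball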
- # Install Python + # Install Python - read version from .python-version file in UV package PYTHON_VERSION_FILE="${UV_PACKAGE_ROOT}/.python-version" PYTHON_VERSION=$(cat "${PYTHON_VERSION_FILE}") echo "๐Ÿ“‹ Python version: ${PYTHON_VERSION}" echo "๐Ÿ Installing Python ${PYTHON_VERSION}..." - UV_CACHE_DIR=${UV_PACKAGE_CACHE_DIR} uv python install "${PYTHON_VERSION}" --default + UV_CACHE_DIR=${UV_PACKAGE_CACHE_DIR} uv python install "${PYTHON_VERSION}" --default --preview-features python-install-default echo "โœ… Python ${PYTHON_VERSION} installed successfully!" - # Run local setup directly from packaged scripts (no extraction needed) + # Run local setup directly from packaged scripts (no download needed) echo "๐Ÿš€ Running local setup to expand environment..." - # Get path to local_setup.py in package - CLICE_SETUP_SCRIPTS_DIR="${RELEASE_PACKAGE_DIR}/clice-setup-scripts-unknown" - LOCAL_SETUP_SCRIPT="${CLICE_SETUP_SCRIPTS_DIR}/docker/linux/utility/local_setup.py" + LOCAL_SETUP_SCRIPT="${SETUP_SCRIPTS_DIR}/docker/linux/utility/local_setup.py" # Run local setup directly from package (no venv needed, using system Python) eval ${PYTHON_BUILD_SCRIPT_BASE_ENV_VARIABLES} \ diff --git a/docker/linux/build.sh b/docker/linux/build.sh index d525a87c..54098544 100644 --- a/docker/linux/build.sh +++ b/docker/linux/build.sh @@ -1,13 +1,6 @@ #!/bin/bash # ======================================================================== -# ๐Ÿš€ Clice Development Container Builder -# ======================================================================== -# File: docker/linux/build.sh -# Purpose: Build Clice development container with all tools and dependencies -# -# This script builds a unified development container containing all necessary -# components for the Clice development environment. The container is ready -# to use immediately with all tools pre-installed and configured. 
+# Clice Development Container Builder # ======================================================================== set -e @@ -27,10 +20,6 @@ PROJECT_ROOT="$(pwd)" trap 'cd "${ORIG_PWD}"' EXIT -# ======================================================================== -# โš™๏ธ Default Configuration -# ======================================================================== - COMPILER="${DEFAULT_COMPILER}" BUILD_STAGE="${DEFAULT_BUILD_STAGE}" CACHE_FROM="" @@ -70,33 +59,10 @@ EXAMPLES: $0 --cache-from clice-io/clice-dev:cache Use cache from existing image $0 --cache-to type=registry,ref=myregistry/myimage:cache Push cache -VERSION-AWARE BUILDING: - When building expanded-image with --version: - โ€ข First checks for existing release image: clice-io/clice:linux-clang-v1.0.0 - โ€ข If found, builds development image from that release image - โ€ข If not found, builds full multi-stage (release + development) - โ€ข Development image will be tagged as: clice-io/clice:linux-clang-v1.0.0-expanded - -BUILD MODES: - โ€ข Multi-stage build: Builds both release and development images - โ€ข Single-stage build: Builds only the specified stage - โ€ข Auto-expansion: Development image can build from existing release image - DEBUG MODE: --debug enables interactive debugging with docker buildx debug build - โ€ข Requires Docker 23.0+ with BuildKit experimental features - โ€ข Automatically sets BUILDX_EXPERIMENTAL=1 - โ€ข On build failure, you can use debug commands to inspect the build state - โ€ข Example debug commands: - - docker buildx debug ps List debug sessions - - docker buildx debug exec Execute commands in failed build - - docker buildx debug shell Open interactive shell in failed build - -The container includes: - โ€ข Custom toolchain (fully installed and ready) - โ€ข All development dependencies - โ€ข Complete development environment - โ€ข Version-aware release image support + Requires Docker 23.0+ with BuildKit experimental features + On build failure, you can use debug commands to inspect the build state EOF } @@ -184,7 +150,6 @@ if [ "$REBUILD" = "true" ]; then fi fi -# Rebuild BUILD_ARGS with correct release base image BUILD_ARGS=( "--progress=plain" "--target" @@ -201,7 +166,6 @@ BUILD_ARGS=( "BUILDKIT_INLINE_CACHE=1" ) -# Add cache configuration if [ -n "$CACHE_FROM" ]; then BUILD_ARGS+=("--cache-from=${CACHE_FROM}") fi @@ -211,10 +175,9 @@ if [ -n "$CACHE_TO" ]; then echo "๐Ÿ“ Starting build with cache-to logging enabled..." fi -# Add final arguments to complete the build command BUILD_ARGS+=("-t" "${TARGET_IMAGE_NAME}" "-f" "${DOCKERFILE_PATH}" ".") -# Execute build with or without debug mode +# Execute with or without debug mode if [ "$DEBUG" = "true" ]; then # Enable BuildKit experimental features for debug mode echo "๐Ÿ› Debug mode enabled (BUILDX_EXPERIMENTAL=1)" @@ -230,7 +193,6 @@ ${BUILD_COMMAND} "${BUILD_ARGS[@]}" BUILD_SUCCESS=$? -# Log cache operations if cache-to was used if [ -n "$CACHE_TO" ]; then echo "๐Ÿ’พ Cache operations completed. Cache pushed to: ${CACHE_TO}" fi @@ -260,7 +222,6 @@ if [ $BUILD_SUCCESS -eq 0 ]; then echo "๐Ÿš€ NEXT STEPS:" echo " โ€ข Run container: ./docker/linux/run.sh --compiler ${COMPILER}" echo " โ€ข Use container: docker run --rm -it ${TARGET_IMAGE_NAME} /bin/bash" - echo " โ€ข Development environment is ready to use immediately" fi echo "=========================================================================" @@ -269,11 +230,6 @@ else echo "โŒ BUILD FAILED!" 
echo "=========================================================================" echo "๐Ÿ” Check the build output above for error details" - echo "๐Ÿ’ก Common issues:" - echo " โ€ข Network connectivity problems" - echo " โ€ข Insufficient disk space" - echo " โ€ข Docker daemon not running" - echo " โ€ข Invalid build arguments" echo "=========================================================================" exit 1 fi diff --git a/docker/linux/container-entrypoint.sh b/docker/linux/container-entrypoint.sh index c30c347f..0dfd7dfe 100644 --- a/docker/linux/container-entrypoint.sh +++ b/docker/linux/container-entrypoint.sh @@ -13,14 +13,9 @@ # Only run in interactive shells to avoid breaking non-interactive scripts if [[ $- == *i* ]]; then - # Check if .venv exists, if not, run uv sync - - # Read UV version to set cache directory - UV_VERSION_FILE="${RELEASE_PACKAGE_DIR}/.uv-version" - UV_VERSION=$(cat "${UV_VERSION_FILE}") - UV_PACKAGE_ROOT="${RELEASE_PACKAGE_DIR}/uv-${UV_VERSION}" + UV_PACKAGE_ROOT=$(find "${RELEASE_PACKAGE_DIR}" -maxdepth 1 -type d -name "uv-*" | head -n 1) UV_PACKAGE_CACHE_DIR="${UV_PACKAGE_ROOT}/${UV_PACKAGE_DIR_NAME}" - + echo "๐Ÿ“ฆ Running uv sync..." if UV_CACHE_DIR="${UV_PACKAGE_CACHE_DIR}" uv sync --project "${CLICE_WORKDIR}/pyproject.toml"; then @@ -34,3 +29,5 @@ if [[ $- == *i* ]]; then source "${CLICE_WORKDIR}/.venv/bin/activate" fi fi + +alias ll='ls -alF --color=auto' diff --git a/docker/linux/run.sh b/docker/linux/run.sh index 53213146..a1fb3ca3 100644 --- a/docker/linux/run.sh +++ b/docker/linux/run.sh @@ -1,16 +1,6 @@ #!/bin/bash # ======================================================================== -# ๐Ÿš€ Clice Development Container Runner -# ======================================================================== -# File: docker/linux/run.sh -# Purpose: Run and manage Clice development containers -# -# This script handles the complete container lifecycle including: -# โ€ข Automatic image building/pulling if needed -# โ€ข Container creation and management -# โ€ข Development environment initialization -# -# Note: Auto-expansion logic (Release โ†’ Development) is handled in build.sh +# Clice Development Container Runner # ======================================================================== set -e @@ -30,10 +20,6 @@ PROJECT_ROOT="$(pwd)" trap 'cd "${ORIG_PWD}"' EXIT -# ======================================================================== -# โš™๏ธ Default Configuration -# ======================================================================== - COMPILER="${DEFAULT_COMPILER}" RESET="false" UPDATE="false" @@ -64,11 +50,6 @@ EXAMPLES: $0 --reset Remove container and recreate $0 --update Pull latest image and update $0 bash Run specific command in container - -CONTAINER LIFECYCLE: - 1. Check/build development image (build.sh handles auto-expansion) - 2. Create/start container from development image - 3. Attach to development shell EOF } @@ -149,12 +130,11 @@ elif ! docker image inspect "${PACKED_IMAGE_NAME}" >/dev/null 2>&1; then UPDATE_REASON="๐Ÿ”„ Packed image ${PACKED_IMAGE_NAME} not found locally, pulling..." fi -# Handle image update if needed if [ -n "$UPDATE_REASON" ]; then echo "${UPDATE_REASON}" - # Try to remove existing expanded image before pulling + # Remove existing expanded image before pulling (avoid conflicts) if docker image inspect "${EXPANDED_IMAGE_NAME}" >/dev/null 2>&1; then echo "๐Ÿงน Cleaning existing expanded image: ${EXPANDED_IMAGE_NAME}..." if ! 
docker rmi "${EXPANDED_IMAGE_NAME}" >/dev/null 2>&1; then @@ -194,12 +174,7 @@ if ! docker image inspect "${EXPANDED_IMAGE_NAME}" >/dev/null 2>&1; then echo "=========================================================================" # Run packed image container and execute its internal build.sh for expansion - # Why use container's build.sh instead of local: - # 1. Container's build.sh is the same version as the packed image - # 2. Container has all the correct tools and environment - # 3. Local build.sh might be from a different branch/version - # 4. Ensures consistent expansion regardless of host environment - # + # To keep the expansion process consistent and reliable, we use the build.sh script from the container itself. # Mounts: # โ€ข /var/run/docker.sock - Allow container to build images on host Docker daemon if docker run --rm \ @@ -216,10 +191,6 @@ if ! docker image inspect "${EXPANDED_IMAGE_NAME}" >/dev/null 2>&1; then echo "=========================================================================" echo "โŒ EXPANSION FAILED" echo "=========================================================================" - echo "๐Ÿ’ก Troubleshooting tips:" - echo " โ€ข Check packed image is valid: docker run --rm ${PACKED_IMAGE_NAME} ls -la" - echo " โ€ข Review expansion logs above for specific error" - echo "=========================================================================" exit 1 fi else @@ -230,7 +201,7 @@ fi if docker ps -a --format '{{.Names}}' | grep -q "^${CONTAINER_NAME}$"; then echo "๐Ÿ” Found existing container: ${CONTAINER_NAME}" - # Get image IDs for comparison (more reliable than names) + # Check if container uses current development image (compare image IDs) CONTAINER_IMAGE_ID=$(docker inspect --format='{{.Image}}' "${CONTAINER_NAME}" 2>/dev/null || echo "") EXPECTED_IMAGE_ID=$(docker inspect --format='{{.Id}}' "${EXPANDED_IMAGE_NAME}" 2>/dev/null || echo "") @@ -240,9 +211,9 @@ if docker ps -a --format '{{.Names}}' | grep -q "^${CONTAINER_NAME}$"; then echo "๐Ÿš€ Starting and attaching to container..." else CONTAINER_IMAGE_NAME=$(docker inspect --format='{{.Config.Image}}' "${CONTAINER_NAME}" 2>/dev/null || echo "unknown") - echo "โš ๏ธ WARNING: Container image mismatch detected!" + echo "โš ๏ธ WARNING: Container image mismatch!" echo " ๐Ÿ“ฆ Container using: ${CONTAINER_IMAGE_NAME} (ID: ${CONTAINER_IMAGE_ID})" - echo " ๐ŸŽฏ Expected image: ${EXPANDED_IMAGE_NAME} (ID: ${EXPECTED_IMAGE_ID})" + echo " ๐ŸŽฏ Expected: ${EXPANDED_IMAGE_NAME} (ID: ${EXPECTED_IMAGE_ID})" echo "" echo "๐Ÿ’ก Your container is using a different image version." 
echo "๐Ÿ›ก๏ธ To ensure data safety, please:" @@ -263,7 +234,7 @@ if docker ps -a --format '{{.Names}}' | grep -q "^${CONTAINER_NAME}$"; then exit 0 fi -# Create new container from development image +# Create new container DOCKER_RUN_ARGS=(-it -w "${CONTAINER_WORKDIR}") DOCKER_RUN_ARGS+=(--name "${CONTAINER_NAME}") DOCKER_RUN_ARGS+=(--mount "type=bind,src=${PROJECT_ROOT},target=${CONTAINER_WORKDIR}") diff --git a/docker/linux/utility/build_clice_compiler_toolchain.py b/docker/linux/utility/build_clice_compiler_toolchain.py index e3323473..73eb55ab 100644 --- a/docker/linux/utility/build_clice_compiler_toolchain.py +++ b/docker/linux/utility/build_clice_compiler_toolchain.py @@ -1,130 +1,48 @@ -# ======================================================================== -# ๐Ÿš€ Clice Compiler Toolchain Builder -# ======================================================================== -# File: docker/linux/utility/build_clice_compiler_toolchain.py -# Purpose: Automated toolchain construction orchestrator -# -# This module implements a high-performance parallel build system for -# constructing the complete Clice compiler toolchain from source. -# -# Components Built: -# โ€ข glibc (GNU C Library) -# โ€ข GCC libstdc++ (C++ Standard Library) -# โ€ข Linux Kernel Headers -# โ€ข LLVM Project (prepared for future builds) -# -# Features: -# โ€ข Parallel task execution with dependency resolution -# โ€ข Robust error handling and recovery -# โ€ข GPG signature verification -# โ€ข Automated path fixing for relocatable builds -# ======================================================================== - +#!/usr/bin/env python3 """ -๐Ÿ—๏ธ Clice Compiler Toolchain Builder - -A sophisticated build orchestrator that constructs a complete compiler toolchain -from source components using parallel execution and dependency management. - -This system builds the fundamental components required for the Clice development -environment, including system libraries, C++ standard library, and kernel headers. -All components are built with careful attention to compatibility and performance. - -The build process is organized into clearly defined stages: -1. ๐Ÿ“ฆ Setup - Install prerequisites and prepare environment -2. โฌ‡๏ธ Download - Fetch source archives with verification -3. ๐Ÿ“‚ Extract - Unpack source code to build directories -4. ๐Ÿ”จ Build - Compile and install components with proper configuration -5. ๐Ÿ”ง Post-process - Fix paths and finalize installation - -Each stage is executed in parallel where possible, with automatic dependency -resolution ensuring correct build order. +Builds custom compiler toolchain (glibc, libstdc++, Linux headers) from source +using parallel execution with dependency management. 
""" -import sys import os +import sys -# ======================================================================== -# ๐Ÿ”ง Project Path Configuration -# ======================================================================== -# Dynamic project root discovery - enables importing from parent directories -# This allows the utility scripts to access shared configuration modules +# Ensure utility directory is in Python path for imports project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..')) if project_root not in sys.path: sys.path.insert(0, project_root) -# ======================================================================== -# ๐Ÿ“š Standard Library Imports -# ======================================================================== -from typing import Dict, Set # Type hints for better code clarity +from typing import Dict, Set -# ======================================================================== -# ๐Ÿ› ๏ธ Build System Components -# ======================================================================== from build_utils import ( - Job, # Individual build task representation - ParallelTaskScheduler, # High-performance parallel execution engine - run_command, # Shell command execution with environment control - install_download_prerequisites, # Download prerequisite installation - install_extract_prerequisites, # Extract prerequisite installation - download_and_verify, # Component source download and verification - extract_source, # Component source extraction + Job, + ParallelTaskScheduler, + run_command, + install_download_prerequisites, + install_extract_prerequisites, + download_and_verify, + extract_source, ) -# ======================================================================== -# โš™๏ธ Configuration Constants -# ======================================================================== from config.build_config import ( - TOOLCHAIN_BUILD_ENV_VARS, # Build environment variables - # Import component instances for structured access + TOOLCHAIN_BUILD_ENV_VARS, TOOLCHAIN, - Component, - ToolchainSubComponent + ToolchainComponent, + GccSubComponent, + LinuxSubComponent, + GlibcSubComponent ) -# ======================================================================== -# ๐ŸŽฏ Build Task Implementations -# ======================================================================== -# Each function represents a discrete build task that can be executed -# independently once its dependencies are satisfied. The parallel scheduler -# coordinates execution order based on the dependency graph. -# ======================================================================== - # ======================================================================== # ๐Ÿ“ฆ Environment Setup Tasks # ======================================================================== -def update_apt(): - """ - ๐Ÿ”„ Update APT Package Database - - Refreshes the APT package manager's local database to ensure we have - access to the latest package versions and security updates. - - This is the foundation step that must complete before any package - installation can proceed safely. - """ +def update_apt() -> None: print("๐Ÿ”„ [SETUP] Refreshing APT package database...") run_command("apt update -o DPkg::Lock::Timeout=-1") -def install_build_prerequisites(component): - """ - ๐Ÿ”จ Install Build Stage Prerequisites - - Installs the build prerequisites for all sub-components of the given component. 
-    This collects and deduplicates all build_prerequisites from sub-components,
-    then installs them in a single batch operation.
-
-    For toolchain component, this includes:
-    โ€ข Core build tools (make, binutils, rsync)
-    โ€ข Text processing tools (gawk, bison) for glibc
-    โ€ข GCC 9 toolchain for glibc compilation
-    โ€ข GCC 14 toolchain for libstdc++ compilation
-
-    Args:
-        component: The parent component (e.g., TOOLCHAIN) whose sub-components'
-                   build prerequisites should be installed
-
+def install_build_prerequisites(component: ToolchainComponent) -> None:
+    """
     Note: We maintain multiple GCC versions because glibc requires GCC < 10
     to avoid linker symbol conflicts, while modern libstdc++ benefits from
     the latest compiler features.
@@ -152,11 +70,8 @@ def install_build_prerequisites(component):
 # ========================================================================
 # ๐Ÿ“š GNU C Library (glibc) Tasks
 # ========================================================================
-# glibc is the core system library providing POSIX API implementation.
-# It requires special handling due to its fundamental role in the system.
-# ========================================================================
 
-def fix_glibc_paths():
+def fix_glibc_paths() -> None:
     """
     ๐Ÿ”ง Fix Hardcoded Build Paths in glibc Installation
 
@@ -166,13 +81,6 @@ def fix_glibc_paths():
     This function scans all installed files and removes build-specific paths,
     making the toolchain portable across different installation directories.
-
-    Process:
-    1. Walk through all installed files
-    2. Identify text files (skip binaries)
-    3. Search for hardcoded paths
-    4. Remove absolute path references
-    5. Preserve relative path structure
     """
     search_path = TOOLCHAIN.sysroot_dir
     print(f"๐Ÿ”ง [POST-PROCESS] Sanitizing hardcoded paths in {search_path}...")
@@ -210,25 +118,14 @@
     print(f"โœ… [POST-PROCESS] Path fixing complete ({files_processed} files processed)")
 
-
-def build_and_install_glibc(glibc_component: ToolchainSubComponent, linux_component: ToolchainSubComponent):
-    """
-    ๐Ÿ—๏ธ Build and Install GNU C Library (glibc)
-
-    Configures, compiles, and installs glibc - the foundational C library
-    that provides POSIX API implementation and system call interface.
-
+def build_and_install_glibc(glibc_component: GlibcSubComponent, linux_component: LinuxSubComponent) -> None:
+    """
     Build Configuration:
-    โ€ข Uses GCC 9 (required: GCC < 10 to avoid symbol conflicts)
+    โ€ข Uses GCC 9 (required: GCC < 10 to avoid linker symbol conflicts)
     โ€ข Disables compiler warnings as errors (--disable-werror)
     โ€ข Enables 64-bit libraries, disables 32-bit compatibility
-
-    Post-installation includes path sanitization to ensure relocatable builds.
-
-    Note: glibc is built out-of-tree in a separate build directory to
-    maintain clean separation between source and build artifacts.
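
For readers unfamiliar with glibc's build convention, the out-of-tree flow this docstring refers to looks roughly like the sketch below. The paths, version, and configure flags are illustrative assumptions, not the exact recipe encoded in the component configuration.

    # Illustrative out-of-tree glibc build (paths/flags are assumptions)
    import os
    import subprocess

    src_dir = "/dev-container-build/glibc-2.39/src"      # hypothetical source tree
    build_dir = "/dev-container-build/glibc-2.39/build"  # separate build tree
    sysroot = "/opt/clice-sysroot"                       # hypothetical install target

    os.makedirs(build_dir, exist_ok=True)
    # Configure from the build directory, pointing back at the source tree;
    # glibc refuses to configure inside its own source directory.
    subprocess.run(
        f"{src_dir}/configure --prefix=/usr CC=gcc-9 --disable-werror",
        shell=True, cwd=build_dir, check=True,
    )
    subprocess.run(f"make -j{os.cpu_count()}", shell=True, cwd=build_dir, check=True)
    # DESTDIR staging keeps the host system untouched
    subprocess.run(f"make install DESTDIR={sysroot}", shell=True, cwd=build_dir, check=True)
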
""" + print(f"๐Ÿ—๏ธ [BUILD] Starting {glibc_component.name} compilation...") print(f" ๐Ÿ“‹ Using GCC 9 (required for glibc compatibility)") print(f" ๐ŸŽฏ Target: {TOOLCHAIN.host_triplet} ({TOOLCHAIN.host_machine})") @@ -265,29 +162,8 @@ def build_and_install_glibc(glibc_component: ToolchainSubComponent, linux_compon # ======================================================================== # ๐Ÿง Linux Kernel Headers Installation # ======================================================================== -# Kernel headers provide system call definitions and kernel API interfaces -# required for userspace programs to interact with the Linux kernel. -# ======================================================================== -def install_linux_headers(component): - """ - ๐Ÿง Install Linux Kernel Headers - - Extracts and installs sanitized Linux kernel headers that provide - system call definitions and kernel API interfaces for userspace programs. - - The kernel headers are essential for: - โ€ข System call interface definitions - โ€ข Kernel data structure layouts - โ€ข Device driver interfaces - โ€ข Architecture-specific constants - - Installation Process: - 1. Use kernel's built-in header installation system - 2. Filter out kernel-internal definitions - 3. Install sanitized headers to sysroot/usr - 4. Ensure compatibility with userspace programs - """ +def install_linux_headers(component: LinuxSubComponent) -> None: install_path = os.path.join(TOOLCHAIN.sysroot_dir, "usr") print(f"๐Ÿง [INSTALL] Installing Linux kernel headers...") print(f" ๐Ÿ—๏ธ Architecture: {TOOLCHAIN.host_machine}") @@ -315,52 +191,15 @@ def install_linux_headers(component): # ======================================================================== # ๐Ÿ› ๏ธ GCC Compiler Collection Tasks # ======================================================================== -# GCC provides the C++ standard library (libstdc++) and essential runtime -# libraries. We build only the target libraries, not the full compiler. -# ======================================================================== -def download_gcc_prerequisites(component): - """ - ๐Ÿ“ฆ Download GCC Mathematical Prerequisites - - Downloads and sets up the mathematical libraries required for GCC: - โ€ข GMP (GNU Multiple Precision Arithmetic Library) - โ€ข MPFR (Multiple Precision Floating-Point Reliable Library) - โ€ข MPC (Multiple Precision Complex Library) - - These libraries are essential for GCC's internal computations and - optimizations. The GCC source tree includes a convenience script - that automatically downloads the correct versions. - """ +def download_gcc_prerequisites(component: GccSubComponent) -> None: print(f"๐Ÿ“ฆ [DOWNLOAD] Fetching {component.name} mathematical prerequisites...") print(f" ๐Ÿ“‹ Components: GMP, MPFR, MPC") run_command("./contrib/download_prerequisites", cwd=component.src_dir) print(f"โœ… [DOWNLOAD] GCC prerequisites ready") -def build_and_install_libstdcpp(component): - """ - ๐Ÿ—๏ธ Build and Install C++ Standard Library (libstdc++) - - Builds the C++ standard library and essential runtime libraries from GCC. - We configure GCC but only build the target libraries we need, avoiding - the full compiler build which would be unnecessary and time-consuming. - - Target Libraries Built: - โ€ข libgcc - Low-level runtime support (exception handling, etc.) 
- โ€ข libstdc++-v3 - Complete C++ standard library - โ€ข libsanitizer - Address/memory/thread sanitizer support - โ€ข libatomic - Atomic operations for lock-free programming - โ€ข libbacktrace - Stack backtrace support for debugging - โ€ข libgomp - OpenMP parallel programming runtime - โ€ข libquadmath - Quadruple precision floating-point math - - Configuration highlights: - โ€ข Uses modern GCC 14 for latest C++ features - โ€ข Links against our custom glibc build - โ€ข Enables LTO for better optimization - โ€ข Static linking for portable distribution - """ - print(f"๐Ÿ—๏ธ [BUILD] Starting {component.name} C++ standard library build...") +def build_and_install_libstdcpp(component: GccSubComponent) -> None: + print(f"๐Ÿ”ง [BUILD] Starting {component.name} C++ standard library build...") print(f" ๐Ÿ“‹ Using GCC 14 (modern C++ support)") print(f" ๐ŸŽฏ Target libraries: {', '.join(component.target_libs)}") print(f" ๐Ÿ”— Linking with glibc v{TOOLCHAIN.glibc.version}") @@ -412,30 +251,7 @@ def build_and_install_libstdcpp(component): # ๐ŸŽญ Main Build Orchestrator # ======================================================================== -def main(): - """ - ๐Ÿš€ Main Toolchain Build Orchestrator - - Coordinates the entire toolchain build process using a sophisticated - parallel task scheduler with dependency resolution. The build is organized - as a directed acyclic graph (DAG) where each node represents a build task - and edges represent dependencies. - - Build Phases: - 1. ๐Ÿ”„ Setup - System preparation and prerequisite installation - 2. โฌ‡๏ธ Download - Source code fetching with verification - 3. ๐Ÿ“‚ Extract - Archive extraction and preparation - 4. ๐Ÿ—๏ธ Build - Compilation and installation - - The scheduler automatically determines the optimal execution order and - runs independent tasks in parallel to minimize total build time. - - Dependency Graph Structure: - โ€ข Setup tasks run first and can execute in parallel - โ€ข Download tasks depend on download prerequisites - โ€ข Extract tasks depend on both download completion and extract tools - โ€ข Build tasks have complex interdependencies (glibc before libstdc++) - """ +def main() -> None: print("๐Ÿš€ ========================================================================") print("๐Ÿš€ CLICE COMPILER TOOLCHAIN BUILD SYSTEM") print("๐Ÿš€ ========================================================================") @@ -443,13 +259,6 @@ def main(): print(f"๐ŸŽฏ Target Architecture: {TOOLCHAIN.target_triplet} ({TOOLCHAIN.target_machine})") print(f"๐Ÿ“‹ Components: glibc, Linux headers, libstdc++, LLVM (prepared)") print("๐Ÿš€ ========================================================================\n") - - # ==================================================================== - # ๐Ÿ“‹ Build Task Registry - # ==================================================================== - # Each job represents an atomic build operation that can be executed - # independently once its dependencies are satisfied. - # ==================================================================== all_jobs: Dict[str, Job] = { # ๐Ÿ“ฆ System Setup Tasks @@ -478,17 +287,9 @@ def main(): "download_llvm": Job("download_llvm", download_and_verify, (TOOLCHAIN.llvm,)), "extract_llvm": Job("extract_llvm", extract_source, (TOOLCHAIN.llvm,)), } - - # ==================================================================== - # ๐Ÿ”— Dependency Graph Definition - # ==================================================================== - # Defines the build order constraints. 
Each task lists its prerequisites - # that must complete before it can begin execution. - # ==================================================================== dependency_graph: Dict[str, Set[str]] = { - # ๐Ÿ“ฆ Setup Phase - Foundation tasks - "update_apt": set(), # No dependencies - can start immediately + "update_apt": set(), "install_download_prerequisites": {"update_apt"}, "install_extract_prerequisites": {"update_apt"}, "install_build_prerequisites": {"update_apt"}, @@ -522,10 +323,7 @@ def main(): "download_llvm": {"install_download_prerequisites"}, "extract_llvm": {"download_llvm", "install_extract_prerequisites"} } - - # ==================================================================== - # ๐Ÿš€ Launch Parallel Build System - # ==================================================================== + print(f"๐Ÿ“Š Initializing parallel scheduler with {len(all_jobs)} tasks...") print(f"๐Ÿ”— Total dependencies: {sum(len(deps) for deps in dependency_graph.values())}") print(f"โšก Maximum parallelism: {len([job for job, deps in dependency_graph.items() if not deps])} initial tasks\n") diff --git a/docker/linux/utility/build_utils.py b/docker/linux/utility/build_utils.py index 8f61079d..10bf9587 100644 --- a/docker/linux/utility/build_utils.py +++ b/docker/linux/utility/build_utils.py @@ -3,7 +3,7 @@ import os import tarfile -from config.build_config import Component +from config.build_config import Component, ToolchainSubComponent # Add project root to the Python path to allow importing 'config' module project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..')) @@ -14,25 +14,20 @@ import hashlib import concurrent.futures import time -from typing import Dict, Set, Tuple, Optional, List, Callable +from typing import Dict, Set, Tuple, Optional, List, Callable, Union from graphlib import TopologicalSorter from collections import defaultdict from enum import Enum def download_file(url: str, dest: str) -> None: - """ - Downloads a file from a URL to a destination using aria2c for acceleration. - Skips the download if the destination file already exists. - Disables SSL verification to work behind corporate proxies. - """ if os.path.exists(dest): - print(f"File {os.path.basename(dest)} already exists. Skipping download.") + print(f"File {os.path.basename(dest)} already exists. Skipping download.", flush=True) return dest_dir = os.path.dirname(dest) dest_name = os.path.basename(dest) - print(f"Downloading {url} to {dest} (SSL verification disabled)...") + print(f"Downloading {url} to {dest} (SSL verification disabled)...", flush=True) command = [ "aria2c", @@ -40,6 +35,7 @@ def download_file(url: str, dest: str) -> None: "--split=8", "--max-connection-per-server=8", "--min-split-size=1M", + "--file-allocation=falloc", # Preallocate file space "--check-certificate=false", # Corresponds to verify=False f'--dir="{dest_dir}"', f'--out="{dest_name}"', @@ -47,17 +43,10 @@ def download_file(url: str, dest: str) -> None: ] run_command(" ".join(command)) - print("Download complete.") + print("Download complete.", flush=True) def run_command(command: str, cwd: str = os.getcwd(), env: Dict[str, str] = {}) -> None: - """ - Executes a shell command, directing its output to the current shell. - Sets DEBIAN_FRONTEND to noninteractive to prevent interactive prompts. - - Output is streamed in real-time to stdout/stderr for better visibility - in both direct execution and parallel task scenarios. 
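
The real-time streaming behaviour this deleted docstring describes comes down to a Popen relay loop. A minimal sketch of that pattern, assuming shell execution and a noninteractive APT frontend; this is illustrative, not the repository's exact run_command:

    import os
    import subprocess
    from typing import Optional

    def run_streaming(command: str, cwd: Optional[str] = None) -> None:
        env = os.environ.copy()
        env["DEBIAN_FRONTEND"] = "noninteractive"  # suppress interactive APT prompts
        proc = subprocess.Popen(
            command, shell=True, cwd=cwd, env=env,
            stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True,
        )
        for line in proc.stdout:           # relay output as it is produced
            print(line, end="", flush=True)
        if proc.wait() != 0:
            raise subprocess.CalledProcessError(proc.returncode, command)
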
- """ - print(f"--- Running command: {{{command}}} in {cwd or os.getcwd()} ---") + print(f"--- Running command: {{{command}}} in {cwd or os.getcwd()} ---", flush=True) # Setup environment process_env = os.environ.copy() @@ -84,29 +73,21 @@ def run_command(command: str, cwd: str = os.getcwd(), env: Dict[str, str] = {}) raise subprocess.CalledProcessError(process.returncode, command) def verify_signature(signature_path: str, data_path: str) -> None: - """ - Simplified signature verification: only checks if files exist. - Skips actual signature verification when GPG environment is not available. - - Args: - signature_path: Path to the .asc signature file. - data_path: Path to the signed data file. - """ - print(f"--- Skipping signature verification for {os.path.basename(data_path)} (GPG not available) ---") + """Check if data file and signature file exist (actual GPG verification skipped).""" + print(f"--- Skipping signature verification for {os.path.basename(data_path)} (GPG not available) ---", flush=True) if not os.path.exists(data_path): raise RuntimeError(f"Data file {data_path} does not exist") if os.path.exists(signature_path): - print(f"Signature file found: {os.path.basename(signature_path)}") + print(f"Signature file found: {os.path.basename(signature_path)}", flush=True) else: - print(f"No signature file found: {os.path.basename(signature_path)}") + print(f"No signature file found: {os.path.basename(signature_path)}", flush=True) - print(f"File verification completed for {os.path.basename(data_path)}") + print(f"File verification completed for {os.path.basename(data_path)}", flush=True) def verify_sha256(file_path: str, expected_hash: str) -> bool: - """Verifies the SHA256 checksum of a file.""" - print(f"Verifying SHA256 for {file_path}...") + print(f"Verifying SHA256 for {file_path}...", flush=True) sha256 = hashlib.sha256() with open(file_path, "rb") as f: for chunk in iter(lambda: f.read(4096), b""): @@ -114,10 +95,10 @@ def verify_sha256(file_path: str, expected_hash: str) -> bool: actual_hash = sha256.hexdigest() if actual_hash.lower() == expected_hash.lower(): - print("SHA256 verification successful.") + print("SHA256 verification successful.", flush=True) return True else: - print(f"SHA256 verification failed! Expected {expected_hash}, got {actual_hash}") + print(f"SHA256 verification failed! 
Expected {expected_hash}, got {actual_hash}", flush=True) return False @@ -130,7 +111,7 @@ def __init__(self, name: str, func: Callable, args: Tuple = ()): self.func = func self.args = args - def __repr__(self): + def __repr__(self) -> str: return f"Job(name='{self.name}')" @@ -143,11 +124,11 @@ class TaskState(Enum): FAILED = "failed" -def run_job(job: Job): +def run_job(job: Job) -> str: """Executor function to run a job.""" - print(f"--- Starting Job: {job.name} ---") + print(f"--- Starting Job: {job.name} ---", flush=True) job.func(*job.args) - print(f"--- Finished Job: {job.name} ---") + print(f"--- Finished Job: {job.name} ---", flush=True) return job.name @@ -194,18 +175,18 @@ def _get_ready_jobs(self) -> List[str]: ready_jobs.append(job_name) return ready_jobs - def _submit_job(self, executor, job_name: str): + def _submit_job(self, executor: concurrent.futures.Executor, job_name: str) -> concurrent.futures.Future: """Submit a job for execution.""" job = self.jobs[job_name] self.task_states[job_name] = TaskState.RUNNING self.job_start_times[job_name] = time.time() - print(f"๐Ÿš€ [Scheduler] Starting job: {job_name}") + print(f"๐Ÿš€ [Scheduler] Starting job: {job_name}", flush=True) future = executor.submit(run_job, job) self.running_futures[future] = job_name return future - def _handle_completed_job(self, job_name: str, success: bool = True): + def _handle_completed_job(self, job_name: str, success: bool = True) -> None: """Handle job completion and update states.""" duration = time.time() - self.job_start_times[job_name] self.job_durations[job_name] = duration @@ -214,13 +195,13 @@ def _handle_completed_job(self, job_name: str, success: bool = True): self.task_states[job_name] = TaskState.COMPLETED self.completed_jobs.add(job_name) self.sorter.done(job_name) - print(f"โœ… [Scheduler] Job '{job_name}' completed successfully in {duration:.2f}s") + print(f"โœ… [Scheduler] Job '{job_name}' completed successfully in {duration:.2f}s", flush=True) else: self.task_states[job_name] = TaskState.FAILED self.failed_jobs.add(job_name) - print(f"โŒ [Scheduler] Job '{job_name}' failed after {duration:.2f}s") + print(f"โŒ [Scheduler] Job '{job_name}' failed after {duration:.2f}s", flush=True) - def _print_progress(self): + def _print_progress(self) -> None: """Print current execution progress.""" total = len(self.jobs) completed = len(self.completed_jobs) @@ -230,31 +211,31 @@ def _print_progress(self): elapsed = time.time() - self.start_time if self.start_time else 0 - print(f"\n๐Ÿ“Š [Progress] Total: {total} | โœ… Done: {completed} | ๐Ÿƒ Running: {running} | โณ Pending: {pending} | โŒ Failed: {failed}") - print(f"โฑ๏ธ [Time] Elapsed: {elapsed:.1f}s | Running jobs: {list(self.running_futures.values())}") + print(f"\n๐Ÿ“Š [Progress] Total: {total} | โœ… Done: {completed} | ๐Ÿƒ Running: {running} | โณ Pending: {pending} | โŒ Failed: {failed}", flush=True) + print(f"โฑ๏ธ [Time] Elapsed: {elapsed:.1f}s | Running jobs: {list(self.running_futures.values())}", flush=True) if completed > 0 and elapsed > 0: rate = completed / elapsed eta = (total - completed) / rate if rate > 0 else 0 - print(f"๐Ÿ“ˆ [Stats] Rate: {rate:.2f} jobs/s | ETA: {eta:.1f}s") + print(f"๐Ÿ“ˆ [Stats] Rate: {rate:.2f} jobs/s | ETA: {eta:.1f}s", flush=True) - def run(self, max_workers: Optional[int] = None): + def run(self, max_workers: Optional[int] = None) -> None: """ Execute all jobs with optimal parallel scheduling. 
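
The scheduler's core loop pairs graphlib's TopologicalSorter with a process pool. A condensed sketch of that pattern follows; the names are illustrative, and jobs must be picklable top-level callables for ProcessPoolExecutor:

    import concurrent.futures
    from graphlib import TopologicalSorter
    from typing import Callable, Dict, Optional, Set

    def run_graph(jobs: Dict[str, Callable[[], None]],
                  deps: Dict[str, Set[str]],
                  max_workers: Optional[int] = None) -> None:
        sorter = TopologicalSorter(deps)
        sorter.prepare()                     # validates the graph, detects cycles
        futures = {}
        with concurrent.futures.ProcessPoolExecutor(max_workers) as pool:
            while sorter.is_active():
                for name in sorter.get_ready():   # every dependency satisfied
                    futures[pool.submit(jobs[name])] = name
                done, _ = concurrent.futures.wait(
                    list(futures), return_when=concurrent.futures.FIRST_COMPLETED)
                for fut in done:
                    name = futures.pop(fut)
                    fut.result()             # re-raises worker exceptions (fail fast)
                    sorter.done(name)        # unblocks dependent jobs
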
Args: max_workers: Maximum number of parallel workers (default: CPU count) """ - print("๐ŸŽฏ [Scheduler] Initializing High-Performance Parallel Task Scheduler") - print(f"๐Ÿ“‹ [Scheduler] Total jobs: {len(self.jobs)}") - print(f"๐Ÿ”— [Scheduler] Total dependencies: {sum(len(deps) for deps in self.dependencies.values())}") + print("๐ŸŽฏ [Scheduler] Initializing High-Performance Parallel Task Scheduler", flush=True) + print(f"๐Ÿ“‹ [Scheduler] Total jobs: {len(self.jobs)}", flush=True) + print(f"๐Ÿ”— [Scheduler] Total dependencies: {sum(len(deps) for deps in self.dependencies.values())}", flush=True) self.start_time = time.time() with concurrent.futures.ProcessPoolExecutor(max_workers=max_workers) as executor: # Submit initial ready jobs ready_jobs = self._get_ready_jobs() - print(f"๐Ÿšฆ [Scheduler] Initial ready jobs: {ready_jobs}") + print(f"๐Ÿšฆ [Scheduler] Initial ready jobs: {ready_jobs}", flush=True) for job_name in ready_jobs: self._submit_job(executor, job_name) @@ -279,16 +260,16 @@ def run(self, max_workers: Optional[int] = None): self._handle_completed_job(job_name, success=True) newly_completed.append(job_name) except Exception as e: - print(f"๐Ÿ’ฅ [Scheduler] Job '{job_name}' failed with detailed error:") + print(f"๐Ÿ’ฅ [Scheduler] Job '{job_name}' failed with detailed error:", flush=True) self._handle_completed_job(job_name, success=False) # Implement fail-fast: cancel all running jobs and exit immediately - print(f"๐Ÿ›‘ [Scheduler] FAIL-FAST: Cancelling all remaining jobs due to failure in '{job_name}'") + print(f"๐Ÿ›‘ [Scheduler] FAIL-FAST: Cancelling all remaining jobs due to failure in '{job_name}'", flush=True) for remaining_future in self.running_futures.keys(): if remaining_future != future: remaining_future.cancel() remaining_job = self.running_futures[remaining_future] - print(f"โŒ [Scheduler] Cancelled job: {remaining_job}") + print(f"โŒ [Scheduler] Cancelled job: {remaining_job}", flush=True) # Clean up and raise the error immediately raise RuntimeError(f"โŒ Build failed in job '{job_name}': {str(e)}") from e @@ -310,104 +291,64 @@ def run(self, max_workers: Optional[int] = None): # Note: With fail-fast implementation, we won't reach here if any job failed # The exception will be raised immediately when the first job fails - def _print_final_report(self, total_time: float): + def _print_final_report(self, total_time: float) -> None: """Print comprehensive execution report.""" - print("\n" + "="*60) - print("๐ŸŽ‰ PARALLEL TASK EXECUTION COMPLETED!") - print("="*60) + print("\n" + "="*60, flush=True) + print("๐ŸŽ‰ PARALLEL TASK EXECUTION COMPLETED!", flush=True) + print("="*60, flush=True) - print(f"โฑ๏ธ Total execution time: {total_time:.2f}s") - print(f"โœ… Successfully completed: {len(self.completed_jobs)}/{len(self.jobs)} jobs") + print(f"โฑ๏ธ Total execution time: {total_time:.2f}s", flush=True) + print(f"โœ… Successfully completed: {len(self.completed_jobs)}/{len(self.jobs)} jobs", flush=True) if self.failed_jobs: - print(f"โŒ Failed jobs: {len(self.failed_jobs)}") + print(f"โŒ Failed jobs: {len(self.failed_jobs)}", flush=True) for job in self.failed_jobs: - print(f" - {job}") + print(f" - {job}", flush=True) # Show job timing analysis if self.job_durations: - print(f"\n๐Ÿ“Š Job Performance Analysis:") + print(f"\n๐Ÿ“Š Job Performance Analysis:", flush=True) sorted_jobs = sorted(self.job_durations.items(), key=lambda x: x[1], reverse=True) - print(f" Slowest jobs:") + print(f" Slowest jobs:", flush=True) for job, duration in sorted_jobs[:5]: - print(f" - 
{job:<30} {duration:>8.2f}s") + print(f" - {job:<30} {duration:>8.2f}s", flush=True) avg_duration = sum(self.job_durations.values()) / len(self.job_durations) - print(f" Average job duration: {avg_duration:.2f}s") + print(f" Average job duration: {avg_duration:.2f}s", flush=True) # Calculate theoretical sequential time vs actual parallel time sequential_time = sum(self.job_durations.values()) speedup = sequential_time / total_time if total_time > 0 else 1 efficiency = speedup / max(len(self.running_futures), 1) * 100 - print(f" Sequential time would be: {sequential_time:.2f}s") - print(f" Parallel speedup: {speedup:.2f}x") - print(f" Parallel efficiency: {efficiency:.1f}%") + print(f" Sequential time would be: {sequential_time:.2f}s", flush=True) + print(f" Parallel speedup: {speedup:.2f}x", flush=True) + print(f" Parallel efficiency: {efficiency:.1f}%", flush=True) - print("="*60) + print("="*60, flush=True) # ======================================================================== # ๐Ÿ› ๏ธ Component Build Utilities # ======================================================================== -# Generic functions for component-based building -# ======================================================================== -def install_download_prerequisites(component: Component): - """ - โฌ‡๏ธ Install Download Stage Prerequisites - - Installs essential tools required for fetching source code archives: - โ€ข aria2c - High-speed multi-connection downloader - โ€ข gnupg - GPG signature verification system - - These tools enable secure, accelerated downloading of toolchain sources. - """ - print("โฌ‡๏ธ [SETUP] Installing download prerequisites (aria2c, gnupg)...") +def install_download_prerequisites(component: Component) -> None: + print("โฌ‡๏ธ [SETUP] Installing download prerequisites (aria2c, gnupg)...", flush=True) download_prerequisites = component.download_prerequisites pkg_list = " ".join(download_prerequisites) run_command(f"apt install -y --no-install-recommends=true -o DPkg::Lock::Timeout=-1 {pkg_list}") - print("โœ… [SETUP] Download tools ready") + print("โœ… [SETUP] Download tools ready", flush=True) -def install_extract_prerequisites(component: Component): - """ - ๐Ÿ“‚ Install Archive Extraction Prerequisites - - Installs compression tools needed for extracting various archive formats: - โ€ข bzip2 - Required for GCC prerequisite archives (.tar.bz2) - - Different toolchain components use different compression formats, - so we ensure all extraction tools are available. - """ - print("๐Ÿ“‚ [SETUP] Installing archive extraction tools...") +def install_extract_prerequisites(component: Component) -> None: + print("๐Ÿ“‚ [SETUP] Installing archive extraction tools...", flush=True) extract_prerequisites = component.extract_prerequisites pkg_list = " ".join(extract_prerequisites) run_command(f"apt install -y --no-install-recommends=true -o DPkg::Lock::Timeout=-1 {pkg_list}") - print("โœ… [SETUP] Extraction tools ready") - + print("โœ… [SETUP] Extraction tools ready", flush=True) -def download_and_verify(component): - """ - โฌ‡๏ธ Download and Verify Component Source - - Downloads the source tarball for a specified toolchain component and - verifies its authenticity using GPG signatures when available. - - The process includes: - 1. Create organized download directory structure - 2. Download source archive using high-speed aria2c - 3. Download GPG signature file (if available) - 4. Verify archive integrity and authenticity - 5. 
Clean up on verification failure - - Args: - component: Component instance (glibc, gcc, llvm, or linux) - - Raises: - Exception: If download fails or signature verification fails - """ +def download_and_verify(component: Component) -> None: version = component.version - print(f"โฌ‡๏ธ [DOWNLOAD] Fetching {component.name} v{version}...") + print(f"โฌ‡๏ธ [DOWNLOAD] Fetching {component.name} v{version}...", flush=True) # Ensure directories exist os.makedirs(component.cache_dir, exist_ok=True) @@ -426,50 +367,29 @@ def download_and_verify(component): signature_path = os.path.join(component.cache_dir, signature_name) signature_url = component.verification_url try: - print(f"๐Ÿ” [VERIFY] Downloading signature for {component.name}...") + print(f"๐Ÿ” [VERIFY] Downloading signature for {component.name}...", flush=True) download_file(signature_url, signature_path) verify_signature(signature_path, tarball_path) - print(f"โœ… [VERIFY] {component.name} signature verified") + print(f"โœ… [VERIFY] {component.name} signature verified", flush=True) except Exception as e: - print(f"โŒ [ERROR] Signature verification failed for {component.name}: {e}", file=sys.stderr) + print(f"โŒ [ERROR] Signature verification failed for {component.name}: {e}", file=sys.stderr, flush=True) shutil.rmtree(component.cache_dir, ignore_errors=True) raise else: - print(f"โš ๏ธ [INFO] No signature verification available for {component.name}") + print(f"โš ๏ธ [INFO] No signature verification available for {component.name}", flush=True) -def extract_source(component): - """ - ๐Ÿ“‚ Extract Component Source Archive - - Extracts the downloaded source tarball to the appropriate directory - structure, automatically detecting compression format and stripping - the top-level directory. - - Supports multiple archive formats: - โ€ข .tar.xz (LZMA compression) - Used by most GNU projects - โ€ข .tar.gz (Gzip compression) - Used by Linux kernel - - The function automatically handles archives with a top-level directory: - 1. Extracts directly to target directory - 2. Detects if there's a single top-level directory wrapper - 3. Moves all contents up one level - 4. 
Removes the empty wrapper directory - - Args: - component: Component instance (glibc, gcc, llvm, or linux) - """ +def extract_source(component: ToolchainSubComponent) -> None: version = component.version - print(f"๐Ÿ“‚ [EXTRACT] Unpacking {component.name} v{version}...") + print(f"๐Ÿ“‚ [EXTRACT] Unpacking {component.name} v{version}...", flush=True) # Ensure extraction directory exists os.makedirs(component.src_dir, exist_ok=True) # Determine archive location and format - tarball_name = component.tarball_name_pattern.format(version=version) - tarball_path = os.path.join(component.cache_dir, tarball_name) + tarball_path = os.path.join(component.cache_dir, component.tarball_name) - print(f" ๐Ÿ“ Source: {tarball_path}") - print(f" ๐Ÿ“ Target: {component.extracted_dir}") + print(f" ๐Ÿ“ Source: {tarball_path}", flush=True) + print(f" ๐Ÿ“ Target: {component.extracted_dir}", flush=True) # Auto-detect compression format and extract directly mode = "r:xz" if tarball_path.endswith(".tar.xz") else "r:gz" @@ -483,7 +403,7 @@ def extract_source(component): # Single top-level directory found - strip it top_dir_name = extracted_items[0] top_dir_path = os.path.join(component.extracted_dir, top_dir_name) - print(f" ๐Ÿ”„ Stripping top-level directory: {top_dir_name}") + print(f" ๐Ÿ”„ Stripping top-level directory: {top_dir_name}", flush=True) # Move all contents from top_dir to parent (extracted_dir) for item in os.listdir(top_dir_path): @@ -494,4 +414,4 @@ def extract_source(component): # Remove the now-empty top-level directory os.rmdir(top_dir_path) - print(f"โœ… [EXTRACT] {component.name} extraction complete") + print(f"โœ… [EXTRACT] {component.name} extraction complete", flush=True) diff --git a/docker/linux/utility/common.sh b/docker/linux/utility/common.sh index 1244fde8..869346fd 100644 --- a/docker/linux/utility/common.sh +++ b/docker/linux/utility/common.sh @@ -1,12 +1,6 @@ #!/bin/bash # ======================================================================== -# ๐Ÿš€ Clice Development Container Common Variables -# ======================================================================== -# File: docker/linux/utility/common.sh -# Purpose: Defines shared variables and functions for build and run scripts. -# -# This script is sourced by other scripts to ensure consistency in image -# and container naming conventions. +# Clice Development Container Common Variables # ======================================================================== set -e @@ -24,7 +18,6 @@ CLICE_DIR="/clice" # pwd inside the container when you open a shell DEFAULT_CONTAINER_WORKDIR="${CLICE_DIR}" -# Dockerfile and build configuration DOCKERFILE_PATH="docker/linux/Dockerfile" # ======================================================================== diff --git a/docker/linux/utility/create_release_package.py b/docker/linux/utility/create_release_package.py index 65f4f0c0..188f3766 100644 --- a/docker/linux/utility/create_release_package.py +++ b/docker/linux/utility/create_release_package.py @@ -1,27 +1,12 @@ #!/usr/bin/env python3 """ -๐Ÿ“ฆ Clice Release Package Creator - Stage 3 - -This script handles the final packaging stage of the multi-stage Docker build. -It merges the outputs from Stage 1 (toolchain) and Stage 2 (dependencies), -creates a comprehensive manifest, and packages everything into a single -compressed archive for the release image. 
- -Components Merged: - โ€ข Custom compiler toolchain from Stage 1 - โ€ข Development dependencies from Stage 2 - โ€ข Combined dependency manifest - โ€ข Final compressed release package - -The script ensures all components from both stages are properly combined -and packaged for efficient Docker layer caching and distribution. +Stage 3: Create final release package by merging toolchain and dependencies, +generating manifest, and packaging into 7z SFX archive. """ import os import sys -import tarfile import json -import shutil # Add project root to Python path project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..')) @@ -36,30 +21,23 @@ ALL_COMPONENTS, # Component instances for structured access TOOLCHAIN, - CLICE_SETUP_SCRIPTS, BASHRC, - UV + UV, + P7ZIP, ) -# Import build utilities for parallel execution from build_utils import ( Job, - ParallelTaskScheduler + ParallelTaskScheduler, + run_command ) # ======================================================================== # ๐ŸŒ Environment Setup Functions # ======================================================================== -def setup_environment_variables_and_entrypoint(): - """ - Setup .bashrc with environment variables and container entrypoint script. - - This function creates a complete .bashrc file that: - 1. Exports environment variables from DEVELOPMENT_SHELL_VARS for persistent shell use - 2. Sets internal variables (CLICE_WORKDIR, etc.) without export for script-only use - 3. Embeds the container entrypoint script for auto Python environment setup - """ +def setup_environment_variables_and_entrypoint() -> None: + """Create .bashrc with environment variables and container entrypoint script.""" print("๐ŸŒ Setting up .bashrc with environment variables and entrypoint script...") # Read container entrypoint script from BashrcComponent @@ -111,38 +89,17 @@ def setup_environment_variables_and_entrypoint(): print(" ๐Ÿ“ Internal variables: CLICE_WORKDIR, RELEASE_PACKAGE_DIR, UV_PACKAGE_DIR_NAME") print(" ๐Ÿ“ Container entrypoint script embedded") -def copy_setup_scripts(): - """Copy setup scripts and configuration files as complete directory structure.""" - print("๐Ÿ“‹ Copying setup scripts and configuration files...") - - # Get files to copy from component definition - for src_rel in CLICE_SETUP_SCRIPTS.files_to_copy: - src = os.path.join(CLICE_WORKDIR, src_rel) - dst = os.path.join(CLICE_SETUP_SCRIPTS.package_dir, src_rel) - os.makedirs(os.path.dirname(dst), exist_ok=True) - shutil.copy2(src, dst) - print(f" โœ… Copied: {src} -> {dst}") - - print(f"โœ… Setup scripts and configs copied to {CLICE_SETUP_SCRIPTS.package_dir}") - # ======================================================================== # ๐Ÿ“‹ Manifest Creation Functions # ======================================================================== -def create_comprehensive_manifest(): - """ - ๐Ÿ“‹ Create Comprehensive Release Manifest - - Creates a detailed manifest of all components based on ALL_COMPONENTS - configuration. Analyzes actual package directories and creates a - comprehensive overview of the release package contents. 
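
The per-component statistics in such a manifest boil down to walking each package directory. A small sketch of that aggregation, with field names assumed rather than taken from the real manifest schema:

    import os

    def summarize_package_dir(path: str) -> dict:
        files = 0
        size = 0
        for dirpath, _, filenames in os.walk(path):
            files += len(filenames)
            size += sum(os.path.getsize(os.path.join(dirpath, f)) for f in filenames)
        return {"total_files": files, "total_size_mb": round(size / 1024 / 1024, 1)}
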
- """ +def create_comprehensive_manifest() -> None: print("๐Ÿ“‹ Creating comprehensive release manifest based on ALL_COMPONENTS...") # Create base manifest structure manifest = { "release_info": { - "created_at": os.stat(RELEASE_PACKAGE_DIR).st_ctime if os.path.exists(RELEASE_PACKAGE_DIR) else None, + "created_at": os.stat(RELEASE_PACKAGE_DIR).st_birthtime if os.path.exists(RELEASE_PACKAGE_DIR) else None, "stage": "final_release", "version": "1.0.0" }, @@ -238,20 +195,21 @@ def create_comprehensive_manifest(): print(f"๐Ÿ“ Total files: {manifest['summary']['total_files']}") print(f"๐Ÿ“ฆ Total size: {manifest['summary']['total_size_mb']} MB") -def create_final_release_package(): - """ - ๐Ÿ“ฆ Create Final Release Package - - Creates the final compressed archive containing all components from - both build stages. Uses maximum XZ compression for minimal size. +def update_apt() -> None: + print("๐Ÿ”„ Updating APT package database...") + run_command("apt update -o DPkg::Lock::Timeout=-1") + print("โœ… APT database updated") + +def install_p7zip() -> None: + print("๐Ÿ“ฆ Installing p7zip for archive creation...") - The package contains: - - Custom compiler toolchain (Stage 1) - - Development dependencies (Stage 2) - - Comprehensive release manifest - - All directory structures preserved - """ - print("๐Ÿ“ฆ Creating final release package with maximum XZ compression...") + packages = " ".join(P7ZIP.build_prerequisites) + run_command(f"apt install -y --no-install-recommends -o DPkg::Lock::Timeout=-1 {packages}") + print("โœ… p7zip installed successfully") + +def create_final_release_package() -> None: + """Create self-extracting 7z archive containing all components.""" + print("๐Ÿ“ฆ Creating self-extracting release package with 7z SFX...") if not os.path.exists(RELEASE_PACKAGE_DIR): print("โš ๏ธ No release package directory found") @@ -261,53 +219,33 @@ def create_final_release_package(): packed_dir = os.path.dirname(PACKED_RELEASE_PACKAGE_PATH) os.makedirs(packed_dir, exist_ok=True) - # Create archive with maximum XZ compression print(f" ๐Ÿ“ Source: {RELEASE_PACKAGE_DIR}") print(f" ๐Ÿ“ Target: {PACKED_RELEASE_PACKAGE_PATH}") - # LZMA could be optimized with multithreading, but reduces compress rate - # With higher preset, multithreading benefits diminish. 
-    # So we choose single-threaded for best compression
-    with tarfile.open(PACKED_RELEASE_PACKAGE_PATH, 'w:xz', preset=9) as tar:
-        # Add all subdirectories and files, preserving original directory structure
-        for item in os.listdir(RELEASE_PACKAGE_DIR):
-            item_path = os.path.join(RELEASE_PACKAGE_DIR, item)
-            print(f"  ๐Ÿ“ฆ Adding: {item}")
-            tar.add(item_path, arcname=item)
 
-    # Report package statistics
-    package_size_mb = os.path.getsize(PACKED_RELEASE_PACKAGE_PATH) / (1024 * 1024)
+    # Create self-extracting archive using 7z with SFX module
+    # The -sfx option creates a self-extracting executable
+    print(f"๐Ÿ”ง Creating SFX archive with settings: {P7ZIP.compression_options}...")
 
-    # Calculate source directory size for compression ratio
-    source_size_mb = sum(
-        os.path.getsize(os.path.join(dirpath, filename))
-        for dirpath, _, filenames in os.walk(RELEASE_PACKAGE_DIR)
-        for filename in filenames
-    ) / (1024 * 1024)
+    seven_zip_cmd = (
+        f"7z a {P7ZIP.sfx_option} {' '.join(P7ZIP.compression_options)} "
+        f"{PACKED_RELEASE_PACKAGE_PATH} "
+        f"{RELEASE_PACKAGE_DIR}/*"
+    )
+    run_command(seven_zip_cmd)
 
-    compression_ratio = (source_size_mb - package_size_mb) / source_size_mb * 100 if source_size_mb > 0 else 0
+    # Report package statistics
+    package_size_mb = os.path.getsize(PACKED_RELEASE_PACKAGE_PATH) / (1024 * 1024)
 
-    print(f"โœ… Final release package created: {PACKED_RELEASE_PACKAGE_PATH}")
-    print(f"๐Ÿ“Š Source size: {source_size_mb:.1f} MB")
+    print(f"โœ… Self-extracting release package created: {PACKED_RELEASE_PACKAGE_PATH}")
     print(f"๐Ÿ“Š Package size: {package_size_mb:.1f} MB")
-    print(f"๐Ÿ“Š Compression ratio: {compression_ratio:.1f}%")
+    print(f"โ„น๏ธ Extract with: {PACKED_RELEASE_PACKAGE_PATH} -o<target_dir>")
 
 # ========================================================================
 # ๐Ÿš€ Main Execution
 # ========================================================================
 
-def main():
-    """
-    ๐Ÿš€ Main Stage 3 Execution
-
-    Orchestrates the final packaging stage using parallel task execution:
-    1. Setup .bashrc with environment variables and entrypoint script
-    2. Copy setup scripts and configuration files
-    3. Create comprehensive manifest based on ALL_COMPONENTS
-    4. Package everything into final release archive
-
-    This creates the complete release package ready for deployment.
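
For context on the SFX switch used above: p7zip can prepend a self-extractor module when creating an archive, producing a single executable. A hedged sketch of the invocation, with option values and paths assumed here rather than taken from the P7ZIP component:

    import subprocess

    archive = "/build/clice-release.sfx"  # hypothetical output path
    payload = "/release-package/*"        # hypothetical payload

    # "-sfx" prepends the default console self-extractor module;
    # "-mx=9" requests maximum compression. Both values are assumptions.
    subprocess.run(f"7z a -sfx -mx=9 {archive} {payload}", shell=True, check=True)
    # The resulting file is directly executable; "-o<dir>" chooses the target.
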
- """ +def main() -> None: print("๐Ÿš€ ========================================================================") print("๐Ÿš€ CLICE RELEASE PACKAGE CREATOR - STAGE 3") print("๐Ÿš€ ========================================================================") @@ -316,21 +254,25 @@ def main(): # Define packaging jobs with proper dependency management jobs = { + "update_apt": Job("update_apt", update_apt, ()), "setup_bashrc": Job("setup_bashrc", setup_environment_variables_and_entrypoint, ()), - "copy_setup_scripts": Job("copy_setup_scripts", copy_setup_scripts, ()), "create_manifest": Job("create_manifest", create_comprehensive_manifest, ()), + "install_p7zip": Job("install_p7zip", install_p7zip, ()), "create_package": Job("create_package", create_final_release_package, ()), } # Define dependencies - # - bashrc setup and script copy can run in parallel + # - APT update runs first to refresh package lists + # - bashrc setup and script copy can run in parallel with APT update + # - p7zip installation depends on APT update being complete # - Manifest creation depends on bashrc and scripts being ready - # - Package creation depends on manifest + # - Package creation depends on manifest and p7zip being ready dependencies = { + "update_apt": set(), # Runs first, no dependencies "setup_bashrc": set(), - "copy_setup_scripts": set(), - "create_manifest": {"setup_bashrc", "copy_setup_scripts"}, - "create_package": {"create_manifest"}, + "create_manifest": {"setup_bashrc"}, + "install_p7zip": {"update_apt"}, # Depends on APT update + "create_package": {"create_manifest", "install_p7zip"}, # Depends on manifest and 7z } # Execute packaging tasks in parallel where possible @@ -343,7 +285,6 @@ def main(): print(f"โœ… Final release package: {PACKED_RELEASE_PACKAGE_PATH}") print(f"โœ… Manifest: {RELEASE_PACKAGE_DIR}/manifest.json") print(f"โœ… Bashrc: {BASHRC.bashrc_path}") - print(f"โœ… Setup scripts: {CLICE_SETUP_SCRIPTS.package_dir}") print("๐ŸŽ‰ ========================================================================") if __name__ == "__main__": diff --git a/docker/linux/utility/download_dependencies.py b/docker/linux/utility/download_dependencies.py index 21776212..16fb4674 100644 --- a/docker/linux/utility/download_dependencies.py +++ b/docker/linux/utility/download_dependencies.py @@ -1,30 +1,7 @@ -# ======================================================================== -# ๐Ÿ“ฆ Clice Dependencies Downloader -# ======================================================================== -# File: docker/linux/utility/download_dependencies.py -# Purpose: Download all dev-container dependencies without installing them -# -# This module downloads all required packages, tools, and dependencies -# without installing them, maximizing Docker build cache efficiency. -# Downloaded packages are stored in cache directories for later installation. -# ======================================================================== - +#!/usr/bin/env python3 """ -๐Ÿš€ Clice Dependencies Downloader - -Downloads all required development dependencies for the Clice dev container -without installing them. This approach maximizes Docker build cache efficiency -by separating the download phase from the installation phase. 
- -Components Downloaded: - โ€ข APT packages for development tools - โ€ข CMake binary releases - โ€ข XMake installation packages - โ€ข Python packages and dependencies - โ€ข LLVM prebuilt binaries (if available) - -The downloaded packages are stored in structured cache directories that -can be efficiently copied and cached by Docker's build system. +Download all dev dependencies (APT packages, CMake, XMake, Python packages) +without installing them for Docker build cache efficiency. """ import os @@ -56,7 +33,6 @@ # ======================================================================== def install_download_prerequisites() -> None: - """Install prerequisites required for downloading dependencies.""" print("๐Ÿ“ฆ Installing dependencies download prerequisites...") # Update package lists first @@ -69,12 +45,7 @@ def install_download_prerequisites() -> None: print(f"โœ… Installed {len(download_prerequisites)} download prerequisites") def get_apt_package_list(base_packages: List[str]) -> List[str]: - """ - Get all required APT packages using the exact StackOverflow command pattern. - - Uses: apt-cache depends --recurse ... | awk '$1 ~ /^Depends:/{print $2}' - Returns: Deduplicated list of packages to download - """ + """Get recursive APT dependencies using apt-cache depends + awk pattern.""" print("๐Ÿ” Resolving recursive dependencies using StackOverflow command pattern...") all_packages = set() @@ -111,7 +82,7 @@ def get_apt_package_list(base_packages: List[str]) -> List[str]: # Filter available packages (remove virtual/unavailable packages) print(f"๐Ÿ” Found {len(all_packages)} total dependency packages, filtering available ones...") - available_packages = [] + available_packages = base_packages.copy() for package in sorted(all_packages): try: @@ -130,13 +101,6 @@ def get_apt_package_list(base_packages: List[str]) -> List[str]: return available_packages def download_apt_packages() -> None: - """ - Download all APT packages using the exact StackOverflow command pattern. - - Two-stage approach: - 1. Get package list using apt-cache depends + awk pattern - 2. Download packages using apt-get download - """ print("๐Ÿ“ฆ Downloading APT packages with StackOverflow command pattern...") # Create both download cache and package directories using component structure @@ -216,7 +180,7 @@ def download_apt_packages() -> None: print(f"๐Ÿ“ Cache directory: {APT.cache_dir} (preserved for future builds)") def download_cmake() -> None: - """Download CMake binary release with verification files and verify integrity.""" + """Download CMake installer and verify SHA256 integrity.""" print("๐Ÿ”ง Downloading CMake with verification...") # Create both cache and package directories using component structure @@ -267,7 +231,6 @@ def download_cmake() -> None: print(f"๐Ÿ“ฆ CMake copied to package: {cmake_package_file}") def download_xmake() -> None: - """Download XMake bundle for direct installation.""" print("๐Ÿ”จ Downloading XMake bundle...") # Create both cache and package directories using component structure @@ -295,11 +258,6 @@ def download_xmake() -> None: print(f"๐Ÿ“ฆ XMake copied to package: {xmake_package_file}") def download_python_packages() -> None: - """ - Download Python packages from pyproject.toml using uv sync. - - Uses uv sync to download all dependencies to UV's packages cache directory. 
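
The "apt-cache depends + awk" pattern referenced in this file condenses to the runnable sketch below. The package names and download directory are illustrative, and real code additionally filters out entries that apt cannot download:

    import subprocess

    base = ["git", "curl"]  # example package set
    closure = subprocess.run(
        "apt-cache depends --recurse --no-recommends --no-suggests "
        "--no-conflicts --no-breaks --no-replaces --no-enhances "
        + " ".join(base) + " | awk '$1 ~ /^Depends:/{print $2}' | sort -u",
        shell=True, capture_output=True, text=True, check=True,
    ).stdout.split()
    # Skip virtual packages such as "<debconf-2.0>", which cannot be downloaded
    real = [p for p in set(base) | set(closure) if not p.startswith("<")]
    # apt-get download drops .deb files into the working directory (assumed to exist)
    subprocess.run("apt-get download " + " ".join(real),
                   shell=True, check=True, cwd="/tmp/apt-debs")
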
- """ print("๐Ÿ Downloading Python packages from pyproject.toml...") # Create cache directory for packages @@ -327,8 +285,7 @@ def download_python_packages() -> None: # ๐Ÿš€ Main Execution # ======================================================================== -def main(): - """Main execution function with parallel task scheduling.""" +def main() -> None: print("๐Ÿš€ Starting Clice Dependencies Download Process...") # Create main cache directory diff --git a/docker/linux/utility/local_setup.py b/docker/linux/utility/local_setup.py index 7f5cd021..870ec212 100644 --- a/docker/linux/utility/local_setup.py +++ b/docker/linux/utility/local_setup.py @@ -1,31 +1,11 @@ -# ======================================================================== -# ๐Ÿš€ Clice Local Setup -# ======================================================================== -# File: docker/linux/utility/local_setup.py -# Purpose: Final setup phase for the Clice development container -# -# This script handles the installation and configuration of all pre-downloaded -# packages and tools to complete the development environment setup. -# ======================================================================== - +#!/usr/bin/env python3 """ -๐Ÿš€ Clice Local Setup Script - -Handles the final setup phase of the Clice development container by: -1. Installing all pre-downloaded APT packages -2. Extracting and installing the custom toolchain -3. Setting up CMake and XMake build systems -4. Installing Python packages via uv -5. Configuring environment variables and PATH - -This script maximizes Docker build cache efficiency by separating the -installation phase from the download phase, allowing for independent -caching of each step. +Final setup: Install pre-downloaded packages (APT, toolchain, CMake, XMake, Python) +and deploy .bashrc configuration. 
""" import os import sys -import tarfile import shutil # Ensure utility directory is in Python path for imports @@ -33,15 +13,13 @@ if project_root not in sys.path: sys.path.insert(0, project_root) -# Import all configuration from build_config using new component structure from config.build_config import ( RELEASE_PACKAGE_DIR, CLICE_WORKDIR, - # Import component instances for structured access - APT, UV, CMAKE, XMAKE, TOOLCHAIN, BASHRC + APT, UV, CMAKE, XMAKE, TOOLCHAIN, BASHRC, + APTComponent, UVComponent, CMakeComponent, XMakeComponent, ToolchainComponent ) -# Import build_utils for run_command and other utilities from build_utils import ( Job, ParallelTaskScheduler, @@ -52,7 +30,7 @@ # ๐ŸŒ Environment Setup Functions # ======================================================================== -def deploy_bashrc(): +def deploy_bashrc() -> None: """Deploy .bashrc from package to /root/.bashrc.""" print("๐ŸŒ Deploying .bashrc configuration...") @@ -72,8 +50,7 @@ def deploy_bashrc(): # ๐Ÿ“ฆ Package Installation Functions # ======================================================================== -def install_apt_packages(apt_component): - """Install APT development packages from downloaded .deb files.""" +def install_apt_packages(apt_component: APTComponent) -> None: print("๐Ÿ“ฆ Installing APT development packages...") # Install all .deb files found in the package directory @@ -86,13 +63,12 @@ def install_apt_packages(apt_component): else: print("โš ๏ธ No .deb files found in APT package directory") -def install_toolchain(toolchain_component): - """Install the custom toolchain.""" +def install_toolchain(toolchain_component: ToolchainComponent) -> None: print("๐Ÿ”ง Installing custom toolchain...") print(f"โœ… Toolchain available at: {toolchain_component.package_dir}") -def install_cmake(cmake_component): +def install_cmake(cmake_component: CMakeComponent) -> None: """Install CMake from pre-downloaded installer.""" print("๐Ÿ”ง Installing CMake...") @@ -120,7 +96,7 @@ def install_cmake(cmake_component): print(f"โœ… CMake installed to {cmake_install_dir}") -def install_xmake(xmake_component): +def install_xmake(xmake_component: XMakeComponent) -> None: """Install XMake from pre-downloaded package.""" print("๐Ÿ”จ Installing XMake...") @@ -135,8 +111,7 @@ def install_xmake(xmake_component): print("โœ… XMake installed successfully") -def install_python_packages(uv_component): - """Install Python packages from uv cache.""" +def install_python_packages(uv_component: UVComponent) -> None: print("๐Ÿ Installing Python packages...") # Install wheel files found in the UV package directory @@ -154,15 +129,14 @@ def install_python_packages(uv_component): # ๐Ÿ“‹ Setup Orchestration # ======================================================================== -def setup_git_safe_directory(): +def setup_git_safe_directory() -> None: """Configure git to treat the workspace as safe.""" print("๐Ÿ”ง Configuring git safe directory...") run_command(f"git config --global --add safe.directory {CLICE_WORKDIR}") print("โœ… Git safe directory configured") -def main(): - """Main setup orchestration function with parallel task scheduling.""" +def main() -> None: print("๐Ÿš€ Setting up Clice Dev Container...") # Define setup jobs with proper dependency management From 3f7c5350b31bbc9132362e12a5b181b86ac4f51c Mon Sep 17 00:00:00 2001 From: sora_mono <849526320@qq.com> Date: Wed, 3 Dec 2025 21:08:51 +0800 Subject: [PATCH 4/4] Added Zig compiler support --- .github/workflows/docker-build-push.yml | 110 +++++ 
config/build_config.py | 439 ------------------ docker/linux/Dockerfile | 66 +-- docker/linux/build.sh | 5 +- docker/linux/run.sh | 38 +- docker/linux/test_build.sh | 301 ------------ .../utility/build_clice_compiler_toolchain.py | 155 ++++--- docker/linux/utility/build_utils.py | 330 ++++++++----- docker/linux/utility/common.sh | 2 +- .../{ => utility}/container-entrypoint.sh | 4 +- .../linux/utility/create_release_package.py | 87 ++-- docker/linux/utility/download_dependencies.py | 266 ++++++----- docker/linux/utility/local_setup.py | 81 +++- docker/linux/utility/pyproject.toml | 3 +- 14 files changed, 742 insertions(+), 1145 deletions(-) create mode 100644 .github/workflows/docker-build-push.yml delete mode 100644 config/build_config.py delete mode 100644 docker/linux/test_build.sh rename docker/linux/{ => utility}/container-entrypoint.sh (93%) diff --git a/.github/workflows/docker-build-push.yml b/.github/workflows/docker-build-push.yml new file mode 100644 index 00000000..979fcdc5 --- /dev/null +++ b/.github/workflows/docker-build-push.yml @@ -0,0 +1,110 @@ +name: Docker Build and Push + +on: + push: + branches: + - main + - develop + release: + types: [created] + workflow_dispatch: + inputs: + compiler: + description: 'Compiler to use (gcc, clang, zig)' + required: false + default: 'clang' + type: choice + options: + - gcc + - clang + - zig + +env: + REGISTRY: ghcr.io + IMAGE_NAME: ${{ github.repository }} + +jobs: + build-and-push: + strategy: + matrix: + compiler: [gcc, clang, zig] + runs-on: ubuntu-24.04 + permissions: + contents: read + packages: write + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Setup Docker buildx + uses: docker/setup-buildx-action@v3 + + - name: Log into registry ${{ env.REGISTRY }} + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Extract Docker metadata + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + tags: | + type=ref,event=branch,suffix=-${{ matrix.compiler }} + type=ref,event=pr,suffix=-${{ matrix.compiler }} + type=semver,pattern={{version}},suffix=-${{ matrix.compiler }} + type=semver,pattern={{major}}.{{minor}},suffix=-${{ matrix.compiler }} + type=raw,value=latest-${{ matrix.compiler }},enable={{is_default_branch}} + + - name: Build Docker image using build.sh + run: | + # Make build script executable + chmod +x ./docker/linux/build.sh + + # Run build script with cache configuration + ./docker/linux/build.sh \ + --compiler ${{ matrix.compiler }} \ + --cache-from type=gha,scope=${{ matrix.compiler }} \ + --cache-to type=gha,mode=max,scope=${{ matrix.compiler }} \ + --rebuild + + - name: Tag and push image to registry + run: | + # Get the local image name from build.sh + LOCAL_IMAGE=$(docker images --format "{{.Repository}}:{{.Tag}}" | grep "clice-dev-container.*${{ matrix.compiler }}" | head -n 1) + + if [ -z "$LOCAL_IMAGE" ]; then + echo "Error: Could not find built image" + exit 1 + fi + + echo "Built image: $LOCAL_IMAGE" + + # Tag and push for each metadata tag + echo '${{ steps.meta.outputs.tags }}' | while read -r tag; do + if [ -n "$tag" ]; then + echo "Tagging as: $tag" + docker tag "$LOCAL_IMAGE" "$tag" + docker push "$tag" + fi + done + + - name: Get image digest + id: digest + run: | + # Get the digest of the pushed image + FIRST_TAG=$(echo '${{ steps.meta.outputs.tags }}' | head -n 1) + DIGEST=$(docker inspect 
--format='{{index .RepoDigests 0}}' "$FIRST_TAG" | cut -d'@' -f2) + echo "digest=$DIGEST" >> $GITHUB_OUTPUT + + - name: Generate artifact attestation + uses: actions/attest-build-provenance@v1 + with: + subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + subject-digest: ${{ steps.digest.outputs.digest }} + push-to-registry: true diff --git a/config/build_config.py b/config/build_config.py deleted file mode 100644 index b8d2f42c..00000000 --- a/config/build_config.py +++ /dev/null @@ -1,439 +0,0 @@ -"""Centralized configuration constants for toolchain build system.""" - -import json -import os -from typing import Any, List, Dict - -# ======================================================================== -# ๐ŸŒ Environment Variables and Core Paths -# ======================================================================== - -# Global environment variables that will be used in dev-container shells -DEVELOPMENT_SHELL_VARS: Dict[str, str] = { - "PATH": "/root/.local/bin:${PATH}", - "XMAKE_ROOT": "y" -} - -# Environment variables specifically for toolchain build processes -# NOT used in dev-container shells -TOOLCHAIN_BUILD_ENV_VARS: Dict[str, str] = { - "ORIGIN": "$ORIGIN" # Enable relative rpath for portable binary distribution -} - -# CLICE_WORKDIR is different from PROJECT_ROOT -# CLICE_WORKDIR means where the Clice project is mounted inside the container -CLICE_WORKDIR: str = os.getenv("CLICE_WORKDIR", "") -# PROJECT_ROOT is the root directory of the Clice project -# It could be different from CLICE_WORKDIR if the build_config.py is executed in expand-stage -PROJECT_ROOT: str = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) -PYPROJECT_PATH: str = os.path.join(PROJECT_ROOT, "pyproject.toml") -TOOLCHAIN_CONFIG_PATH: str = os.path.join(PROJECT_ROOT, "config/default-toolchain-version.json") - -# ======================================================================== -# ๐Ÿ“ฆ Release Package Configuration (Cross-Stage Variables) -# ======================================================================== - -# main package directory -RELEASE_PACKAGE_DIR: str = os.getenv("RELEASE_PACKAGE_DIR", "") -# compressed package archive -PACKED_RELEASE_PACKAGE_PATH: str = os.getenv("PACKED_RELEASE_PACKAGE_PATH", "") -# bashrc environment configuration file -ENVIRONMENT_CONFIG_FILE: str = os.getenv("ENVIRONMENT_CONFIG_FILE", "") -# File cache directory for toolchain build -# Only save what is always same, e.g. 
source code tarballs -BUILD_CACHE_DIR: str = os.getenv("BUILD_CACHE_DIR", "") -WORKDIR_ROOT: str = "/dev-container-build" # Temporary work directory for builds (not persistent) - -TOOLCHAIN_VERSIONS: Dict[str, Any] = {} -with open(TOOLCHAIN_CONFIG_PATH, "r") as f: - TOOLCHAIN_VERSIONS = json.load(f) - -# ======================================================================== -# ๐Ÿ” GPG Verification Configuration -# ======================================================================== - -GPG_KEY_SERVER: List[str] = [ - "keys.openpgp.org", - "keyserver.ubuntu.com" -] - -# ======================================================================== -# ๐Ÿงฉ Component Architecture Definitions -# ======================================================================== - -class Component: - # Class-level URL patterns (overridden by subclasses) - base_url: str = "" - tarball_name_pattern: str = "" - verification_name_pattern: str = "" - - # Class-level prerequisites configuration (overridden by subclasses) - download_prerequisites: List[str] = [ - "aria2", # High-speed multi-connection downloader - "gnupg", # For GPG signature verification, python-gnupg is just a wrapper - ] - - extract_prerequisites: List[str] = [ - "bzip2", # Required for .tar.bz2 archives (GCC prerequisites) - "xz-utils", # Required for extracting .xz archives (toolchain sources) - ] - - build_prerequisites: List[str] = [] # To be defined by subclasses if needed - - # Where the component will be deployed - host_system: str = "linux" - host_machine: str = "x86_64" - - # Where the constructed output (like clice binary) runs on - target_system: str = host_system - target_machine: str = host_machine - - def __init__(self, name: str, version: str = "unknown"): - self.name = name - self.version = version - - @property - def package_dir(self) -> str: - return os.path.join(RELEASE_PACKAGE_DIR, self.versioned_name) - - @property - def cache_dir(self) -> str: - return os.path.join(BUILD_CACHE_DIR, self.versioned_name) - - @property - def work_dir(self) -> str: - return os.path.join(WORKDIR_ROOT, self.versioned_name) - - @property - def versioned_name(self) -> str: - return f"{self.name}-{self.version}" - - @property - def tarball_name(self) -> str: - if not self.tarball_name_pattern: - raise ValueError(f"Component '{self.name}' missing required tarball_name_pattern") - - return self.tarball_name_pattern.format(version=self.version, system=self.host_system, machine=self.host_machine) - - @property - def tarball_url(self) -> str: - if not self.base_url: - raise ValueError(f"Component '{self.name}' missing required base_url") - if not self.tarball_name_pattern: - raise ValueError(f"Component '{self.name}' missing required tarball_name_pattern") - - formatted_base_url = self.base_url.format(version=self.version, system=self.host_system, machine=self.host_machine) - return f"{formatted_base_url}/{self.tarball_name}" - - @property - def verification_name(self) -> str: - if not self.verification_name_pattern: - raise ValueError(f"Component '{self.name}' missing required verification_name_pattern") - - return self.verification_name_pattern.format(version=self.version, system=self.host_system, machine=self.host_machine) - - @property - def verification_url(self) -> str: - formatted_base_url = self.base_url.format(version=self.version, system=self.host_system, machine=self.host_machine) - return f"{formatted_base_url}/{self.verification_name}" - - @property - def host_triplet(self) -> str: - return f"{self.host_machine}-{self.host_system}-gnu" - 
- @property - def target_triplet(self) -> str: - return f"{self.target_machine}-{self.target_system}-gnu" - - -class ToolchainSubComponent(Component): - def __init__(self, name: str, parent_component: Component): - version = TOOLCHAIN_VERSIONS[name] - super().__init__(name, version) - self.parent_component = parent_component - - @property - def package_dir(self) -> str: - return os.path.join(self.parent_component.package_dir, self.versioned_name) - - @property - def cache_dir(self) -> str: - return os.path.join(self.parent_component.cache_dir, self.versioned_name) - - @property - def work_dir(self) -> str: - return os.path.join(self.parent_component.work_dir, self.versioned_name) - - @property - def extracted_dir(self) -> str: - return self.src_dir - - @property - def src_dir(self) -> str: - return os.path.join(self.work_dir, "src") - - @property - def build_dir(self) -> str: - return os.path.join(self.src_dir, "build") - -# ======================================================================== -# ๐Ÿ”ง Concrete Component Classes -# ======================================================================== - -class APTComponent(Component): - - download_prerequisites: List[str] = Component.download_prerequisites + [ - "apt-rdepends", # For resolving APT package dependencies - ] - - def __init__(self): - super().__init__("apt") - - @property - def all_packages(self) -> List[str]: - return [ - "git", # For XMake initialize - "ca-certificates", # For ssl verification, git needs it - "ninja", # For CMake and XMake build system - "curl", # For XMake initialize - "make" # For XMake initialize - ] - - -class UVComponent(Component): - base_url = "https://github.com/astral-sh/uv/releases/download/{version}" - tarball_name_pattern = "uv-{machine}-unknown-linux-gnu.tar.gz" - - def __init__(self): - version = TOOLCHAIN_VERSIONS["uv"] - super().__init__("uv", version) - self.python_version = TOOLCHAIN_VERSIONS["python"] - - @property - def tarball_cache_dir(self) -> str: - return os.path.join(self.cache_dir, "tarball") - - @property - def tarball_package_dir(self) -> str: - return os.path.join(self.package_dir, "tarball") - - @property - def install_dir(self) -> str: - return "/root/.local/bin" - - @property - def packages_package_dir(self) -> str: - return os.path.join(self.package_dir, "uv-packages") - -class XMakeComponent(Component): - base_url = "https://github.com/xmake-io/xmake/releases/download/v{version}" - tarball_name_pattern = "xmake-bundle-v{version}.{system}.{machine}" - - def __init__(self): - version = TOOLCHAIN_VERSIONS["xmake"] - super().__init__("xmake", version) - -class CMakeComponent(Component): - base_url = "https://github.com/Kitware/CMake/releases/download/v{version}" - tarball_name_pattern = "cmake-{version}-linux-x86_64.sh" - verification_name_pattern = "cmake-{version}-SHA-256.txt" - - def __init__(self): - version = TOOLCHAIN_VERSIONS["cmake"] - super().__init__("cmake", version) - -class P7ZipComponent(Component): - build_prerequisites: List[str] = [ - "p7zip-full", - ] - - compression_level: str = "9" # Maximum compression (0-9) - - def __init__(self): - # p7zip doesn't have a version we track, using "system" as placeholder - super().__init__("p7zip", "system") - - @property - def compression_options(self) -> List[str]: - return [ - f"-t7z", # Archive type - f"-mx={self.compression_level}", # Compression level - "-mmt=on", # Use all available CPU cores - "-ms=on" # Better compression for similar files - ] - - @property - def sfx_option(self) -> str: - return 
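(Aside: the pattern-based properties above are easiest to see with a toy subclass. The sketch below is illustrative only — `DemoComponent`, its URL, and its version are hypothetical, not part of this patch; the same resolution logic lives on in the relocated config under config/docker_build_stages/.)

    class DemoComponent(Component):
        base_url = "https://example.org/releases/{version}"
        tarball_name_pattern = "demo-{version}-{system}-{machine}.tar.xz"

        def __init__(self):
            super().__init__("demo", "1.2.3")

    demo = DemoComponent()
    # versioned_name keys every per-component directory (package_dir, cache_dir, work_dir)
    assert demo.versioned_name == "demo-1.2.3"
    # tarball_url = formatted base_url + "/" + formatted tarball_name
    assert demo.tarball_url == "https://example.org/releases/1.2.3/demo-1.2.3-linux-x86_64.tar.xz"
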
f"-sfx7zCon.sfx" - -class ToolchainComponent(Component): - - def __init__(self): - super().__init__("toolchain", "custom") - - self.glibc: GlibcSubComponent = GlibcSubComponent(self) - self.gcc: GccSubComponent = GccSubComponent(self) - self.llvm: LlvmSubComponent = LlvmSubComponent(self) - self.linux: LinuxSubComponent = LinuxSubComponent(self) - - self.sub_components: list[ToolchainSubComponent] = [ - self.glibc, - self.gcc, - self.llvm, - self.linux, - ] - - @property - def sysroot_dir(self) -> str: - return f"{self.package_dir}/sysroot/{self.host_triplet}/{self.target_triplet}/glibc{self.glibc.version}-libstdc++{self.gcc.version}-linux{self.linux.version}" - -# ======================================================================== -# ๐Ÿงฉ Toolchain Sub-Component Classes -# ======================================================================== - -class GlibcSubComponent(ToolchainSubComponent): - - base_url = "https://ftpmirror.gnu.org/gnu/glibc" - tarball_name_pattern = "glibc-{version}.tar.xz" - verification_name_pattern = "glibc-{version}.tar.xz.sig" - build_prerequisites: List[str] = [ - "make", - "binutils", - "gawk", # Text processing (required by glibc build system) - "bison", # Parser generator (required by glibc build system) - "gcc-9", # GNU C compiler version 9 (for glibc < 2.36) - - *ToolchainSubComponent.build_prerequisites - ] - - def __init__(self, parent_component: ToolchainComponent): - super().__init__("glibc", parent_component) - - -class GccSubComponent(ToolchainSubComponent): - - base_url = "https://ftpmirror.gnu.org/gnu/gcc/gcc-{version}" - tarball_name_pattern = "gcc-{version}.tar.xz" - verification_name_pattern = "gcc-{version}.tar.xz.sig" - build_prerequisites: List[str] = [ - "make", - "binutils", - "file", # File type identification (libcc1 requires this tool) - - "gcc-14", - "g++-14", - "libstdc++-14-dev", - - *ToolchainSubComponent.build_prerequisites - ] - - def __init__(self, parent_component: ToolchainComponent): - super().__init__("gcc", parent_component) - - @property - def target_libs(self) -> List[str]: - """Selective GCC library build targets.""" - return [ - "libgcc", # Low-level runtime support library (exception handling, etc.) 
- "libstdc++-v3", # C++ standard library implementation - "libsanitizer", # Address/memory/thread sanitizer runtime libraries - "libatomic", # Atomic operations library for lock-free programming - "libbacktrace", # Stack backtrace support for debugging - "libgomp", # OpenMP parallel programming runtime - "libquadmath" # Quadruple precision math library - ] - - -class LlvmSubComponent(ToolchainSubComponent): - base_url = "https://github.com/llvm/llvm-project/releases/download/llvmorg-{version}" - tarball_name_pattern = "llvm-project-{version}.src.tar.xz" - verification_name_pattern = "llvm-project-{version}.src.tar.xz.sig" - - download_prerequisites = ToolchainSubComponent.download_prerequisites + [ - "git", # Required for git clone llvm - ] - - def __init__(self, parent_component: ToolchainComponent): - super().__init__("llvm", parent_component) - - -class LinuxSubComponent(ToolchainSubComponent): - base_url = "https://github.com/torvalds/linux/archive/refs/tags" - tarball_name_pattern = "v{version}.tar.gz" - verification_name_pattern = "" - build_prerequisites: List[str] = [ - "make", - "binutils", - "rsync", # File synchronization (Linux kernel headers) - - "gcc-9", # Even though we don't build the kernel, configure requires gcc - - *ToolchainSubComponent.build_prerequisites - ] - - def __init__(self, parent_component: ToolchainComponent): - super().__init__("linux", parent_component) - -class CliceSetupScriptsComponent(Component): - def __init__(self): - super().__init__("clice-setup-scripts", "project") - - @property - def files_to_copy(self) -> list[str]: - return [ - 'config/build_config.py', - 'config/default-toolchain-version.json', - 'docker/linux/utility/local_setup.py', - 'docker/linux/utility/build_utils.py', - ] - -class BashrcComponent(Component): - """Bash configuration with environment variables and container entrypoint.""" - - def __init__(self): - super().__init__("bashrc", "project") - - @property - def bashrc_path(self) -> str: - return os.path.join(self.package_dir, ".bashrc") - - @property - def entrypoint_script_source(self) -> str: - return os.path.join(PROJECT_ROOT, "docker/linux/container-entrypoint.sh") - -# ======================================================================== -# ๐Ÿ—๏ธ Component Instances and Build Stage Organization -# ======================================================================== - -APT = APTComponent() -UV = UVComponent() -XMAKE = XMakeComponent() -CMAKE = CMakeComponent() -P7ZIP = P7ZipComponent() -TOOLCHAIN = ToolchainComponent() -BASHRC = BashrcComponent() - -# ======================================================================== -# ๐Ÿ“‹ Build Stage Component Groups -# ======================================================================== - -DEPENDENCIES_DOWNLOADER_STAGE: list[Component] = [ - APT, - UV, - CMAKE, - XMAKE, -] - -IMAGE_PACKER_STAGE: list[Component] = [ - BASHRC, -] - -TOOLCHAIN_BUILDER_STAGE: list[Component] = [ - TOOLCHAIN, -] - -ALL_COMPONENTS = [ - *DEPENDENCIES_DOWNLOADER_STAGE, - *IMAGE_PACKER_STAGE, - *TOOLCHAIN_BUILDER_STAGE, -] diff --git a/docker/linux/Dockerfile b/docker/linux/Dockerfile index 62b0b7ff..7e89f9fa 100644 --- a/docker/linux/Dockerfile +++ b/docker/linux/Dockerfile @@ -27,6 +27,7 @@ ARG UV_TARBALL_DIR_NAME=tarball # Python build scripts communicate via these environment variables ARG PYTHON_BUILD_SCRIPT_BASE_ENV_VARIABLES="\ +COMPILER=${COMPILER} \ CLICE_WORKDIR=${CLICE_WORKDIR} \ RELEASE_PACKAGE_DIR=${RELEASE_PACKAGE_DIR} \ PACKED_RELEASE_PACKAGE_PATH=${PACKED_RELEASE_PACKAGE_PATH} 
diff --git a/docker/linux/Dockerfile b/docker/linux/Dockerfile
index 62b0b7ff..7e89f9fa 100644
--- a/docker/linux/Dockerfile
+++ b/docker/linux/Dockerfile
@@ -27,6 +27,7 @@ ARG UV_TARBALL_DIR_NAME=tarball
 
 # Python build scripts communicate via these environment variables
 ARG PYTHON_BUILD_SCRIPT_BASE_ENV_VARIABLES="\
+COMPILER=${COMPILER} \
 CLICE_WORKDIR=${CLICE_WORKDIR} \
 RELEASE_PACKAGE_DIR=${RELEASE_PACKAGE_DIR} \
 PACKED_RELEASE_PACKAGE_PATH=${PACKED_RELEASE_PACKAGE_PATH} \
@@ -53,8 +54,8 @@ ENV DEBIAN_FRONTEND=noninteractive
 
 # Do NOT copy all config at once, or all stages would be rebuilt when any file changes
 # Only copy what is needed for this stage
-COPY docker/linux/utility/pyproject.toml ${CLICE_WORKDIR}/docker/linux/utility/pyproject.toml
-COPY config/default-toolchain-version.json ${CLICE_WORKDIR}/config/default-toolchain-version.json
+COPY docker/linux/utility/pyproject.toml ${CLICE_WORKDIR}/docker/linux/utility/pyproject.toml
+COPY config/docker_build_stages/default-toolchain-version.json ${CLICE_WORKDIR}/config/docker_build_stages/default-toolchain-version.json
 
 # Install minimal system dependencies, uv, and Python
 RUN --mount=type=cache,target=${APT_CACHE_DIR},sharing=locked,id=python-build-env-apt \
@@ -75,7 +76,7 @@ RUN --mount=type=cache,target=${APT_CACHE_DIR},sharing=locked,id=python-build-en
     apt install -y --no-install-recommends -o DPkg::Lock::Timeout=-1 curl jq ca-certificates
 
     # Get uv version from configuration
-    UV_VERSION=$(jq -r .uv ${CLICE_WORKDIR}/config/default-toolchain-version.json)
+    UV_VERSION=$(jq -r .uv ${CLICE_WORKDIR}/config/docker_build_stages/default-toolchain-version.json)
    echo "📦 Installing uv version: $UV_VERSION"
 
     # Determine architecture for uv standalone build
@@ -130,7 +131,7 @@ RUN --mount=type=cache,target=${APT_CACHE_DIR},sharing=locked,id=python-build-en
     uv --version
 
     # Get Python version from configuration
-    PYTHON_VERSION=$(jq -r .python ${CLICE_WORKDIR}/config/default-toolchain-version.json)
+    PYTHON_VERSION=$(jq -r .python ${CLICE_WORKDIR}/config/docker_build_stages/default-toolchain-version.json)
     echo "🐍 Installing Python version: $PYTHON_VERSION"
 
     # Install Python using UV with package_dir as cache
@@ -161,16 +162,18 @@ WORKDIR ${CLICE_WORKDIR}
 FROM base-python-environment-for-build AS toolchain-builder
 LABEL description="Builds custom compiler toolchain"
 
+ARG COMPILER
 ARG CLICE_WORKDIR
 ARG APT_CACHE_DIR
 ARG APT_STATE_CACHE_DIR
 ARG BUILD_CACHE_DIR
 ARG PYTHON_BUILD_SCRIPT_BASE_ENV_VARIABLES
 
-COPY config/build_config.py ${CLICE_WORKDIR}/config/build_config.py
-COPY config/default-toolchain-version.json ${CLICE_WORKDIR}/config/default-toolchain-version.json
-COPY docker/linux/utility/build_utils.py ${CLICE_WORKDIR}/docker/linux/utility/build_utils.py
-COPY docker/linux/utility/build_clice_compiler_toolchain.py ${CLICE_WORKDIR}/docker/linux/utility/build_clice_compiler_toolchain.py
+COPY config/docker_build_stages/common.py ${CLICE_WORKDIR}/config/docker_build_stages/common.py
+COPY config/docker_build_stages/default-toolchain-version.json ${CLICE_WORKDIR}/config/docker_build_stages/default-toolchain-version.json
+COPY config/docker_build_stages/toolchain_config.py ${CLICE_WORKDIR}/config/docker_build_stages/toolchain_config.py
+COPY docker/linux/utility/build_utils.py ${CLICE_WORKDIR}/docker/linux/utility/build_utils.py
+COPY docker/linux/utility/build_clice_compiler_toolchain.py ${CLICE_WORKDIR}/docker/linux/utility/build_clice_compiler_toolchain.py
 
 # Build the custom toolchain (Python script handles all dependencies)
 RUN --mount=type=cache,target=${APT_CACHE_DIR},sharing=locked,id=toolchain-builder-apt \
@@ -194,19 +197,21 @@ SCRIPT
 FROM base-python-environment-for-build AS dependencies-downloader
 LABEL description="Downloads dev-container dependencies"
 
+ARG COMPILER
 ARG CLICE_WORKDIR
 ARG APT_CACHE_DIR
 ARG APT_STATE_CACHE_DIR
 ARG BUILD_CACHE_DIR
 ARG PYTHON_BUILD_SCRIPT_BASE_ENV_VARIABLES
 
-COPY config/build_config.py ${CLICE_WORKDIR}/config/build_config.py
-COPY config/default-toolchain-version.json ${CLICE_WORKDIR}/config/default-toolchain-version.json
-COPY docker/linux/utility/build_utils.py ${CLICE_WORKDIR}/docker/linux/utility/build_utils.py
-COPY docker/linux/utility/download_dependencies.py ${CLICE_WORKDIR}/docker/linux/utility/download_dependencies.py
+COPY config/docker_build_stages/common.py ${CLICE_WORKDIR}/config/docker_build_stages/common.py
+COPY config/docker_build_stages/default-toolchain-version.json ${CLICE_WORKDIR}/config/docker_build_stages/default-toolchain-version.json
+COPY config/docker_build_stages/dependencies_config.py ${CLICE_WORKDIR}/config/docker_build_stages/dependencies_config.py
+COPY docker/linux/utility/build_utils.py ${CLICE_WORKDIR}/docker/linux/utility/build_utils.py
+COPY docker/linux/utility/download_dependencies.py ${CLICE_WORKDIR}/docker/linux/utility/download_dependencies.py
 
 # for download python dependencies
-COPY pyproject.toml ${CLICE_WORKDIR}/pyproject.toml
+COPY tests/pyproject.toml ${CLICE_WORKDIR}/tests/pyproject.toml
 
 # Setup Python project environment and download dependencies
 RUN --mount=type=cache,target=${APT_CACHE_DIR},sharing=locked,id=dependencies-downloader-apt \
@@ -230,6 +235,7 @@ SCRIPT
 FROM base-python-environment-for-build AS image-packer
 LABEL description="Merges toolchain and dependencies into final release package"
 
+ARG COMPILER
 ARG CLICE_WORKDIR
 ARG RELEASE_PACKAGE_DIR
 ARG APT_CACHE_DIR
@@ -239,17 +245,23 @@ ARG PYTHON_BUILD_SCRIPT_BASE_ENV_VARIABLES
 ARG SETUP_SCRIPTS_DIR
 
 # For execution in this layer only
-COPY config/build_config.py ${CLICE_WORKDIR}/config/build_config.py
-COPY config/default-toolchain-version.json ${CLICE_WORKDIR}/config/default-toolchain-version.json
-COPY docker/linux/container-entrypoint.sh ${CLICE_WORKDIR}/docker/linux/container-entrypoint.sh
-COPY docker/linux/utility/build_utils.py ${CLICE_WORKDIR}/docker/linux/utility/build_utils.py
-COPY docker/linux/utility/create_release_package.py ${CLICE_WORKDIR}/docker/linux/utility/create_release_package.py
-
-# For package
-COPY config/build_config.py ${SETUP_SCRIPTS_DIR}/config/build_config.py
-COPY config/default-toolchain-version.json ${SETUP_SCRIPTS_DIR}/config/default-toolchain-version.json
-COPY docker/linux/utility/local_setup.py ${SETUP_SCRIPTS_DIR}/docker/linux/utility/local_setup.py
-COPY docker/linux/utility/build_utils.py ${SETUP_SCRIPTS_DIR}/docker/linux/utility/build_utils.py
+COPY config/docker_build_stages/common.py ${CLICE_WORKDIR}/config/docker_build_stages/common.py
+COPY config/docker_build_stages/default-toolchain-version.json ${CLICE_WORKDIR}/config/docker_build_stages/default-toolchain-version.json
+COPY config/docker_build_stages/toolchain_config.py ${CLICE_WORKDIR}/config/docker_build_stages/toolchain_config.py
+COPY config/docker_build_stages/dependencies_config.py ${CLICE_WORKDIR}/config/docker_build_stages/dependencies_config.py
+COPY config/docker_build_stages/package_config.py ${CLICE_WORKDIR}/config/docker_build_stages/package_config.py
+COPY docker/linux/utility/container-entrypoint.sh ${CLICE_WORKDIR}/docker/linux/utility/container-entrypoint.sh
+COPY docker/linux/utility/build_utils.py ${CLICE_WORKDIR}/docker/linux/utility/build_utils.py
+COPY docker/linux/utility/create_release_package.py ${CLICE_WORKDIR}/docker/linux/utility/create_release_package.py
+
+# For package (setup scripts)
+COPY config/docker_build_stages/common.py ${SETUP_SCRIPTS_DIR}/config/docker_build_stages/common.py
+COPY config/docker_build_stages/default-toolchain-version.json ${SETUP_SCRIPTS_DIR}/config/docker_build_stages/default-toolchain-version.json
+COPY config/docker_build_stages/dependencies_config.py ${SETUP_SCRIPTS_DIR}/config/docker_build_stages/dependencies_config.py
+COPY config/docker_build_stages/toolchain_config.py ${SETUP_SCRIPTS_DIR}/config/docker_build_stages/toolchain_config.py
+COPY config/docker_build_stages/package_config.py ${SETUP_SCRIPTS_DIR}/config/docker_build_stages/package_config.py
+COPY docker/linux/utility/build_utils.py ${SETUP_SCRIPTS_DIR}/docker/linux/utility/build_utils.py
+COPY docker/linux/utility/local_setup.py ${SETUP_SCRIPTS_DIR}/docker/linux/utility/local_setup.py
 
 # Copy outputs from previous stages
 # Merge by RELEASE_PACKAGE_DIR structure, each component has its own directory
@@ -296,8 +308,8 @@ COPY --from=image-packer ${PACKED_RELEASE_PACKAGE_PATH} ${PACKED_RELEASE_PACKAGE
 # Instead of using local build.sh and Dockerfile, we use the version packed here
 # So we could make breaking changes to build scripts without breaking released images
 COPY docker/linux/utility/common.sh ${CLICE_WORKDIR}/docker/linux/utility/common.sh
-COPY docker/linux/build.sh ${CLICE_WORKDIR}/docker/linux/build.sh
-COPY docker/linux/Dockerfile ${CLICE_WORKDIR}/docker/linux/Dockerfile
+COPY docker/linux/build.sh ${CLICE_WORKDIR}/docker/linux/build.sh
+COPY docker/linux/Dockerfile ${CLICE_WORKDIR}/docker/linux/Dockerfile
 
 # ========================================================================
 # 🏗️ Stage 5: Development Image (Expanded)
@@ -305,7 +317,7 @@ COPY docker/linux/Dockerfile ${CLICE_WORKDIR}/docker/linux/Dockerfile
 FROM ${PACKED_IMAGE_NAME} AS expanded-image
 LABEL description="Fully expanded development image"
 
-
+ARG COMPILER
 ARG CLICE_WORKDIR
 ARG RELEASE_PACKAGE_DIR
 ARG PACKED_RELEASE_PACKAGE_PATH
diff --git a/docker/linux/build.sh b/docker/linux/build.sh
index 54098544..f640ca00 100644
--- a/docker/linux/build.sh
+++ b/docker/linux/build.sh
@@ -108,7 +108,8 @@ elif [ "$BUILD_STAGE" = "expanded-image" ]; then
     TARGET_IMAGE_NAME="$EXPANDED_IMAGE_NAME"
 else
     TARGET_IMAGE_NAME="clice-dev_container-debug_build-$BUILD_STAGE"
-    echo "🔧 Debug Building Intermediate Stage: $BUILD_STAGE" >&2; usage;
+    BUILD_COMMAND_DEBUG_EXTRA="--on always"
+    echo "🔧 Debug Building Intermediate Stage: $BUILD_STAGE" >&2;
 fi
 
 # ========================================================================
@@ -183,7 +184,7 @@ if [ "$DEBUG" = "true" ]; then
     echo "🐛 Debug mode enabled (BUILDX_EXPERIMENTAL=1)"
     export BUILDX_EXPERIMENTAL=1
 
-    BUILD_COMMAND="docker buildx debug --invoke /bin/bash build"
+    BUILD_COMMAND="docker buildx debug --invoke /bin/bash ${BUILD_COMMAND_DEBUG_EXTRA} build"
 else
     BUILD_COMMAND="docker buildx build"
 fi
diff --git a/docker/linux/run.sh b/docker/linux/run.sh
index a1fb3ca3..d1a22242 100644
--- a/docker/linux/run.sh
+++ b/docker/linux/run.sh
@@ -175,12 +175,44 @@ if ! docker image inspect "${EXPANDED_IMAGE_NAME}" >/dev/null 2>&1; then
 
     # Run packed image container and execute its internal build.sh for expansion
     # To keep the expansion process consistent and reliable, we use the build.sh script from the container itself.
+    #
+    # Newer Docker releases no longer ship the CLI inside containers without an extra
+    # installation, so we can't invoke docker buildx from within the container directly.
+    # Instead, we use a chroot approach:
+    #   1. Mount the host root directory to a temp folder inside the container
+    #   2. Copy /clice to a host temp directory
+    #   3. chroot into the host root and execute build.sh
+    #
     # Mounts:
-    #   • /var/run/docker.sock - Allow container to build images on host Docker daemon
+    #   • / (host root) - Mount to temp directory for chroot access
+
+    # Create temp directory on host for chroot
+    HOST_TEMP_DIR=$(mktemp -d -p /tmp clice-expand.XXXXXX)
+
+    echo "📁 Created host temp directory: ${HOST_TEMP_DIR}"
+    echo "🔄 Preparing chroot environment..."
+
     if docker run --rm \
-        -v /var/run/docker.sock:/var/run/docker.sock \
+        -v "/:/host-root" \
+        -e "HOST_TEMP_DIR=${HOST_TEMP_DIR}" \
+        -e "COMPILER=${COMPILER}" \
+        -e "VERSION=${VERSION}" \
         "${PACKED_IMAGE_NAME}" \
-        /bin/bash -c "cd ${CLICE_DIR} && ./docker/linux/build.sh --stage expanded-image --compiler ${COMPILER} --version ${VERSION}"; then
+        /bin/bash -c '
+            set -e
+            echo "📦 Copying /clice to host temp directory..."
+            cp -r /clice "/host-root${HOST_TEMP_DIR}/"
+
+            echo "🔧 Executing build.sh via chroot..."
+            chroot /host-root /bin/bash -c "
+                cd ${HOST_TEMP_DIR}/clice && \
+                ./docker/linux/build.sh --stage expanded-image --compiler ${COMPILER} --version ${VERSION} --debug
+            "
+
+            echo "🧹 Cleaning up temp directory..."
+            rm -rf "/host-root${HOST_TEMP_DIR}"
+        '; then
+        # Clean up host temp directory (in case container cleanup failed)
+        rm -rf "${HOST_TEMP_DIR}" 2>/dev/null || true
         echo "========================================================================="
         echo "✅ EXPANSION COMPLETED SUCCESSFULLY"
         echo "========================================================================="
diff --git a/docker/linux/test_build.sh b/docker/linux/test_build.sh
deleted file mode 100644
index 78d17180..00000000
--- a/docker/linux/test_build.sh
+++ /dev/null
@@ -1,301 +0,0 @@
-#!/bin/bash
-# ========================================================================
-# 🧪 Clice Build Test Script
-# ========================================================================
-# File: docker/linux/test_build.sh
-# Purpose: Test building Clice with different configurations
-#
-# This script tests the complete Clice build process with four different
-# configurations to ensure the development environment is working correctly.
-# It also runs xmake tests to validate the built binaries.
-# ========================================================================
-
-set -e
-
-# ========================================================================
-# 🔧 Environment Setup
-# ========================================================================
-
-# SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
-CONTAINER_NAME=""
-COMPILER="clang"
-PARALLEL_JOBS=$(nproc 2>/dev/null || echo "4")
-BUILD_DIR_BASE="/tmp/clice-test-builds"
-
-# ========================================================================
-# ⚙️ Configuration Matrix
-# ========================================================================
-
-# Define the four build configurations to test
-declare -A BUILD_CONFIGS=(
-    ["debug"]="--mode=debug"
-    ["release"]="--mode=release"
-    ["debug-optimized"]="--mode=debug --release=y"
-    ["release-optimized"]="--mode=release --release=y"
-)
-
-# ========================================================================
-# 📚 Usage Information
-# ========================================================================
-
-usage() {
-cat <<EOF
-USAGE: $0 [OPTIONS]
-
-OPTIONS:
-    --container <name>       Use specific container name
-    --compiler <compiler>    Target compiler (default: ${COMPILER})
-    --jobs <n>               Number of parallel jobs (default: ${PARALLEL_JOBS})
-    --config <config>        Test specific configuration only
-    --help, -h               Show this help message
-
-CONFIGURATIONS:
-    debug               Debug build (--mode=debug)
-    release             Release build (--mode=release)
-    debug-optimized     Debug with optimizations (--mode=debug --release=y)
-    release-optimized   Release with optimizations (--mode=release --release=y)
-
-EXAMPLES:
-    $0                            Test all configurations
-    $0 --config debug             Test debug configuration only
-    $0 --compiler gcc --jobs 8    Use GCC with 8 parallel jobs
-
-This script will:
-    1. Set up separate build directories for each configuration
-    2. Build Clice with each configuration
-    3. Run xmake tests for each build
-    4. Report results and timing information
-EOF
-}
-
-# ========================================================================
-# 🔍 Command Line Parsing
-# ========================================================================
-
-SPECIFIC_CONFIG=""
-
-while [ "$#" -gt 0 ]; do
-    case "$1" in
-        --container)
-            CONTAINER_NAME="$2"; shift 2;;
-        --compiler)
-            COMPILER="$2"; shift 2;;
-        --jobs)
-            PARALLEL_JOBS="$2"; shift 2;;
-        --config)
-            SPECIFIC_CONFIG="$2"; shift 2;;
-        -h|--help)
-            usage; exit 0;;
-        *)
-            echo "❌ Unknown parameter: $1" >&2; usage; exit 1;;
-    esac
-done
-
-# Auto-detect container if not specified
-if [ -z "$CONTAINER_NAME" ]; then
-    CONTAINER_NAME="clice-dev-linux-${COMPILER}"
-fi
-
-# ========================================================================
-# 🧪 Test Functions
-# ========================================================================
-
-run_in_container() {
-    local cmd="$1"
-    echo "🏃 Running in container: $cmd"
-    docker exec -w "/clice" "$CONTAINER_NAME" bash -c "$cmd"
-}
-
-test_build_configuration() {
-    local config_name="$1"
-    local build_args="$2"
-    local build_dir="$BUILD_DIR_BASE/$config_name"
-
-    echo ""
-    echo "========================================================================="
-    echo "🔨 Testing Configuration: $config_name"
-    echo "========================================================================="
-    echo "📁 Build Directory: $build_dir"
-    echo "⚙️ Build Arguments: $build_args"
-    echo "🔧 Compiler: $COMPILER"
-    echo "⚡ Parallel Jobs: $PARALLEL_JOBS"
-    echo "========================================================================="
-
-    local start_time
-    start_time=$(date +%s)
-
-    # Create build directory
-    run_in_container "mkdir -p $build_dir"
-
-    # Configure build
-    echo "🔧 Configuring build..."
-    if ! run_in_container "cd $build_dir && xmake config $build_args --jobs=$PARALLEL_JOBS"; then
-        echo "❌ Configuration failed for $config_name"
-        return 1
-    fi
-
-    # Build project
-    echo "🏗️ Building project..."
-    if ! run_in_container "cd $build_dir && xmake build --jobs=$PARALLEL_JOBS"; then
-        echo "❌ Build failed for $config_name"
-        return 1
-    fi
-
-    # Run tests
-    echo "🧪 Running tests..."
-    if ! run_in_container "cd $build_dir && xmake test"; then
-        echo "⚠️ Tests failed for $config_name (build succeeded)"
-        # Don't return error for test failures, just note them
-    fi
-
-    local end_time
-    end_time=$(date +%s)
-    local duration=$((end_time - start_time))
-
-    echo "✅ Configuration $config_name completed in ${duration}s"
-
-    # Store build info
-    run_in_container "cd $build_dir && echo 'Build completed at: $(date)' > build_info.txt"
-    run_in_container "cd $build_dir && echo 'Build duration: ${duration}s' >> build_info.txt"
-    run_in_container "cd $build_dir && echo 'Configuration: $config_name' >> build_info.txt"
-    run_in_container "cd $build_dir && echo 'Build args: $build_args' >> build_info.txt"
-
-    return 0
-}
-
-check_prerequisites() {
-    echo "🔍 Checking prerequisites..."
-
-    # Check if container exists and is running
-    if ! docker ps --format '{{.Names}}' | grep -q "^${CONTAINER_NAME}$"; then
-        echo "❌ Container $CONTAINER_NAME is not running"
-        echo "💡 Start it with: ./docker/linux/run.sh --compiler $COMPILER"
-        exit 1
-    fi
-
-    # Check if xmake is available in container
-    if ! run_in_container "command -v xmake >/dev/null"; then
-        echo "❌ xmake is not available in container $CONTAINER_NAME"
-        echo "💡 Make sure the container setup completed successfully"
-        exit 1
-    fi
-
-    # Check if we're in the clice project directory
-    if ! run_in_container "test -f xmake.lua"; then
-        echo "❌ xmake.lua not found in container /clice directory"
-        echo "💡 Make sure the project is properly mounted in the container"
-        exit 1
-    fi
-
-    echo "✅ Prerequisites check passed"
-}
-
-generate_report() {
-    echo ""
-    echo "========================================================================="
-    echo "📊 BUILD TEST REPORT"
-    echo "========================================================================="
-
-    local total_configs=0
-    local successful_configs=0
-    local failed_configs=()
-
-    # Count and report results
-    for config_name in "${!BUILD_CONFIGS[@]}"; do
-        if [ -n "$SPECIFIC_CONFIG" ] && [ "$config_name" != "$SPECIFIC_CONFIG" ]; then
-            continue
-        fi
-
-        total_configs=$((total_configs + 1))
-
-        local build_dir="$BUILD_DIR_BASE/$config_name"
-        if run_in_container "test -f $build_dir/build_info.txt" 2>/dev/null; then
-            successful_configs=$((successful_configs + 1))
-            echo "✅ $config_name: SUCCESS"
-            run_in_container "cat $build_dir/build_info.txt" | sed 's/^/    /'
-        else
-            failed_configs+=("$config_name")
-            echo "❌ $config_name: FAILED"
-        fi
-        echo ""
-    done
-
-    # Summary
-    echo "========================================================================="
-    echo "📈 SUMMARY"
-    echo "========================================================================="
-    echo "🎯 Total Configurations: $total_configs"
-    echo "✅ Successful: $successful_configs"
-    echo "❌ Failed: ${#failed_configs[@]}"
-
-    if [ ${#failed_configs[@]} -gt 0 ]; then
-        echo ""
-        echo "Failed configurations:"
-        for config in "${failed_configs[@]}"; do
-            echo "  • $config"
-        done
-    fi
-
-    echo ""
-    echo "📁 Build artifacts location: $BUILD_DIR_BASE"
-    echo "🐳 Container: $CONTAINER_NAME"
-    echo "🔧 Compiler: $COMPILER"
-    echo "========================================================================="
-
-    # Return exit code based on results
-    if [ ${#failed_configs[@]} -eq 0 ]; then
-        return 0
-    else
-        return 1
-    fi
-}
-
-# ========================================================================
-# 🚀 Main Execution
-# ========================================================================
-
-main() {
-    echo "========================================================================="
-    echo "🧪 CLICE BUILD TEST RUNNER"
-    echo "========================================================================="
-    echo "🐳 Container: $CONTAINER_NAME"
-    echo "🔧 Compiler: $COMPILER"
-    echo "⚡ Parallel Jobs: $PARALLEL_JOBS"
-    if [ -n "$SPECIFIC_CONFIG" ]; then
-        echo "🎯 Testing Configuration: $SPECIFIC_CONFIG"
-    else
-        echo "🎯 Testing All Configurations: ${!BUILD_CONFIGS[*]}"
-    fi
-    echo "========================================================================="
-
-    # Check prerequisites
-    check_prerequisites
-
-    # Clean up previous build directories
-    echo "🧹 Cleaning up previous build directories..."
-    run_in_container "rm -rf $BUILD_DIR_BASE"
-
-    # Test configurations
-    for config_name in "${!BUILD_CONFIGS[@]}"; do
-        # Skip if testing specific configuration
-        if [ -n "$SPECIFIC_CONFIG" ] && [ "$config_name" != "$SPECIFIC_CONFIG" ]; then
-            continue
-        fi
-
-        test_build_configuration "$config_name" "${BUILD_CONFIGS[$config_name]}" || true
-    done
-
-    # Generate final report
-    if generate_report; then
-        echo "🎉 All build tests completed successfully!"
-        exit 0
-    else
-        echo "💥 Some build tests failed!"
-        exit 1
-    fi
-}
-
-# Run main function
-main "$@"
\ No newline at end of file
diff --git a/docker/linux/utility/build_clice_compiler_toolchain.py b/docker/linux/utility/build_clice_compiler_toolchain.py
index 73eb55ab..c730c356 100644
--- a/docker/linux/utility/build_clice_compiler_toolchain.py
+++ b/docker/linux/utility/build_clice_compiler_toolchain.py
@@ -12,7 +12,7 @@ if project_root not in sys.path:
     sys.path.insert(0, project_root)
 
-from typing import Dict, Set
+from typing import Dict, List, Set
 
 from build_utils import (
     Job,
@@ -22,25 +22,23 @@
     install_extract_prerequisites,
     download_and_verify,
     extract_source,
+    extract_package,
 )
-from config.build_config import (
-    TOOLCHAIN_BUILD_ENV_VARS,
+from config.docker_build_stages.common import TOOLCHAIN_BUILD_ENV_VARS, COMPILER
+from config.docker_build_stages.toolchain_config import (
     TOOLCHAIN,
     ToolchainComponent,
     GccSubComponent,
     LinuxSubComponent,
-    GlibcSubComponent
+    GlibcSubComponent,
+    ZigSubComponent
 )
 
 # ========================================================================
 # 📦 Environment Setup Tasks
 # ========================================================================
 
-def update_apt() -> None:
-    print("🔄 [SETUP] Refreshing APT package database...")
-    run_command("apt update -o DPkg::Lock::Timeout=-1")
-
 def install_build_prerequisites(component: ToolchainComponent) -> None:
     """
     Note: We maintain multiple GCC versions because glibc requires
@@ -49,9 +47,8 @@ def install_build_prerequisites(component: ToolchainComponent) -> None:
     """
     # Collect all build prerequisites from sub-components
     all_prerequisites = set()
-    if hasattr(component, 'sub_components'):
-        for sub_component in component.sub_components:
-            all_prerequisites.update(sub_component.build_prerequisites)
+    for sub_component in component.sub_components:
+        all_prerequisites.update(sub_component.build_prerequisites)
 
     if not all_prerequisites:
         print(f"ℹ️ [SETUP] No build prerequisites for {component.name}")
@@ -247,6 +244,18 @@ def build_and_install_libstdcpp(component: GccSubComponent) -> None:
     run_command(f"make -j {install_targets}", cwd=component.build_dir, env=compiler_env)
 
     print(f"✅ [COMPLETE] C++ standard library build finished")
 
+# ========================================================================
+# ⚡ Zig Compiler Tasks
+# ========================================================================
+
+def extract_zig(component: ZigSubComponent) -> None:
+    archive_path = os.path.join(component.cache_dir, component.tarball_name)
+    extract_package(
+        archive_path=archive_path,
+        target_dir=component.package_dir,
+        strip_top_level=True
+    )
+
 # ========================================================================
 # 🎭 Main Build Orchestrator
 # ========================================================================
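(Aside: `extract_package(..., strip_top_level=True)` is defined in build_utils; roughly, it discards the archive's single top-level folder — e.g. a `zig-linux-x86_64-<version>/` prefix — so the contents land directly in `package_dir`. A simplified approximation of that behaviour, not the repository implementation:)

    import os
    import tarfile

    def extract_stripping_top_level(archive_path: str, target_dir: str) -> None:
        os.makedirs(target_dir, exist_ok=True)
        with tarfile.open(archive_path) as tar:
            for member in tar.getmembers():
                # Drop the leading "name-version/" path segment before extracting
                parts = member.path.split("/", 1)
                if len(parts) == 2 and parts[1]:
                    member.path = parts[1]
                    tar.extract(member, target_dir)
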
@@ -257,78 +266,74 @@ def main() -> None:
     print("🚀 ========================================================================")
     print(f"📁 Sysroot Directory: {TOOLCHAIN.sysroot_dir}")
     print(f"🎯 Target Architecture: {TOOLCHAIN.target_triplet} ({TOOLCHAIN.target_machine})")
-    print(f"📋 Components: glibc, Linux headers, libstdc++, LLVM (prepared)")
+    print(f"🔧 Selected Compiler: {COMPILER}")
     print("🚀 ========================================================================\n")
 
-    all_jobs: Dict[str, Job] = {
-        # 📦 System Setup Tasks
-        "update_apt": Job("update_apt", update_apt),
-        "install_download_prerequisites": Job("install_download_prerequisites", install_download_prerequisites, (TOOLCHAIN,)),
-        "install_extract_prerequisites": Job("install_extract_prerequisites", install_extract_prerequisites, (TOOLCHAIN,)),
-        "install_build_prerequisites": Job("install_build_prerequisites", install_build_prerequisites, (TOOLCHAIN,)),
-
-        # 📚 GNU C Library (glibc) Pipeline
-        "download_glibc": Job("download_glibc", download_and_verify, (TOOLCHAIN.glibc,)),
-        "extract_glibc": Job("extract_glibc", extract_source, (TOOLCHAIN.glibc,)),
-        "build_and_install_glibc": Job("build_and_install_glibc", build_and_install_glibc, (TOOLCHAIN.glibc, TOOLCHAIN.linux)),
-
-        # 🐧 Linux Kernel Headers Pipeline
-        "download_linux": Job("download_linux", download_and_verify, (TOOLCHAIN.linux,)),
-        "extract_linux": Job("extract_linux", extract_source, (TOOLCHAIN.linux,)),
-        "install_linux_headers": Job("install_linux_headers", install_linux_headers, (TOOLCHAIN.linux,)),
-
-        # 🛠️ GCC C++ Standard Library Pipeline
-        "download_gcc": Job("download_gcc", download_and_verify, (TOOLCHAIN.gcc,)),
-        "extract_gcc": Job("extract_gcc", extract_source, (TOOLCHAIN.gcc,)),
-        "download_gcc_prerequisites": Job("download_gcc_prerequisites", download_gcc_prerequisites, (TOOLCHAIN.gcc,)),
-        "build_and_install_libstdcpp": Job("build_and_install_libstdcpp", build_and_install_libstdcpp, (TOOLCHAIN.gcc,)),
-
-        # ⚡ LLVM Project Pipeline (prepared for future builds)
-        "download_llvm": Job("download_llvm", download_and_verify, (TOOLCHAIN.llvm,)),
-        "extract_llvm": Job("extract_llvm", extract_source, (TOOLCHAIN.llvm,)),
-    }
+    # Define all jobs with dependencies
+    install_download_prereq_job = Job("install_download_prerequisites", install_download_prerequisites, (TOOLCHAIN,))
+    install_extract_prereq_job = Job("install_extract_prerequisites", install_extract_prerequisites, (TOOLCHAIN,))
+    install_build_prereq_job = Job("install_build_prerequisites", install_build_prerequisites, (TOOLCHAIN,))
+
+    all_jobs = [
+        install_download_prereq_job,
+        install_extract_prereq_job,
+        install_build_prereq_job,
+    ]
 
-    dependency_graph: Dict[str, Set[str]] = {
-        "update_apt": set(),
-        "install_download_prerequisites": {"update_apt"},
-        "install_extract_prerequisites": {"update_apt"},
-        "install_build_prerequisites": {"update_apt"},
+    extend_jobs: List[Job] = []
+
+    # Conditional: Build gcc/clang toolchain (glibc + libstdc++) OR download zig
+    match COMPILER:
+        case "clang":
+            # Glibc Pipeline
+            download_glibc_job = Job("download_glibc", download_and_verify, (TOOLCHAIN.glibc,), [install_download_prereq_job])
+            extract_glibc_job = Job("extract_glibc", extract_source, (TOOLCHAIN.glibc,), [download_glibc_job, install_extract_prereq_job])
 
-        # 📚 glibc Build Pipeline
-        "download_glibc": {"install_download_prerequisites"},
-        "extract_glibc": {"download_glibc", "install_extract_prerequisites"},
-        "build_and_install_glibc": {
-            "extract_glibc",
-            "install_build_prerequisites",
-            "install_linux_headers"  # glibc requires Linux headers for compilation
-        },
+            # Linux Headers Pipeline
+            download_linux_job = Job("download_linux", download_and_verify, (TOOLCHAIN.linux,), [install_download_prereq_job])
+            extract_linux_job = Job("extract_linux", extract_source, (TOOLCHAIN.linux,), [download_linux_job, install_extract_prereq_job])
+            install_linux_headers_job = Job("install_linux_headers", install_linux_headers, (TOOLCHAIN.linux,), [extract_linux_job, install_build_prereq_job])
+
+            # Glibc build depends on Linux headers
+            build_glibc_job = Job("build_and_install_glibc", build_and_install_glibc, (TOOLCHAIN.glibc, TOOLCHAIN.linux),
+                                  [extract_glibc_job, install_build_prereq_job, install_linux_headers_job])
 
-        # 🐧 Linux Headers Pipeline (must complete before glibc build)
-        "download_linux": {"install_download_prerequisites"},
-        "extract_linux": {"download_linux", "install_extract_prerequisites"},
-        "install_linux_headers": {"extract_linux", "install_build_prerequisites"},
-
-        # 🛠️ GCC Pipeline (requires glibc and kernel headers)
-        "download_gcc": {"install_download_prerequisites"},
-        "extract_gcc": {"download_gcc", "install_extract_prerequisites"},
-        "download_gcc_prerequisites": {"extract_gcc"},
-        "build_and_install_libstdcpp": {
-            "download_gcc_prerequisites",  # GCC math libraries ready
-            "build_and_install_glibc",     # System library available
-            "install_linux_headers",       # Kernel interfaces available
-            "install_build_prerequisites"  # Build tools ready
-        },
-
-        # ⚡ LLVM Pipeline (prepared for future expansion)
-        "download_llvm": {"install_download_prerequisites"},
-        "extract_llvm": {"download_llvm", "install_extract_prerequisites"}
-    }
+            # GCC Pipeline
+            download_gcc_job = Job("download_gcc", download_and_verify, (TOOLCHAIN.gcc,), [install_download_prereq_job])
+            extract_gcc_job = Job("extract_gcc", extract_source, (TOOLCHAIN.gcc,), [download_gcc_job, install_extract_prereq_job])
+            download_gcc_prereq_job = Job("download_gcc_prerequisites", download_gcc_prerequisites, (TOOLCHAIN.gcc,), [extract_gcc_job])
+            build_libstdcpp_job = Job("build_and_install_libstdcpp", build_and_install_libstdcpp, (TOOLCHAIN.gcc,),
+                                      [download_gcc_prereq_job, build_glibc_job, install_linux_headers_job, install_build_prereq_job])
 
+            extend_jobs = [
+                download_glibc_job,
+                extract_glibc_job,
+                download_linux_job,
+                extract_linux_job,
+                install_linux_headers_job,
+                build_glibc_job,
+                download_gcc_job,
+                extract_gcc_job,
+                download_gcc_prereq_job,
+                build_libstdcpp_job,
+            ]
+        case "zig":
+            # Zig Pipeline: download, verify, and extract using standard component functions
+            download_zig_job = Job("download_zig", download_and_verify, (TOOLCHAIN.zig,), [install_download_prereq_job])
+            extract_zig_job = Job("extract_zig", extract_zig, (TOOLCHAIN.zig,), [download_zig_job, install_extract_prereq_job])
+            extend_jobs = [download_zig_job, extract_zig_job]
+        case _:
+            raise ValueError(f"Unsupported compiler: {COMPILER}")
+
+    all_jobs.extend(extend_jobs)
 
     print(f"📊 Initializing parallel scheduler with {len(all_jobs)} tasks...")
-    print(f"🔗 Total dependencies: {sum(len(deps) for deps in dependency_graph.values())}")
-    print(f"⚡ Maximum parallelism: {len([job for job, deps in dependency_graph.items() if not deps])} initial tasks\n")
+    total_deps = sum(len(job.dependencies) for job in all_jobs)
+    print(f"🔗 Total dependency edges: {total_deps}")
+    independent_jobs = [job.name for job in all_jobs if not job.dependencies]
+    print(f"⚡ Maximum parallelism: {len(independent_jobs)} initial tasks: {independent_jobs}\n")
 
-    scheduler = ParallelTaskScheduler(all_jobs, dependency_graph)
+    scheduler = ParallelTaskScheduler(all_jobs)
     scheduler.run()
 
     print("\n🎉 ========================================================================")
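(Aside: with this rework, dependencies travel on the Job objects themselves instead of a separate name-keyed dict. A minimal usage sketch with hypothetical task functions:)

    def fetch() -> None:
        print("fetching sources")

    def build() -> None:
        print("building")

    if __name__ == "__main__":
        fetch_job = Job("fetch", fetch)
        build_job = Job("build", build, dependencies=[fetch_job])
        # The scheduler derives the whole graph from the jobs' dependencies lists.
        ParallelTaskScheduler([fetch_job, build_job]).run()
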
diff --git a/docker/linux/utility/build_utils.py b/docker/linux/utility/build_utils.py
index 10bf9587..2b4e7ba7 100644
--- a/docker/linux/utility/build_utils.py
+++ b/docker/linux/utility/build_utils.py
@@ -2,19 +2,18 @@
 import sys
 import os
 import tarfile
+import subprocess
+import hashlib
+import concurrent.futures
+import time
 
-from config.build_config import Component, ToolchainSubComponent
-
-# Add project root to the Python path to allow importing 'config' module
 project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..'))
 if project_root not in sys.path:
     sys.path.insert(0, project_root)
 
-import subprocess
-import hashlib
-import concurrent.futures
+from config.docker_build_stages.common import Component, ToolchainSubComponent
 
 import time
-from typing import Dict, Set, Tuple, Optional, List, Callable, Union
+from typing import Dict, Set, Tuple, Optional, List, Callable
 from graphlib import TopologicalSorter
 from collections import defaultdict
 from enum import Enum
@@ -72,19 +71,67 @@ def run_command(command: str, cwd: str = os.getcwd(), env: Dict[str, str] = {}) 
     if process.returncode != 0:
         raise subprocess.CalledProcessError(process.returncode, command)
 
-def verify_signature(signature_path: str, data_path: str) -> None:
-    """Check if data file and signature file exist (actual GPG verification skipped)."""
-    print(f"--- Skipping signature verification for {os.path.basename(data_path)} (GPG not available) ---", flush=True)
-
+def verify_signature(signature_path: str, data_path: str, public_key: Optional[str] = None) -> bool:
     if not os.path.exists(data_path):
         raise RuntimeError(f"Data file {data_path} does not exist")
 
-    if os.path.exists(signature_path):
-        print(f"Signature file found: {os.path.basename(signature_path)}", flush=True)
-    else:
-        print(f"No signature file found: {os.path.basename(signature_path)}", flush=True)
+    if not os.path.exists(signature_path):
+        print(f"No signature file found: {os.path.basename(signature_path)}, skipping verification", flush=True)
+        return True
 
-    print(f"File verification completed for {os.path.basename(data_path)}", flush=True)
+    # Detect signature type by extension and use match statement
+    match os.path.splitext(signature_path)[1]:
+        case '.minisig':
+            # Minisign verification using pyminisign
+            if not public_key:
+                print(f"Warning: Minisign public key not provided for {os.path.basename(signature_path)}, skipping verification", flush=True)
+                return True
+
+            print(f"Verifying minisign signature for {os.path.basename(data_path)}...", flush=True)
+
+            try:
+                import minisign
+
+                # Verify using py-minisign
+                # verify() raises an exception if verification fails
+                minisign.verify(signature_path, data_path, public_key)
+                print("Minisign verification successful.", flush=True)
+                return True
+
+            except Exception as e:
+                print(f"Minisign verification failed: {e}", flush=True)
+                return False
+
+        case '.sig':
+            # GPG verification using python-gnupg
+            print(f"Verifying GPG signature for {os.path.basename(data_path)}...", flush=True)
+
+            try:
+                import gnupg
+
+                gpg = gnupg.GPG()
+
+                # Verify the signature
+                with open(signature_path, 'rb') as sig_file:
+                    verified = gpg.verify_file(sig_file, data_path)
+
+                if verified.valid:
+                    print("GPG signature verification successful.", flush=True)
+                    return True
+                else:
+                    print(f"GPG signature verification failed: {verified.status}", flush=True)
+                    return False
+
+            except ImportError:
+                print("python-gnupg library not found. Skipping signature verification.", flush=True)
+                return True
+            except Exception as e:
+                print(f"Error during GPG signature verification: {e}, skipping", flush=True)
+                return True
+
+        case _:
+            print(f"Unknown signature type for {os.path.basename(signature_path)}, skipping verification", flush=True)
+            return True
 
 def verify_sha256(file_path: str, expected_hash: str) -> bool:
     print(f"Verifying SHA256 for {file_path}...", flush=True)
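(Aside: dispatch is driven purely by the signature file's extension, and only an actual minisign/GPG mismatch is fatal — missing tooling or keys just skips verification. A hypothetical call site:)

    # .sig -> python-gnupg path; .minisig -> py-minisign path (needs a public key)
    ok = verify_signature("downloads/demo.tar.xz.sig", "downloads/demo.tar.xz")
    if not ok:
        raise RuntimeError("signature verification failed for demo.tar.xz")
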
@@ -106,13 +153,21 @@ def verify_sha256(file_path: str, expected_hash: str) -> bool:
 
 class Job:
     """Represents a single unit of work in the build process."""
-    def __init__(self, name: str, func: Callable, args: Tuple = ()):
-        self.name = name
+
+    def __init__(self, name: str, func: Callable, args: Tuple = (), dependencies: Optional[List['Job']] = None) -> None:
+        self.name = name  # Name is only for debugging, not guaranteed unique
         self.func = func
         self.args = args
+        self.dependencies: List['Job'] = dependencies or []
 
     def __repr__(self) -> str:
-        return f"Job(name='{self.name}')"
+        return f"Job(name='{self.name}', deps={[d.name for d in self.dependencies]})"
+
+    def __hash__(self) -> int:
+        return id(self)  # Use Python's built-in object id
+
+    def __eq__(self, other) -> bool:
+        return self is other  # Identity comparison
 
 
 class TaskState(Enum):
@@ -124,12 +179,12 @@ class TaskState(Enum):
     FAILED = "failed"
 
 
-def run_job(job: Job) -> str:
+def run_job(job: Job) -> Job:
     """Executor function to run a job."""
     print(f"--- Starting Job: {job.name} ---", flush=True)
     job.func(*job.args)
     print(f"--- Finished Job: {job.name} ---", flush=True)
-    return job.name
+    return job  # Return the Job object itself
 
 
 class ParallelTaskScheduler:
@@ -144,66 +199,82 @@
     - Efficient resource utilization
     """
 
-    def __init__(self, jobs: Dict[str, Job], dependencies: Dict[str, Set[str]]):
-        self.jobs = jobs
-        self.dependencies = dependencies
-        self.task_states = {name: TaskState.PENDING for name in jobs}
-        self.running_futures = {}  # future -> job_name mapping
-        self.completed_jobs = set()
-        self.failed_jobs = set()
+    def __init__(self, jobs: List[Job]):
+        # Store all jobs as a set for fast lookups
+        self.all_jobs: Set[Job] = set(jobs)
+
+        # Build Job-based dependency graph
+        self.dependencies: Dict[Job, Set[Job]] = {
+            job: set(job.dependencies)
+            for job in jobs
+        }
+
+        # Task states use Job objects as keys
+        self.task_states: Dict[Job, TaskState] = {job: TaskState.PENDING for job in jobs}
+        self.running_futures: Dict[concurrent.futures.Future, Job] = {}  # future -> Job mapping
+        self.completed_jobs: Set[Job] = set()
+        self.failed_jobs: Set[Job] = set()
 
-        # Performance tracking
-        self.start_time = None
-        self.job_start_times = {}
-        self.job_durations = {}
+        # Performance tracking uses Job objects as keys
+        self.start_time: Optional[float] = None
+        self.job_start_times: Dict[Job, float] = {}
+        self.job_durations: Dict[Job, float] = {}
 
-        # Initialize dependency sorter
-        self.sorter = TopologicalSorter(dependencies)
+        # Build string-based graph ONLY for TopologicalSorter (library requirement)
+        # Map job ID to Job object for reverse lookup
+        self.id_to_job: Dict[int, Job] = {id(job): job for job in jobs}
+        string_deps: Dict[int, Set[int]] = {
+            id(job): {id(dep) for dep in job.dependencies}
+            for job in jobs
+        }
+
+        # Initialize dependency sorter with IDs
+        self.sorter = TopologicalSorter(string_deps)
         self.sorter.prepare()
 
-        # Reverse dependency mapping for efficient lookups
-        self.dependents = defaultdict(set)
-        for job, deps in dependencies.items():
+        # Reverse dependency mapping: which jobs depend on this job
+        self.dependents: Dict[Job, Set[Job]] = defaultdict(set)
+        for job, deps in self.dependencies.items():
             for dep in deps:
                 self.dependents[dep].add(job)
 
-    def _get_ready_jobs(self) -> List[str]:
+    def _get_ready_jobs(self) -> List[Job]:
         """Get all jobs that are ready to run (dependencies satisfied)."""
-        ready_jobs = []
-        for job_name in self.sorter.get_ready():
-            if self.task_states[job_name] == TaskState.PENDING:
-                ready_jobs.append(job_name)
+        ready_jobs: List[Job] = []
+        for job_id in self.sorter.get_ready():
+            job = self.id_to_job[job_id]
+            if self.task_states[job] == TaskState.PENDING:
+                ready_jobs.append(job)
         return ready_jobs
 
-    def _submit_job(self, executor: concurrent.futures.Executor, job_name: str) -> concurrent.futures.Future:
+    def _submit_job(self, executor: concurrent.futures.Executor, job: Job) -> concurrent.futures.Future:
         """Submit a job for execution."""
-        job = self.jobs[job_name]
-        self.task_states[job_name] = TaskState.RUNNING
-        self.job_start_times[job_name] = time.time()
+        self.task_states[job] = TaskState.RUNNING
+        self.job_start_times[job] = time.time()
 
-        print(f"🚀 [Scheduler] Starting job: {job_name}", flush=True)
+        print(f"🚀 [Scheduler] Starting job: {job.name}", flush=True)
         future = executor.submit(run_job, job)
-        self.running_futures[future] = job_name
+        self.running_futures[future] = job
         return future
 
-    def _handle_completed_job(self, job_name: str, success: bool = True) -> None:
+    def _handle_completed_job(self, job: Job, success: bool = True) -> None:
         """Handle job completion and update states."""
-        duration = time.time() - self.job_start_times[job_name]
-        self.job_durations[job_name] = duration
+        duration = time.time() - self.job_start_times[job]
+        self.job_durations[job] = duration
 
         if success:
-            self.task_states[job_name] = TaskState.COMPLETED
-            self.completed_jobs.add(job_name)
-            self.sorter.done(job_name)
-            print(f"✅ [Scheduler] Job '{job_name}' completed successfully in {duration:.2f}s", flush=True)
+            self.task_states[job] = TaskState.COMPLETED
+            self.completed_jobs.add(job)
+            self.sorter.done(id(job))  # Tell sorter using object ID
+            print(f"✅ [Scheduler] Job '{job.name}' completed successfully in {duration:.2f}s", flush=True)
         else:
-            self.task_states[job_name] = TaskState.FAILED
-            self.failed_jobs.add(job_name)
-            print(f"❌ [Scheduler] Job '{job_name}' failed after {duration:.2f}s", flush=True)
+            self.task_states[job] = TaskState.FAILED
+            self.failed_jobs.add(job)
+            print(f"❌ [Scheduler] Job '{job.name}' failed after {duration:.2f}s", flush=True)
 
     def _print_progress(self) -> None:
         """Print current execution progress."""
-        total = len(self.jobs)
+        total = len(self.all_jobs)
         completed = len(self.completed_jobs)
         running = len(self.running_futures)
         failed = len(self.failed_jobs)
@@ -211,8 +282,9 @@ def _print_progress(self) -> None:
 
         elapsed = time.time() - self.start_time if self.start_time else 0
 
+        running_job_names = [job.name for job in self.running_futures.values()]
         print(f"\n📊 [Progress] Total: {total} | ✅ Done: {completed} | 🏃 Running: {running} | ⏳ Pending: {pending} | ❌ Failed: {failed}", flush=True)
-        print(f"⏱️ [Time] Elapsed: {elapsed:.1f}s | Running jobs: {list(self.running_futures.values())}", flush=True)
+        print(f"⏱️ [Time] Elapsed: {elapsed:.1f}s | Running jobs: {running_job_names}", flush=True)
 
         if completed > 0 and elapsed > 0:
             rate = completed / elapsed
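(Aside: the `id()` indirection above keeps graphlib's TopologicalSorter working with plain hashable keys while the scheduler's own bookkeeping stays on Job objects. The same pattern in isolation, with hypothetical Node objects:)

    from graphlib import TopologicalSorter

    class Node:
        def __init__(self, name: str, deps=()):
            self.name, self.deps = name, list(deps)

    a = Node("a")
    b = Node("b", [a])
    id_to_node = {id(n): n for n in (a, b)}

    # Graph maps each node id to the ids of its predecessors
    sorter = TopologicalSorter({id(n): {id(d) for d in n.deps} for n in (a, b)})
    sorter.prepare()
    while sorter.is_active():
        for node_id in sorter.get_ready():
            print("ready:", id_to_node[node_id].name)  # prints a, then b
            sorter.done(node_id)
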
number of parallel workers (default: CPU count) """ print("๐ŸŽฏ [Scheduler] Initializing High-Performance Parallel Task Scheduler", flush=True) - print(f"๐Ÿ“‹ [Scheduler] Total jobs: {len(self.jobs)}", flush=True) - print(f"๐Ÿ”— [Scheduler] Total dependencies: {sum(len(deps) for deps in self.dependencies.values())}", flush=True) + print(f"๐Ÿ“‹ [Scheduler] Total jobs: {len(self.all_jobs)}", flush=True) + total_deps = sum(len(deps) for deps in self.dependencies.values()) + print(f"๐Ÿ”— [Scheduler] Total dependency edges: {total_deps}", flush=True) + if total_deps > 0: + # Print dependency graph with names for debugging + debug_graph = {job.name: [dep.name for dep in deps] for job, deps in self.dependencies.items() if deps} + print(f" Dependency graph: {debug_graph}", flush=True) self.start_time = time.time() with concurrent.futures.ProcessPoolExecutor(max_workers=max_workers) as executor: # Submit initial ready jobs ready_jobs = self._get_ready_jobs() - print(f"๐Ÿšฆ [Scheduler] Initial ready jobs: {ready_jobs}", flush=True) + ready_job_names = [job.name for job in ready_jobs] + print(f"๐Ÿšฆ [Scheduler] Initial ready jobs: {ready_job_names}", flush=True) - for job_name in ready_jobs: - self._submit_job(executor, job_name) + for job in ready_jobs: + self._submit_job(executor, job) # Main execution loop while self.running_futures: @@ -251,28 +329,28 @@ def run(self, max_workers: Optional[int] = None) -> None: ) # Process all completed jobs in this batch - newly_completed = [] + newly_completed: List[Job] = [] for future in done_futures: - job_name = self.running_futures[future] + job = self.running_futures[future] try: result = future.result() # This will raise exception if job failed - self._handle_completed_job(job_name, success=True) - newly_completed.append(job_name) + self._handle_completed_job(job, success=True) + newly_completed.append(job) except Exception as e: - print(f"๐Ÿ’ฅ [Scheduler] Job '{job_name}' failed with detailed error:", flush=True) - self._handle_completed_job(job_name, success=False) + print(f"๐Ÿ’ฅ [Scheduler] Job '{job.name}' failed with detailed error:", flush=True) + self._handle_completed_job(job, success=False) # Implement fail-fast: cancel all running jobs and exit immediately - print(f"๐Ÿ›‘ [Scheduler] FAIL-FAST: Cancelling all remaining jobs due to failure in '{job_name}'", flush=True) + print(f"๐Ÿ›‘ [Scheduler] FAIL-FAST: Cancelling all remaining jobs due to failure in '{job.name}'", flush=True) for remaining_future in self.running_futures.keys(): if remaining_future != future: remaining_future.cancel() remaining_job = self.running_futures[remaining_future] - print(f"โŒ [Scheduler] Cancelled job: {remaining_job}", flush=True) + print(f"โŒ [Scheduler] Cancelled job: {remaining_job.name}", flush=True) # Clean up and raise the error immediately - raise RuntimeError(f"โŒ Build failed in job '{job_name}': {str(e)}") from e + raise RuntimeError(f"โŒ Build failed in job '{job.name}': {str(e)}") from e # Clean up completed future del self.running_futures[future] @@ -280,9 +358,9 @@ def run(self, max_workers: Optional[int] = None) -> None: # Submit any newly ready jobs if newly_completed: ready_jobs = self._get_ready_jobs() - for job_name in ready_jobs: - if job_name not in self.running_futures.values(): - self._submit_job(executor, job_name) + for job in ready_jobs: + if job not in self.running_futures.values(): + self._submit_job(executor, job) # Final results total_time = time.time() - self.start_time @@ -298,12 +376,12 @@ def _print_final_report(self, total_time: 
float) -> None: print("="*60, flush=True) print(f"โฑ๏ธ Total execution time: {total_time:.2f}s", flush=True) - print(f"โœ… Successfully completed: {len(self.completed_jobs)}/{len(self.jobs)} jobs", flush=True) + print(f"โœ… Successfully completed: {len(self.completed_jobs)}/{len(self.all_jobs)} jobs", flush=True) if self.failed_jobs: print(f"โŒ Failed jobs: {len(self.failed_jobs)}", flush=True) for job in self.failed_jobs: - print(f" - {job}", flush=True) + print(f" - {job.name}", flush=True) # Show job timing analysis if self.job_durations: @@ -311,7 +389,7 @@ def _print_final_report(self, total_time: float) -> None: sorted_jobs = sorted(self.job_durations.items(), key=lambda x: x[1], reverse=True) print(f" Slowest jobs:", flush=True) for job, duration in sorted_jobs[:5]: - print(f" - {job:<30} {duration:>8.2f}s", flush=True) + print(f" - {job.name:<30} {duration:>8.2f}s", flush=True) avg_duration = sum(self.job_durations.values()) / len(self.job_durations) print(f" Average job duration: {avg_duration:.2f}s", flush=True) @@ -361,15 +439,19 @@ def download_and_verify(component: Component) -> None: # Download main source archive download_file(tarball_url, tarball_path) - # Handle GPG signature verification when available + # Handle signature verification when available if component.verification_name_pattern: signature_name = component.verification_name signature_path = os.path.join(component.cache_dir, signature_name) signature_url = component.verification_url + + # Get public key if component has minisig_public_key attribute + public_key = getattr(component, 'minisig_public_key', None) + try: print(f"๐Ÿ” [VERIFY] Downloading signature for {component.name}...", flush=True) download_file(signature_url, signature_path) - verify_signature(signature_path, tarball_path) + verify_signature(signature_path, tarball_path, public_key) print(f"โœ… [VERIFY] {component.name} signature verified", flush=True) except Exception as e: print(f"โŒ [ERROR] Signature verification failed for {component.name}: {e}", file=sys.stderr, flush=True) @@ -378,40 +460,62 @@ def download_and_verify(component: Component) -> None: else: print(f"โš ๏ธ [INFO] No signature verification available for {component.name}", flush=True) -def extract_source(component: ToolchainSubComponent) -> None: - version = component.version - print(f"๐Ÿ“‚ [EXTRACT] Unpacking {component.name} v{version}...", flush=True) +def extract_package(archive_path: str, target_dir: str, strip_top_level: bool = True) -> None: + print("๐Ÿ“‚ [EXTRACT] Unpacking package...", flush=True) + print(f" ๐Ÿ“ Source: {archive_path}", flush=True) + print(f" ๐Ÿ“ Target: {target_dir}", flush=True) - # Ensure extraction directory exists - os.makedirs(component.src_dir, exist_ok=True) - - # Determine archive location and format - tarball_path = os.path.join(component.cache_dir, component.tarball_name) - - print(f" ๐Ÿ“ Source: {tarball_path}", flush=True) - print(f" ๐Ÿ“ Target: {component.extracted_dir}", flush=True) + # Ensure target directory exists + os.makedirs(target_dir, exist_ok=True) - # Auto-detect compression format and extract directly - mode = "r:xz" if tarball_path.endswith(".tar.xz") else "r:gz" - with tarfile.open(tarball_path, mode) as tar: - tar.extractall(path=component.extracted_dir, filter='data') + # Auto-detect compression format + if archive_path.endswith(".tar.xz"): + mode = "r:xz" + elif archive_path.endswith(".tar.gz") or archive_path.endswith(".tgz"): + mode = "r:gz" + elif archive_path.endswith(".tar.bz2"): + mode = "r:bz2" + elif archive_path.endswith(".tar"): + mode = "r" 
+ else: + raise ValueError(f"Unsupported archive format: {archive_path}") - # Check if we need to strip a top-level directory - extracted_items = os.listdir(component.extracted_dir) + # Extract to target directory + with tarfile.open(archive_path, mode) as tar: + tar.extractall(path=target_dir, filter='data') - if len(extracted_items) == 1 and os.path.isdir(os.path.join(component.extracted_dir, extracted_items[0])): - # Single top-level directory found - strip it - top_dir_name = extracted_items[0] - top_dir_path = os.path.join(component.extracted_dir, top_dir_name) - print(f" ๐Ÿ”„ Stripping top-level directory: {top_dir_name}", flush=True) + # Optionally strip single top-level directory + if strip_top_level: + extracted_items = os.listdir(target_dir) - # Move all contents from top_dir to parent (extracted_dir) - for item in os.listdir(top_dir_path): - src = os.path.join(top_dir_path, item) - dst = os.path.join(component.extracted_dir, item) - shutil.move(src, dst) - - # Remove the now-empty top-level directory - os.rmdir(top_dir_path) + if len(extracted_items) == 1 and os.path.isdir(os.path.join(target_dir, extracted_items[0])): + # Single top-level directory found - strip it + top_dir_name = extracted_items[0] + top_dir_path = os.path.join(target_dir, top_dir_name) + print(f" ๐Ÿ”„ Stripping top-level directory: {top_dir_name}", flush=True) + + # Move all contents from top_dir to parent (target_dir) + for item in os.listdir(top_dir_path): + src = os.path.join(top_dir_path, item) + dst = os.path.join(target_dir, item) + shutil.move(src, dst) + + # Remove the now-empty top-level directory + os.rmdir(top_dir_path) - print(f"โœ… [EXTRACT] {component.name} extraction complete", flush=True) + print("โœ… [EXTRACT] Package extraction complete", flush=True) + +def extract_source(component: ToolchainSubComponent) -> None: + """ + Extract source code for a toolchain component. + Wrapper around extract_package for backward compatibility. + """ + version = component.version + print(f"๐Ÿ“‚ [EXTRACT] Unpacking {component.name} v{version}...", flush=True) + os.makedirs(component.src_dir, exist_ok=True) + + archive_path = os.path.join(component.cache_dir, component.tarball_name) + extract_package( + archive_path=archive_path, + target_dir=component.extracted_dir, + strip_top_level=True, + ) diff --git a/docker/linux/utility/common.sh b/docker/linux/utility/common.sh index 869346fd..814a9f43 100644 --- a/docker/linux/utility/common.sh +++ b/docker/linux/utility/common.sh @@ -39,7 +39,7 @@ get_packed_image_name() { local version="$2" local image_tag image_tag=$(get_image_tag "$compiler" "$version") - echo "clice.io/clice:${image_tag}" + echo "clice.io/clice-dev:${image_tag}" } # Generates the full name for the expanded (development) image. diff --git a/docker/linux/container-entrypoint.sh b/docker/linux/utility/container-entrypoint.sh similarity index 93% rename from docker/linux/container-entrypoint.sh rename to docker/linux/utility/container-entrypoint.sh index 0dfd7dfe..66e26d8d 100644 --- a/docker/linux/container-entrypoint.sh +++ b/docker/linux/utility/container-entrypoint.sh @@ -1,7 +1,7 @@ # ======================================================================== # ๐Ÿš€ Clice Dev Container Shell Initialization # ======================================================================== -# File: docker/linux/container-entrypoint.sh +# File: docker/linux/utility/container-entrypoint.sh # Purpose: Bash initialization script for Clice dev container # # This script is sourced by .bashrc and performs: @@ -18,7 +18,7 @@ if [[ $- == *i* ]]; then echo "๐Ÿ“ฆ Running uv sync..." 
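To make the refactored Job/ParallelTaskScheduler API from build_utils.py above concrete, here is a minimal usage sketch. The two step functions are hypothetical placeholders invented for illustration; the real jobs are the download and install functions defined in the scripts below.

```python
# Minimal usage sketch of the new Job / ParallelTaskScheduler API
# (docker/linux/utility/build_utils.py). fetch_sources and build_sources
# are illustrative placeholders, not part of this patch.
from build_utils import Job, ParallelTaskScheduler

def fetch_sources() -> None:
    print("fetching sources...", flush=True)

def build_sources() -> None:
    print("building sources...", flush=True)

fetch_job = Job("fetch", fetch_sources, ())
# Dependencies are Job objects rather than name strings, so duplicate names
# cannot collide: the scheduler keys its graph on object identity (id()).
build_job = Job("build", build_sources, (), dependencies=[fetch_job])

# Jobs run in a ProcessPoolExecutor; independent jobs execute in parallel.
ParallelTaskScheduler([fetch_job, build_job]).run(max_workers=2)
```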
- if UV_CACHE_DIR="${UV_PACKAGE_CACHE_DIR}" uv sync --project "${CLICE_WORKDIR}/pyproject.toml"; then + if UV_CACHE_DIR="${UV_PACKAGE_CACHE_DIR}" uv sync --project "${CLICE_WORKDIR}/tests/pyproject.toml"; then echo "โœ… Python environment ready at ${CLICE_WORKDIR}/.venv" else echo "โš ๏ธ Failed to sync Python environment (pyproject.toml might not exist)" diff --git a/docker/linux/utility/create_release_package.py b/docker/linux/utility/create_release_package.py index 188f3766..cf68dce2 100644 --- a/docker/linux/utility/create_release_package.py +++ b/docker/linux/utility/create_release_package.py @@ -13,16 +13,19 @@ if project_root not in sys.path: sys.path.insert(0, project_root) -from config.build_config import ( - PACKED_RELEASE_PACKAGE_PATH, +from config.docker_build_stages.common import ( + COMPILER, RELEASE_PACKAGE_DIR, + PACKED_RELEASE_PACKAGE_PATH, CLICE_WORKDIR, DEVELOPMENT_SHELL_VARS, + TOOLCHAIN_VERSIONS, +) +from config.docker_build_stages.toolchain_config import TOOLCHAIN +from config.docker_build_stages.dependencies_config import UV +from config.docker_build_stages.package_config import ( ALL_COMPONENTS, - # Component instances for structured access - TOOLCHAIN, BASHRC, - UV, P7ZIP, ) @@ -68,6 +71,35 @@ def setup_environment_variables_and_entrypoint() -> None: f.write(f'export {key}="{value}"\n') f.write("\n") + # Export compiler-specific environment variables + f.write("# Compiler environment variables\n") + match COMPILER: + case "gcc": + gcc_path = f"/usr/bin/gcc-{TOOLCHAIN_VERSIONS['gcc']}" + gxx_path = f"/usr/bin/g++-{TOOLCHAIN_VERSIONS['gcc']}" + f.write(f'export CC="{gcc_path}"\n') + f.write(f'export CXX="{gxx_path}"\n') + # For GCC/Clang, SYSROOT points to custom-built glibc/libstdc++ + sysroot = TOOLCHAIN.sysroot_dir + f.write(f'export SYSROOT="{sysroot}"\n') + case "clang": + clang_path = f"/usr/bin/clang-{TOOLCHAIN_VERSIONS['clang']}" + clangxx_path = f"/usr/bin/clang++-{TOOLCHAIN_VERSIONS['clang']}" + f.write(f'export CC="{clang_path}"\n') + f.write(f'export CXX="{clangxx_path}"\n') + # For GCC/Clang, SYSROOT points to custom-built glibc/libstdc++ + sysroot = TOOLCHAIN.sysroot_dir + f.write(f'export SYSROOT="{sysroot}"\n') + case "zig": + zig_bin = os.path.join(TOOLCHAIN.zig.package_dir, 'zig') + f.write(f'export CC="{zig_bin} cc"\n') + f.write(f'export CXX="{zig_bin} c++"\n') + # Zig uses its own bundled libc, no traditional sysroot + f.write('# Note: Zig uses its own bundled libc\n') + case _: + raise ValueError(f"Unsupported compiler: {COMPILER}") + f.write("\n") + # Set internal variables for container entrypoint (not exported) f.write("# Internal variables for container entrypoint (not exported to user environment)\n") f.write(f'CLICE_WORKDIR="{CLICE_WORKDIR}"\n') @@ -86,6 +118,10 @@ def setup_environment_variables_and_entrypoint() -> None: print(f" ๐Ÿ“ Exported variables: {len(DEVELOPMENT_SHELL_VARS)} from DEVELOPMENT_SHELL_VARS") for key in DEVELOPMENT_SHELL_VARS.keys(): print(f" โ€ข {key}") + print(f" ๐Ÿ“ Compiler: {COMPILER}") + print(f" โ€ข CC and CXX configured for {COMPILER}") + if COMPILER in ["gcc", "clang"]: + print(f" โ€ข SYSROOT={TOOLCHAIN.sysroot_dir}") print(" ๐Ÿ“ Internal variables: CLICE_WORKDIR, RELEASE_PACKAGE_DIR, UV_PACKAGE_DIR_NAME") print(" ๐Ÿ“ Container entrypoint script embedded") @@ -99,7 +135,7 @@ def create_comprehensive_manifest() -> None: # Create base manifest structure manifest = { "release_info": { - "created_at": os.stat(RELEASE_PACKAGE_DIR).st_birthtime if os.path.exists(RELEASE_PACKAGE_DIR) else None, + "created_at": 
os.stat(RELEASE_PACKAGE_DIR).st_ctime if os.path.exists(RELEASE_PACKAGE_DIR) else None, "stage": "final_release", "version": "1.0.0" }, @@ -161,7 +197,7 @@ def create_comprehensive_manifest() -> None: "glibc_version": TOOLCHAIN.glibc.version, "gcc_version": TOOLCHAIN.gcc.version, "linux_version": TOOLCHAIN.linux.version, - "llvm_version": TOOLCHAIN.llvm.version, + # "llvm_version": TOOLCHAIN.llvm.version, } case "clice-setup-scripts": @@ -195,11 +231,6 @@ def create_comprehensive_manifest() -> None: print(f"๐Ÿ“ Total files: {manifest['summary']['total_files']}") print(f"๐Ÿ“ฆ Total size: {manifest['summary']['total_size_mb']} MB") -def update_apt() -> None: - print("๐Ÿ”„ Updating APT package database...") - run_command("apt update -o DPkg::Lock::Timeout=-1") - print("โœ… APT database updated") - def install_p7zip() -> None: print("๐Ÿ“ฆ Installing p7zip for archive creation...") @@ -253,30 +284,20 @@ def main() -> None: print("๐Ÿš€ ========================================================================\n") # Define packaging jobs with proper dependency management - jobs = { - "update_apt": Job("update_apt", update_apt, ()), - "setup_bashrc": Job("setup_bashrc", setup_environment_variables_and_entrypoint, ()), - "create_manifest": Job("create_manifest", create_comprehensive_manifest, ()), - "install_p7zip": Job("install_p7zip", install_p7zip, ()), - "create_package": Job("create_package", create_final_release_package, ()), - } + setup_bashrc_job = Job("setup_bashrc", setup_environment_variables_and_entrypoint, ()) + install_p7zip_job = Job("install_p7zip", install_p7zip, ()) + create_manifest_job = Job("create_manifest", create_comprehensive_manifest, (), [setup_bashrc_job]) + create_package_job = Job("create_package", create_final_release_package, (), [create_manifest_job, install_p7zip_job]) - # Define dependencies - # - APT update runs first to refresh package lists - # - bashrc setup and script copy can run in parallel with APT update - # - p7zip installation depends on APT update being complete - # - Manifest creation depends on bashrc and scripts being ready - # - Package creation depends on manifest and p7zip being ready - dependencies = { - "update_apt": set(), # Runs first, no dependencies - "setup_bashrc": set(), - "create_manifest": {"setup_bashrc"}, - "install_p7zip": {"update_apt"}, # Depends on APT update - "create_package": {"create_manifest", "install_p7zip"}, # Depends on manifest and 7z - } + all_jobs = [ + setup_bashrc_job, + install_p7zip_job, + create_manifest_job, + create_package_job, + ] # Execute packaging tasks in parallel where possible - scheduler = ParallelTaskScheduler(jobs, dependencies) + scheduler = ParallelTaskScheduler(all_jobs) scheduler.run() print("\n๐ŸŽ‰ ========================================================================") diff --git a/docker/linux/utility/download_dependencies.py b/docker/linux/utility/download_dependencies.py index 16fb4674..bc28f009 100644 --- a/docker/linux/utility/download_dependencies.py +++ b/docker/linux/utility/download_dependencies.py @@ -8,7 +8,7 @@ import shutil import sys import subprocess -from typing import List +from typing import List, Dict, Set, Optional # Add project root to Python path project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..')) @@ -22,11 +22,8 @@ run_command, verify_sha256 ) -from config.build_config import ( - RELEASE_PACKAGE_DIR, - PYPROJECT_PATH, - APT, UV, CMAKE, XMAKE -) +from config.docker_build_stages.common import RELEASE_PACKAGE_DIR +from 
config.docker_build_stages.dependencies_config import APT, UV, CMAKE, XMAKE # ======================================================================== # ๐Ÿ› ๏ธ Download Task Implementations @@ ... def install_download_prerequisites() -> None: print("๐Ÿ“ฆ Installing dependencies download prerequisites...") - # Update package lists first - run_command("apt update -o DPkg::Lock::Timeout=-1") - # Install all download prerequisites (universal + APT-specific) - download_prerequisites = APT.download_prerequisites + download_prerequisites: List[str] = APT.download_prerequisites run_command(f"apt install -y --no-install-recommends=true -o DPkg::Lock::Timeout=-1 {' '.join(download_prerequisites)}") print(f"โœ… Installed {len(download_prerequisites)} download prerequisites") @@ -48,28 +42,30 @@ def get_apt_package_list(base_packages: List[str]) -> List[str]: """Get recursive APT dependencies using apt-cache depends + awk pattern.""" print("๐Ÿ” Resolving recursive dependencies using StackOverflow command pattern...", flush=True) - all_packages = set() + all_packages: Set[str] = set() for package in base_packages: try: # Use the exact command from StackOverflow - apt_cache_cmd = [ + apt_cache_cmd: List[str] = [ "apt-cache", "depends", "--recurse", "--no-recommends", "--no-suggests", "--no-conflicts", "--no-breaks", "--no-replaces", "--no-enhances", package ] # Run apt-cache depends command - result = subprocess.run(apt_cache_cmd, capture_output=True, text=True, check=True) + result: subprocess.CompletedProcess[str] = subprocess.run( + apt_cache_cmd, capture_output=True, text=True, check=True + ) # Use awk pattern to extract dependency packages: $1 ~ /^Depends:/{print $2} for line in result.stdout.split('\n'): line = line.strip() if line.startswith('Depends:'): # Extract the package name after "Depends: " - parts = line.split() + parts: List[str] = line.split() if len(parts) >= 2: - pkg_name = parts[1] + pkg_name: str = parts[1] # Remove architecture suffix and version constraints pkg_name = pkg_name.split(':')[0].split('(')[0].split('[')[0].strip() if pkg_name and not pkg_name.startswith('<') and pkg_name != '|': @@ -82,21 +78,18 @@ def get_apt_package_list(base_packages: List[str]) -> List[str]: # Filter available packages (remove virtual/unavailable packages) print(f"๐Ÿ” Found {len(all_packages)} total dependency packages, filtering available ones...") - available_packages = base_packages.copy() + available_packages_set: Set[str] = set(base_packages) for package in sorted(all_packages): - try: - # Quick availability check - result = subprocess.run( - ["apt-cache", "show", package], - capture_output=True, text=True, check=True - ) - if result.returncode == 0: - available_packages.append(package) - except subprocess.CalledProcessError: - # Skip unavailable packages - continue - + # Quick availability check (check=False so an unavailable package is skipped instead of raising) + result = subprocess.run( + ["apt-cache", "show", package], + capture_output=True, text=True, check=False + ) + if result.returncode == 0: + available_packages_set.add(package) + + available_packages: List[str] = sorted(available_packages_set) print(f"๐Ÿ“‹ Final package list: {len(available_packages)} available packages") return available_packages @@ -108,75 +101,110 @@ def download_apt_packages() -> None: os.makedirs(APT.package_dir, exist_ok=True) # Stage 1: Get package list - base_packages = list(set(APT.all_packages)) + base_packages: List[str] = list(set(APT.all_packages)) print(f"๐Ÿ“‹ Base packages from config: {len(base_packages)} packages") - available_packages = 
get_apt_package_list(base_packages) + available_packages: List[str] = get_apt_package_list(base_packages) # Stage 2: Download packages using apt-get download - print(f"๐Ÿ“ฅ Downloading {len(available_packages)} packages to cache: {APT.cache_dir}") - + print(f"๐Ÿ“ฅ Downloading {len(available_packages)} packages to cache: {APT.cache_dir}") - # Use the exact pattern: apt-get download $(package_list) - # Split into batches to avoid command line length limits - batch_size = 50 - downloaded_count = 0 + # Download all packages at once + packages_str: str = ' '.join(available_packages) + run_command(f"apt-get download {packages_str}", cwd=APT.cache_dir) - for i in range(0, len(available_packages), batch_size): - batch = available_packages[i:i + batch_size] - - print(f"๐Ÿ“ฆ Downloading batch {i//batch_size + 1}/{(len(available_packages) + batch_size - 1)//batch_size} ({len(batch)} packages)...") - + print(f"โœ… Downloaded {len(available_packages)} packages to cache") + + # Count actual .deb files in cache directory + cached_deb_count: int = len([f for f in os.listdir(APT.cache_dir) if f.endswith('.deb')]) + print(f"๐Ÿ“‹ Found {cached_deb_count} .deb files in cache directory") + + # Stage 3: Parse all packages at once with apt-cache show to get real packages and their info + print("๐Ÿ“ฆ Parsing package information to identify real packages and versions...") + + # Get system architecture + arch_result: subprocess.CompletedProcess[str] = subprocess.run( + ["dpkg", "--print-architecture"], + capture_output=True, + text=True, + check=True + ) + system_arch: str = arch_result.stdout.strip() + + # Map package name -> exact filename (only for real packages) + package_to_filename: Dict[str, str] = {} + virtual_packages: List[str] = [] + + for pkg in available_packages: try: - # Run apt-get download for this batch - result = subprocess.run( - ["apt-get", "download"] + batch, - cwd=APT.cache_dir, - capture_output=True, - text=True, + # Single apt-cache show call to get all info at once + show_result: subprocess.CompletedProcess[str] = subprocess.run( + ["apt-cache", "show", pkg], + capture_output=True, + text=True, check=True ) - downloaded_count += len(batch) + + # Parse the output to extract Package, Version, and Architecture + package_name: Optional[str] = None + version: Optional[str] = None + pkg_arch: str = system_arch # Default + + for line in show_result.stdout.split('\n'): + if line.startswith('Package:'): + package_name = line.split(':', 1)[1].strip() + elif line.startswith('Version:'): + version = line.split(':', 1)[1].strip() + elif line.startswith('Architecture:'): + pkg_arch = line.split(':', 1)[1].strip() + + # Stop after first package stanza (in case of multiple versions) + if line.strip() == '' and package_name and version: + break + + # Check if this is a virtual package (Package field doesn't match query) + if not package_name or package_name != pkg: + virtual_packages.append(pkg) + print(f"๐Ÿ“ Skipping virtual package: {pkg} (resolves to {package_name})") + continue + + if not version: + print(f"โš ๏ธ No version found for {pkg}") + continue + + # Construct expected filename based on Debian package naming convention + # URL-encode colons in version + encoded_version: str = version.replace(':', '%3a') + expected_filename: str = f"{pkg}_{encoded_version}_{pkg_arch}.deb" + + package_to_filename[pkg] = expected_filename except subprocess.CalledProcessError as e: - print(f"โš ๏ธ Batch download failed, trying individual packages...") - # Fallback: download packages individually - for package 
in batch: - try: - subprocess.run( - ["apt-get", "download", package], - cwd=APT.cache_dir, - capture_output=True, - text=True, - check=True - ) - downloaded_count += 1 - except subprocess.CalledProcessError: - print(f"โš ๏ธ Failed to download {package}") - - print(f"โœ… Downloaded {downloaded_count} packages to cache") - - # Copy packages from cache to package directory (only available_packages) - print("๐Ÿ“ฆ Copying packages from cache to package directory...") - copied_count = 0 - - # Create a set of expected package prefixes for efficient lookup - package_prefixes = set() - for pkg in available_packages: - package_prefixes.add(pkg + "_") - - for file in os.listdir(APT.cache_dir): - if file.endswith('.deb'): - # Check if this .deb file corresponds to one of our available packages - file_matches = any(file.startswith(prefix) for prefix in package_prefixes) - if file_matches: - src = os.path.join(APT.cache_dir, file) - dst = os.path.join(APT.package_dir, file) - shutil.copy2(src, dst) - copied_count += 1 - - print(f"โœ… APT packages ready in {APT.package_dir}") - print(f"๐Ÿ“Š Copied {copied_count} packages from cache") + # If apt-cache show fails, it's likely a virtual package + virtual_packages.append(pkg) + print(f"๐Ÿ“ Skipping virtual package (no show output): {pkg}") + + print(f"๐Ÿ“‹ Identified {len(package_to_filename)} real packages with .deb files") + print(f"๐Ÿ“ Identified {len(virtual_packages)} virtual packages (no .deb files)") + + # Sanity check: every available package must have been classified as either real or virtual + if len(virtual_packages) + len(package_to_filename) != len(available_packages): + error_msg: str = f"Package count mismatch: {len(available_packages)} available vs {len(package_to_filename)} real + {len(virtual_packages)} virtual" + print(f"โŒ {error_msg}") + raise RuntimeError(error_msg) + + # Stage 4: Copy only the exact files + print("๐Ÿ“ฆ Copying exact package files from cache to package directory...") + + for pkg, filename in package_to_filename.items(): + src: str = os.path.join(APT.cache_dir, filename) + dst: str = os.path.join(APT.package_dir, filename) + + shutil.copy2(src, dst) + + print(f"๐Ÿ“Š Copied {len(package_to_filename)} real packages") + print(f"๐Ÿ“ Skipped {len(virtual_packages)} virtual packages (no .deb files)") + print("โœ… Verification passed: real + virtual package counts match the available package list") print(f"๐Ÿ“ Cache directory: {APT.cache_dir} (preserved for future builds)") def download_cmake() -> None: @@ -188,31 +216,31 @@ def download_cmake() -> None: os.makedirs(CMAKE.package_dir, exist_ok=True) # Use CMake component configuration - cmake_filename = CMAKE.tarball_name - cmake_url = CMAKE.tarball_url + cmake_filename: str = CMAKE.tarball_name + cmake_url: str = CMAKE.tarball_url # Download to cache directory first - cmake_cache_file = f"{CMAKE.cache_dir}/{cmake_filename}" - cmake_package_file = f"{CMAKE.package_dir}/{cmake_filename}" + cmake_cache_file: str = f"{CMAKE.cache_dir}/{cmake_filename}" + cmake_package_file: str = f"{CMAKE.package_dir}/{cmake_filename}" # Download CMake installer (.sh script) to cache download_file(cmake_url, cmake_cache_file) # Download verification files to cache using component structure - sha_url = CMAKE.verification_url - sha_filename = CMAKE.verification_name - sha_cache_file = f"{CMAKE.cache_dir}/{sha_filename}" + sha_url: str = CMAKE.verification_url + sha_filename: str = CMAKE.verification_name + sha_cache_file: str = f"{CMAKE.cache_dir}/{sha_filename}" # Download SHA file for integrity verification download_file(sha_url, sha_cache_file) 
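The SHA check performed next reduces to matching the installer's filename against lines of the form "<sha256>  <filename>" and comparing digests. A self-contained hashlib-based sketch follows; sha256_of and matches_sha_file are illustrative names, the patch itself uses build_utils.verify_sha256.

```python
# Self-contained sketch of the "<sha256>  <filename>" check used for the
# CMake installer. Helper names are illustrative, not from the patch.
import hashlib

def sha256_of(path: str) -> str:
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        # Hash in 1 MiB chunks so large installers don't load into memory
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest()

def matches_sha_file(artifact_path: str, sha_file_path: str, artifact_name: str) -> bool:
    with open(sha_file_path, "r") as f:
        for line in f:
            parts = line.split()
            # Some SHA files prefix the name with '*' to mark binary mode
            if len(parts) >= 2 and parts[1].lstrip("*") == artifact_name:
                return sha256_of(artifact_path) == parts[0]
    return False  # artifact not listed in the SHA file
```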
# Verify CMake file integrity using build_utils with open(sha_cache_file, 'r') as f: - sha_content = f.read().strip() + sha_content: str = f.read().strip() # Parse SHA file format: "hash filename" for line in sha_content.split('\n'): if cmake_filename in line: - expected_hash = line.split()[0] + expected_hash: str = line.split()[0] if verify_sha256(cmake_cache_file, expected_hash): print("โœ… CMake file integrity verification successful") else: @@ -238,12 +266,12 @@ def download_xmake() -> None: os.makedirs(XMAKE.package_dir, exist_ok=True) # Use XMake component configuration - xmake_filename = XMAKE.tarball_name - xmake_url = XMAKE.tarball_url + xmake_filename: str = XMAKE.tarball_name + xmake_url: str = XMAKE.tarball_url # Download to cache directory first - xmake_cache_file = f"{XMAKE.cache_dir}/{xmake_filename}" - xmake_package_file = f"{XMAKE.package_dir}/{xmake_filename}" + xmake_cache_file: str = f"{XMAKE.cache_dir}/{xmake_filename}" + xmake_package_file: str = f"{XMAKE.package_dir}/{xmake_filename}" # Download XMake bundle to cache download_file(xmake_url, xmake_cache_file) @@ -265,15 +293,13 @@ def download_python_packages() -> None: # Set UV_CACHE_DIR to packages cache directory print(f"๐Ÿ“ฅ Downloading package wheels to UV packages package dir: {UV.packages_package_dir}") - print(f"๐Ÿ“‹ Using pyproject.toml from: {PYPROJECT_PATH}") + print(f"๐Ÿ“‹ Using pyproject.toml from: {UV.pyproject_file_path}") # Run uv sync with project root as working directory # UV will automatically find pyproject.toml in the project root - project_root = os.path.dirname(PYPROJECT_PATH) - run_command( f"UV_CACHE_DIR={UV.packages_package_dir} uv sync --no-install-project --no-editable", - cwd=project_root + cwd=os.path.dirname(UV.pyproject_file_path) ) print(f"โœ… Package wheels cached to: {UV.packages_package_dir}") @@ -292,28 +318,22 @@ def main() -> None: os.makedirs(RELEASE_PACKAGE_DIR, exist_ok=True) # Define download jobs with proper dependency management - # Note: Python installation is now done in Dockerfile, not here - jobs = { - "install_download_prerequisites": Job("install_download_prerequisites", install_download_prerequisites, ()), - "download_apt_packages": Job("download_apt_packages", download_apt_packages, ()), - "download_python_packages": Job("download_python_packages", download_python_packages, ()), - "download_cmake": Job("download_cmake", download_cmake, ()), - "download_xmake": Job("download_xmake", download_xmake, ()), - } - - # Define dependencies - # UV and packages downloads need install_download_prerequisites - # Python installation is handled in Dockerfile base-stage - dependencies = { - "install_download_prerequisites": set(), - "download_apt_packages": {"install_download_prerequisites"}, - "download_python_packages": {"install_download_prerequisites"}, - "download_cmake": {"install_download_prerequisites"}, - "download_xmake": {"install_download_prerequisites"}, - } + install_download_prereq_job = Job("install_download_prerequisites", install_download_prerequisites, ()) + download_apt_job = Job("download_apt_packages", download_apt_packages, (), [install_download_prereq_job]) + download_python_job = Job("download_python_packages", download_python_packages, (), [install_download_prereq_job]) + download_cmake_job = Job("download_cmake", download_cmake, (), [install_download_prereq_job]) + download_xmake_job = Job("download_xmake", download_xmake, (), [install_download_prereq_job]) + + all_jobs = [ + install_download_prereq_job, + download_apt_job, + download_python_job, + 
download_cmake_job, + download_xmake_job, + ] # Execute downloads in parallel where possible - scheduler = ParallelTaskScheduler(jobs, dependencies) + scheduler = ParallelTaskScheduler(all_jobs) scheduler.run() print("โœ… All dependencies downloaded successfully!") diff --git a/docker/linux/utility/local_setup.py b/docker/linux/utility/local_setup.py index 870ec212..0bf129d7 100644 --- a/docker/linux/utility/local_setup.py +++ b/docker/linux/utility/local_setup.py @@ -4,6 +4,7 @@ and deploy .bashrc configuration. """ +from typing import List, Optional import os import sys import shutil @@ -13,12 +14,15 @@ if project_root not in sys.path: sys.path.insert(0, project_root) -from config.build_config import ( +from config.docker_build_stages.common import ( + COMPILER, RELEASE_PACKAGE_DIR, CLICE_WORKDIR, - APT, UV, CMAKE, XMAKE, TOOLCHAIN, BASHRC, - APTComponent, UVComponent, CMakeComponent, XMakeComponent, ToolchainComponent + TOOLCHAIN_VERSIONS, ) +from config.docker_build_stages.dependencies_config import APT, UV, CMAKE, XMAKE, APTComponent, CMakeComponent, UVComponent, XMakeComponent +from config.docker_build_stages.toolchain_config import TOOLCHAIN, ToolchainComponent +from config.docker_build_stages.package_config import BASHRC from build_utils import ( Job, @@ -66,6 +70,32 @@ def install_apt_packages(apt_component: APTComponent) -> None: def install_toolchain(toolchain_component: ToolchainComponent) -> None: print("๐Ÿ”ง Installing custom toolchain...") + match COMPILER: + case "gcc": + run_command(f'update-alternatives --install /usr/bin/cc cc "/usr/bin/gcc-{TOOLCHAIN_VERSIONS["gcc"]}" {TOOLCHAIN_VERSIONS["gcc"]}') + run_command(f'update-alternatives --install /usr/bin/gcc gcc "/usr/bin/gcc-{TOOLCHAIN_VERSIONS["gcc"]}" {TOOLCHAIN_VERSIONS["gcc"]}') + run_command(f'update-alternatives --install /usr/bin/c++ c++ "/usr/bin/g++-{TOOLCHAIN_VERSIONS["gcc"]}" {TOOLCHAIN_VERSIONS["gcc"]}') + run_command(f'update-alternatives --install /usr/bin/g++ g++ "/usr/bin/g++-{TOOLCHAIN_VERSIONS["gcc"]}" {TOOLCHAIN_VERSIONS["gcc"]}') + case "clang": + run_command(f'update-alternatives --install /usr/bin/cc cc "/usr/bin/clang-{TOOLCHAIN_VERSIONS["clang"]}" {TOOLCHAIN_VERSIONS["clang"]}') + run_command(f'update-alternatives --install /usr/bin/clang clang "/usr/bin/clang-{TOOLCHAIN_VERSIONS["clang"]}" {TOOLCHAIN_VERSIONS["clang"]}') + run_command(f'update-alternatives --install /usr/bin/c++ c++ "/usr/bin/clang++-{TOOLCHAIN_VERSIONS["clang"]}" {TOOLCHAIN_VERSIONS["clang"]}') + run_command(f'update-alternatives --install /usr/bin/clang++ clang++ "/usr/bin/clang++-{TOOLCHAIN_VERSIONS["clang"]}" {TOOLCHAIN_VERSIONS["clang"]}') + case "zig": + # Zig binary is directly in package_dir after stripping top-level directory + zig_bin = os.path.join(toolchain_component.zig.package_dir, 'zig') + + # Register zig itself; update-alternatives cannot register a command with arguments ("zig cc"), so CC/CXX for Zig are exported via .bashrc instead + run_command(f'update-alternatives --install /usr/bin/zig zig "{zig_bin}" 100') + # run_command(f'update-alternatives --install /usr/bin/cc cc "{zig_bin} cc" 100') + # run_command(f'update-alternatives --install /usr/bin/c++ c++ "{zig_bin} c++" 100') + print(f"โœ… Zig compiler configured: {zig_bin}") + case _: + raise ValueError(f"Unsupported compiler specified: {COMPILER}") + + # clice requires linking with lld + run_command(f'update-alternatives --install /usr/bin/ld ld "/usr/bin/lld-{TOOLCHAIN_VERSIONS["clang"]}" {TOOLCHAIN_VERSIONS["clang"]}') + print(f"โœ… Toolchain available at: {toolchain_component.package_dir}") def install_cmake(cmake_component: CMakeComponent) -> None: @@ 
-108,6 +138,11 @@ def install_xmake(xmake_component: XMakeComponent) -> None: # Install XMake using update-alternatives run_command(f"update-alternatives --install /usr/bin/xmake xmake {xmake_path} 100") + + # Run XMake once: the first execution of the bundle sets up its internal environment + # The shell environment is not set up yet, so the --root option is needed to bypass XMake's root-account check + run_command("xmake --root --version") print("โœ… XMake installed successfully") @@ -140,30 +175,26 @@ def main() -> None: print("๐Ÿš€ Setting up Clice Dev Container...") # Define setup jobs with proper dependency management - # Note: Release archive is already extracted by Dockerfile, so we start with installations - jobs = { - "setup_git_safe_directory": Job("setup_git_safe_directory", setup_git_safe_directory, ()), - "install_apt_packages": Job("install_apt_packages", install_apt_packages, (APT,)), - "install_toolchain": Job("install_toolchain", install_toolchain, (TOOLCHAIN,)), - "install_cmake": Job("install_cmake", install_cmake, (CMAKE,)), - "install_xmake": Job("install_xmake", install_xmake, (XMAKE,)), - "install_python_packages": Job("install_python_packages", install_python_packages, (UV,)), - "deploy_bashrc": Job("deploy_bashrc", deploy_bashrc, ()), - } - - # Define dependencies - git setup depends on apt packages - dependencies = { - "install_apt_packages": set(), - "setup_git_safe_directory": {"install_apt_packages"}, - "install_toolchain": set(), - "install_cmake": set(), - "install_xmake": set(), - "install_python_packages": set(), - "deploy_bashrc": set(), - } + install_apt_job = Job("install_apt_packages", install_apt_packages, (APT,)) + setup_git_job = Job("setup_git_safe_directory", setup_git_safe_directory, (), [install_apt_job]) + install_toolchain_job = Job("install_toolchain", install_toolchain, (TOOLCHAIN,), [install_apt_job]) + install_cmake_job = Job("install_cmake", install_cmake, (CMAKE,)) + install_xmake_job = Job("install_xmake", install_xmake, (XMAKE,)) + install_python_job = Job("install_python_packages", install_python_packages, (UV,)) + deploy_bashrc_job = Job("deploy_bashrc", deploy_bashrc, ()) + + all_jobs = [ + install_apt_job, + setup_git_job, + install_toolchain_job, + install_cmake_job, + install_xmake_job, + install_python_job, + deploy_bashrc_job, + ] # Execute setup tasks in parallel where possible - scheduler = ParallelTaskScheduler(jobs, dependencies) + scheduler = ParallelTaskScheduler(all_jobs) scheduler.run() print("โœ… Clice development environment setup completed successfully!") diff --git a/docker/linux/utility/pyproject.toml b/docker/linux/utility/pyproject.toml index 2ba123e8..b646e5d5 100644 --- a/docker/linux/utility/pyproject.toml +++ b/docker/linux/utility/pyproject.toml @@ -4,5 +4,6 @@ version = "0.1.0" description = "Python dependencies for clice's development container build scripts." requires-python = ">=3.13" dependencies = [ - "python-gnupg" + "py-minisign>=0.13.0", + "python-gnupg>=0.5.0" ]
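As a closing illustration of the virtual-package handling in download_apt_packages above, the stanza parsing and .deb filename reconstruction can be condensed into one standalone helper. expected_deb_filename is a hypothetical name introduced here for illustration; the real code also honours per-package Architecture fields rather than a caller-supplied default.

```python
# Standalone sketch of the .deb filename reconstruction used by
# download_apt_packages: read the first `apt-cache show` stanza and apply
# Debian's <name>_<version>_<arch>.deb convention (':' encoded as %3a).
import subprocess
from typing import Optional

def expected_deb_filename(pkg: str, arch: str) -> Optional[str]:
    out = subprocess.run(["apt-cache", "show", pkg],
                         capture_output=True, text=True, check=False).stdout
    name: Optional[str] = None
    version: Optional[str] = None
    for line in out.splitlines():
        if line.startswith("Package:"):
            name = line.split(":", 1)[1].strip()
        elif line.startswith("Version:"):
            version = line.split(":", 1)[1].strip()
        elif not line.strip() and name and version:
            break  # end of the first stanza
    if name != pkg or not version:
        return None  # virtual or unavailable package: no .deb to copy
    return f"{pkg}_{version.replace(':', '%3a')}_{arch}.deb"
```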