{#- Jinja2 macros for DRY CMake configuration -#}

{#- Base CMake args shared by all platforms -#}
{%- macro cmake_base_args() %}
  "-DBUILD_SHARED_LIBS=OFF",
  "-DMOMENTUM_BUILD_PYMOMENTUM=ON",
  "-DMOMENTUM_BUILD_EXAMPLES=OFF",
  "-DMOMENTUM_BUILD_TESTING=OFF",
  "-DMOMENTUM_BUILD_RENDERER=OFF",
  "-DMOMENTUM_ENABLE_SIMD=OFF",
{%- endmacro %}

{#- Linux-specific args for manylinux compatibility -#}
{%- macro cmake_linux_args() %}
  {{ cmake_base_args() }}
  "-DCMAKE_CXX_SCAN_FOR_MODULES=OFF",
  "-DMOMENTUM_USE_SYSTEM_RERUN_CPP_SDK=ON",
  # Use bundled pybind11 to avoid CMake FindPython Development.Module requirement
  # The pixi environment's pybind11 uses pybind11NewTools which requires python3_add_library
  # from CMake's FindPython, but manylinux Python only has Interpreter component
  "-DMOMENTUM_USE_SYSTEM_PYBIND11=OFF",
  # Use manylinux container's compiler (gcc-toolset-12) instead of pixi's GCC 14.3
  # to ensure manylinux_2_28 compatibility (requires GLIBCXX <= 3.4.24, CXXABI <= 1.3.11)
  "-DCMAKE_CXX_FLAGS=-static-libstdc++ -static-libgcc",
  "-DCMAKE_C_FLAGS=-static-libgcc",
{%- endmacro %}

{#- macOS-specific args -#}
{%- macro cmake_macos_args() %}
  {{ cmake_base_args() }}
  "-DCMAKE_CXX_SCAN_FOR_MODULES=OFF",
  "-DMOMENTUM_USE_SYSTEM_RERUN_CPP_SDK=OFF",
  "-DMOMENTUM_USE_SYSTEM_PYBIND11=ON",
{%- endmacro %}

{#- Windows CPU-specific args (Visual Studio generator for CPU builds).
    Built from cmake_base_args() so platform-independent flags stay in one place. -#}
{%- macro cmake_windows_cpu_args() %}
  "-G Visual Studio 17 2022",
  {{ cmake_base_args() }}
  "-DMOMENTUM_USE_SYSTEM_RERUN_CPP_SDK=ON",
  "-DMOMENTUM_USE_SYSTEM_PYBIND11=ON",
  "-DCMAKE_CXX_SCAN_FOR_MODULES=OFF",
{%- endmacro %}

{#- Windows GPU-specific args (same as CPU; GPU only changes the PyTorch variant linked).
    Delegates to the CPU macro so the two can never drift apart. -#}
{%- macro cmake_windows_gpu_args() %}
  {{ cmake_windows_cpu_args() }}
{%- endmacro %}

{#- Common cibuildwheel before-all script for Linux (CPU builds) -#}
{%- macro cibw_linux_before_all() %}
# Install zip for wheel repacking (manylinux containers don't include it by default)
yum install -y zip
# Use local pixi binary if available to avoid network issues
if [ -f /project/pixi_bin ]; then
    echo "Using local pixi binary"
    mkdir -p $HOME/.pixi/bin
    cp /project/pixi_bin $HOME/.pixi/bin/pixi
    chmod +x $HOME/.pixi/bin/pixi
else
    echo "Downloading pixi"
    curl -fsSL https://pixi.sh/install.sh | bash
fi
export PATH=$HOME/.pixi/bin:$PATH
# Install dependencies in a separate directory to avoid conflicts with host's .pixi folder
mkdir -p /tmp/build_env
cp {project}/pixi.toml {project}/pixi.lock {project}/README.md {project}/LICENSE /tmp/build_env/
cd /tmp/build_env
pixi install -e default
{%- endmacro %}

{#- ============================================================================ -#}
{#- MAIN TEMPLATE CONTENT -#}
{#- ============================================================================ -#}

[build-system]
requires = ["scikit-build-core", "pybind11", "setuptools-scm"]
build-backend = "scikit_build_core.build"

[project]
name = "pymomentum-{{ variant }}"
dynamic = ["version"]
description = "A library providing foundational algorithms for human kinematic motion and numerical optimization solvers to apply human motion in various applications ({{ description_suffix }})"
readme = "README.md"
requires-python = ">={{ python_version_min }},<{{ python_version_max }}"
authors = [
  { name = "Meta Reality Labs Research", email = "jeongseok@meta.com" },
]
license = { text = "MIT" }
keywords = [
  "kinematics",
  "motion",
  "optimization",
  "human-motion",
  "inverse-kinematics",
  "forward-kinematics",
  "body-tracking",
  "motion-capture",
  "character-animation",
  "robotics",
  # Variant keyword (cpu/gpu) so the wheel is searchable by compute flavor.
  # Previously hard-coded to "cpu", which was wrong for the gpu variant.
  "{{ variant }}",
]
classifiers = [
  "Development Status :: 4 - Beta",
  "Intended Audience :: Developers",
  "Intended Audience :: Science/Research",
  "License :: OSI Approved :: MIT License",
  "Operating System :: POSIX :: Linux",
  "Operating System :: MacOS",
  "Operating System :: Microsoft :: Windows",
  "Programming Language :: C++",
  "Programming Language :: Python :: 3",
  "Programming Language :: Python :: 3.12",
  "Programming Language :: Python :: 3.13",
  "Programming Language :: Python :: Implementation :: CPython",
  "Topic :: Scientific/Engineering",
  "Topic :: Scientific/Engineering :: Mathematics",
  "Topic :: Scientific/Engineering :: Physics",
  "Topic :: Software Development :: Libraries :: Python Modules",
]
dependencies = [
  "numpy>=1.20.0",
  "scipy>=1.7.0",
  # PyTorch version constraints - platform and Python version specific
  # Linux: Support latest PyTorch with CUDA
  # macOS: Limited to older PyTorch versions (CPU only)
  # Intel Macs are not supported; only macOS ARM64 (Apple Silicon) markers exist below.
  # Linux uses latest PyTorch
  "torch>={{ torch_min_py312 }},<{{ torch_max_py312 }}; platform_system == 'Linux' and python_version == '3.12'",
  "torch>={{ torch_min_py313 }},<{{ torch_max_py313 }}; platform_system == 'Linux' and python_version == '3.13'",
  # macOS ARM64 (Apple Silicon) - PyTorch 2.8+ available
  "torch>={{ torch_min_py312_macos }},<{{ torch_max_py312_macos }}; platform_system == 'Darwin' and platform_machine == 'arm64' and python_version == '3.12'",
  "torch>={{ torch_min_py313_macos }},<{{ torch_max_py313_macos }}; platform_system == 'Darwin' and platform_machine == 'arm64' and python_version == '3.13'",
  # Windows
  "torch>={{ torch_min_py312 }},<{{ torch_max_py312 }}; platform_system == 'Windows' and python_version == '3.12'",
  "torch>={{ torch_min_py313 }},<{{ torch_max_py313 }}; platform_system == 'Windows' and python_version == '3.13'",
]

[project.urls]
Homepage = "https://github.com/facebookresearch/momentum"
Documentation = "https://facebookresearch.github.io/momentum/"
Repository = "https://github.com/facebookresearch/momentum"
"Bug Tracker" = "https://github.com/facebookresearch/momentum/issues"
Changelog = "https://github.com/facebookresearch/momentum/releases"

[tool.scikit-build]
build-dir = "build/{wheel_tag}"
# Default (non-overridden) CMake args; platform overrides below replace this list.
cmake.args = [
  {{ cmake_base_args() }}
  "-DCMAKE_CXX_SCAN_FOR_MODULES=OFF",
  "-DMOMENTUM_USE_SYSTEM_RERUN_CPP_SDK=OFF",
  "-DMOMENTUM_USE_SYSTEM_PYBIND11=ON",
  "-DCMAKE_DISABLE_FIND_PACKAGE_Arrow=ON",
]
# Read CMAKE_PREFIX_PATH from environment variable
# This is needed for cibuildwheel where pixi installs deps to a custom location
cmake.define.CMAKE_PREFIX_PATH = {env = "CMAKE_PREFIX_PATH"}
minimum-version = "0.10"
metadata.version.provider = "scikit_build_core.metadata.setuptools_scm"
sdist.exclude = [
  ".github/",
  ".pixi/",
  "build/",
  "dist/",
  "*.lock",
]
wheel.exclude = ["geometry_test_helper.*"]

[[tool.scikit-build.overrides]]
if.platform-system = "^darwin"
cmake.args = [
  {{ cmake_macos_args() }}
]
# Overrides replace tables wholesale, so the env-sourced define must be repeated.
cmake.define.CMAKE_PREFIX_PATH = {env = "CMAKE_PREFIX_PATH"}

[[tool.scikit-build.overrides]]
if.platform-system = "^linux"
cmake.args = [
  {{ cmake_linux_args() }}
]
cmake.define.CMAKE_PREFIX_PATH = {env = "CMAKE_PREFIX_PATH"}

[[tool.scikit-build.overrides]]
if.platform-system = "^win32"
cmake.args = [
{% if variant == "gpu" %}
  {{ cmake_windows_gpu_args() }}
{% else %}
  {{ cmake_windows_cpu_args() }}
{% endif %}
]
cmake.define.CMAKE_PREFIX_PATH = {env = "CMAKE_PREFIX_PATH"}

[tool.setuptools_scm]
# Automatically determine version from git tags
# Tags should follow the pattern: v1.2.3
# Will generate versions like: 1.2.3 (on tag), 1.2.4.post12 (between tags)
version_scheme = "post-release"
local_scheme = "no-local-version"
version_file = "pymomentum/_version.py"

[tool.cibuildwheel]
build = "cp312-* cp313-*"
build-verbosity = 3
# Skip PyPy, musllinux, and Windows for now
# Windows is skipped due to cibuildwheel shell issues with export/command substitution
skip = "pp* *-musllinux* *-win*"
archs = ["auto64"]
manylinux-x86_64-image = "manylinux_2_28"
# Disable build isolation so PyTorch installed by before-build is available for CMake
build-frontend = { name = "pip", args = ["--no-build-isolation"] }

# Common
# PYMOMENTUM_VARIANT env var controls the build variant (cpu or gpu).
# Set it in the CI workflow env; it is forwarded into the manylinux container
# via environment-pass (do NOT put it in the linux environment table, which
# would override the forwarded value).
before-build = """
set -e  # Exit on error

VER=$(python -c "import sys; print(f'{sys.version_info.major}{sys.version_info.minor}')")
VARIANT="${PYMOMENTUM_VARIANT:-cpu}"
echo "Building for Python $VER, variant=$VARIANT"

# Copy the Python-version-specific pyproject.toml for the correct variant
if [ -f "pyproject-pypi-${VARIANT}-py${VER}.toml" ]; then
    cp pyproject-pypi-${VARIANT}-py${VER}.toml pyproject.toml
    echo "Copied pyproject-pypi-${VARIANT}-py${VER}.toml"
else
    echo "ERROR: pyproject-pypi-${VARIANT}-py${VER}.toml not found!"
    ls -la pyproject*.toml
    exit 1
fi

# Upgrade pip first
echo "Upgrading pip..."
python -m pip install --upgrade pip

# Install build dependencies (required when using --no-build-isolation)
# These are from [build-system].requires in pyproject.toml
echo "Installing build dependencies..."
python -m pip install scikit-build-core pybind11 setuptools-scm

# Install PyTorch (needed at build time to link against libtorch)
# CPU builds use the cpu index; GPU builds use the CUDA 12.9 index
if [ "$VARIANT" = "gpu" ]; then
    TORCH_INDEX="https://download.pytorch.org/whl/cu129"
else
    TORCH_INDEX="https://download.pytorch.org/whl/cpu"
fi
echo "Installing PyTorch for build (variant=$VARIANT, index=$TORCH_INDEX)..."
# NOTE(review): these are the Linux/Windows torch bounds; macOS runtime deps use
# the *_macos bounds — confirm the shared range is valid if macOS builds use this hook.
if [ "$VER" = "312" ]; then
    python -m pip install "torch>={{ torch_min_py312 }},<{{ torch_max_py312 }}" --index-url "$TORCH_INDEX"
elif [ "$VER" = "313" ]; then
    python -m pip install "torch>={{ torch_min_py313 }},<{{ torch_max_py313 }}" --index-url "$TORCH_INDEX"
else
    echo "Unsupported Python version: $VER"
    exit 1
fi

# Verify torch is installed correctly
# Temporarily clear LD_LIBRARY_PATH to avoid loading pixi env's libtorch_python.so
# (built for a different Python version) instead of the pip-installed one
echo "Verifying PyTorch installation..."
LD_LIBRARY_PATH="" python -c "import torch; print(f'PyTorch {torch.__version__} installed at {torch.__file__}')"
"""

test-command = "python -c \"import pymomentum\""
test-requires = ["numpy", "scipy"]
before-test = """
set -e  # Exit on error, consistent with before-build
VARIANT="${PYMOMENTUM_VARIANT:-cpu}"
if [ "$VARIANT" = "gpu" ]; then
    TORCH_INDEX="https://download.pytorch.org/whl/cu129"
else
    TORCH_INDEX="https://download.pytorch.org/whl/cpu"
fi
VER=$(python -c "import sys; print(f'{sys.version_info.major}{sys.version_info.minor}')")
if [ "$VER" = "312" ]; then
    python -m pip install "torch>={{ torch_min_py312 }},<{{ torch_max_py312 }}" --index-url "$TORCH_INDEX"
elif [ "$VER" = "313" ]; then
    python -m pip install "torch>={{ torch_min_py313 }},<{{ torch_max_py313 }}" --index-url "$TORCH_INDEX"
fi
"""

[tool.cibuildwheel.linux]
before-all = """
{{ cibw_linux_before_all() }}
"""

# Use manylinux container's gcc-toolset-12 compiler for manylinux_2_28 compatibility
# Pixi provides dependencies (headers/libs) but we use the container's compiler
# LD_LIBRARY_PATH is required for auditwheel to find transitive dependencies (libdeflate, libtiff, etc.)
environment = { PATH = "$HOME/.pixi/bin:$PATH", CMAKE_PREFIX_PATH = "/tmp/build_env/.pixi/envs/default", CONDA_PREFIX = "/tmp/build_env/.pixi/envs/default", MOMENTUM_BUILD_WITH_FBXSDK = "OFF", LD_LIBRARY_PATH = "/tmp/build_env/.pixi/envs/default/lib" }

# Forward PYMOMENTUM_VARIANT from host env into the manylinux container
# so variant-aware hooks (before-build, repair-wheel-command, before-test) work correctly.
# Do NOT also set PYMOMENTUM_VARIANT in the environment table above — that would
# override the passed-through value (the shell expansion happens before injection).
environment-pass = ["PYMOMENTUM_VARIANT"]

repair-wheel-command = """
# Manual wheel repair for Linux builds (no auditwheel).
# Supports both CPU and GPU variants via PYMOMENTUM_VARIANT env var.
#
# auditwheel cannot be used here because:
# - manylinux_2_28 policy rejects our libs (too-recent GLIBCXX symbols)
# - linux_x86_64 policy is not available inside the manylinux container
#
# We manually bundle dependencies and rename libstdc++ to avoid the dynamic
# linker collision where Python/torch load the system libstdc++.so.6 first.
set -e
PIXI_ENV="/tmp/build_env/.pixi/envs/default"
WORK=/tmp/wheel_work
rm -rf $WORK /tmp/final_wheel && mkdir -p $WORK /tmp/final_wheel

echo '=== Unpacking wheel ==='
cd $WORK
unzip -q {wheel}

# Find or create the .libs directory (variant-aware)
VARIANT="${PYMOMENTUM_VARIANT:-cpu}"
LIBS_DIR="pymomentum_${VARIANT}.libs"
mkdir -p "$LIBS_DIR"

# Recursively collect and bundle shared library dependencies
# We iterate until no new libraries are discovered (transitive closure)
echo '=== Collecting and bundling dependencies (recursive) ==='

# Libraries we must NOT bundle: torch/MKL (provided by the torch wheel),
# glibc/loader pieces, libgcc, libpython, and libstdc++ (handled separately below).
is_skip_lib() {
    case "$1" in
        libtorch*|libc10*|libmkl*) return 0 ;;
        libc.so*|libm.so*|libdl*|librt*|libpthread*) return 0 ;;
        libresolv*|libnss*|ld-linux*) return 0 ;;
        libgcc_s*) return 0 ;;
        libpython*) return 0 ;;
        libstdc++.so*) return 0 ;;
        *) return 1 ;;
    esac
}

# Copy one dependency into $LIBS_DIR. Returns 0 only if a NEW library was
# bundled (so the caller can count newly discovered libs per pass).
find_and_bundle() {
    local lib="$1"
    if is_skip_lib "$lib"; then return 1; fi
    if [ -f "$LIBS_DIR/$lib" ]; then return 1; fi
    local LIB_PATH=""
    if [ -f "$PIXI_ENV/lib/$lib" ]; then
        LIB_PATH="$PIXI_ENV/lib/$lib"
    else
        # grep -F: library names contain dots, which plain grep would treat as
        # regex wildcards and could match an unrelated ldconfig entry.
        LIB_PATH=$(ldconfig -p 2>/dev/null | grep -F "$lib" | head -1 | sed 's/.* => //' || true)
    fi
    if [ -n "$LIB_PATH" ] && [ -f "$LIB_PATH" ]; then
        # Resolve symlinks so we copy the real file under the needed name.
        local REAL_PATH=$(readlink -f "$LIB_PATH")
        cp "$REAL_PATH" "$LIBS_DIR/$lib"
        chmod 755 "$LIBS_DIR/$lib"
        echo " Bundled: $lib (from $REAL_PATH)"
        return 0
    fi
    return 1
}

# Pass 1: collect from wheel's .so files
NEEDED_LIBS=""
for so_file in $(find . -name '*.so' -type f); do
    for lib in $(patchelf --print-needed "$so_file" 2>/dev/null); do
        NEEDED_LIBS="$NEEDED_LIBS $lib"
    done
done

ITERATION=0
while true; do
    ITERATION=$((ITERATION + 1))
    echo "--- Bundling pass $ITERATION ---"
    NEW_FOUND=0
    for lib in $(echo "$NEEDED_LIBS" | tr ' ' '\\n' | sort -u); do
        if find_and_bundle "$lib"; then
            NEW_FOUND=$((NEW_FOUND + 1))
        fi
    done
    if [ "$NEW_FOUND" -eq 0 ]; then
        echo "No new libraries found in pass $ITERATION, done."
        break
    fi
    echo "Bundled $NEW_FOUND new libraries, scanning their dependencies..."
    # Rescan only the bundled libs; the wheel's own .so deps were covered in pass 1.
    NEEDED_LIBS=""
    for bundled in "$LIBS_DIR"/*.so*; do
        [ -f "$bundled" ] || continue
        for lib in $(patchelf --print-needed "$bundled" 2>/dev/null); do
            NEEDED_LIBS="$NEEDED_LIBS $lib"
        done
    done
    # Safety valve against a pathological dependency graph.
    if [ "$ITERATION" -gt 10 ]; then
        echo "WARNING: Exceeded 10 bundling passes, stopping."
        break
    fi
done
echo "Bundled libraries:"
ls -la "$LIBS_DIR/"

# Special handling: rename libstdc++ to avoid dynamic linker collision
echo '=== Renaming libstdc++ ==='
RENAMED_LIBSTDCXX="libstdc++-pymomentum.so.6"
# Find pixi's libstdc++ (the one with GLIBCXX_3.4.29)
PIXI_LIBSTDCXX=$(find "$PIXI_ENV/lib" -name 'libstdc++.so.6.*' -not -type l | sort -V | tail -1)
if [ -z "$PIXI_LIBSTDCXX" ]; then
    PIXI_LIBSTDCXX="$PIXI_ENV/lib/libstdc++.so.6"
fi
PIXI_LIBSTDCXX=$(readlink -f "$PIXI_LIBSTDCXX")
echo "Pixi libstdc++: $PIXI_LIBSTDCXX"
# Copy with renamed name
cp "$PIXI_LIBSTDCXX" "$LIBS_DIR/$RENAMED_LIBSTDCXX"
chmod 755 "$LIBS_DIR/$RENAMED_LIBSTDCXX"
# Remove the original-named copy if bundled above
rm -f "$LIBS_DIR/libstdc++.so.6"
# Set the SONAME of the renamed library
patchelf --set-soname "$RENAMED_LIBSTDCXX" "$LIBS_DIR/$RENAMED_LIBSTDCXX"

# Patch DT_NEEDED in all .so files
echo '=== Patching DT_NEEDED and RPATH ==='
# IFS= read -r: preserve leading whitespace and backslashes in paths.
find . -name '*.so*' -type f | while IFS= read -r so_file; do
    case "$so_file" in
        *libstdc++-pymomentum*) continue ;;
    esac
    # Replace libstdc++.so.6 reference with renamed version
    if patchelf --print-needed "$so_file" 2>/dev/null | grep -q '^libstdc++[.]so[.]6$'; then
        echo " Patching DT_NEEDED: $so_file"
        patchelf --replace-needed libstdc++.so.6 "$RENAMED_LIBSTDCXX" "$so_file"
    fi
    # Set RPATH to find bundled libs (use DT_RPATH for higher priority)
    # Compute relative path from .so location to .libs dir
    SO_DIR=$(dirname "$so_file")
    REL_PATH=$(python3 -c "import os.path; print(os.path.relpath('$LIBS_DIR', '$SO_DIR'))")
    RPATH_VAL='$ORIGIN/'"$REL_PATH"
    patchelf --force-rpath --set-rpath "$RPATH_VAL" "$so_file" 2>/dev/null || true
done

# Rename wheel tag from linux_x86_64 to manylinux_2_28_x86_64
echo '=== Repacking wheel ==='
# Update the WHEEL metadata tag; `wheel pack` below regenerates RECORD and
# names the output wheel from this tag.
WHEEL_FILE=$(find . -name 'WHEEL' -path '*.dist-info/*')
if [ -n "$WHEEL_FILE" ]; then
    sed -i 's/linux_x86_64/manylinux_2_28_x86_64/g' "$WHEEL_FILE"
fi
pip install wheel 2>/dev/null || true
python -m wheel pack "$WORK" -d /tmp/final_wheel
FINAL_WHEEL=$(ls /tmp/final_wheel/*.whl | head -1)
echo "Final wheel: $(basename $FINAL_WHEEL)"
cp "$FINAL_WHEEL" {dest_dir}/
echo "Done"
"""