LAMMPS with ML-PACE on Blackwell #lammps

Building LAMMPS with the ML-PACE plugin on a container cloud

Notes

  1. The CMake build needs to download source archives, so prepare them in advance (see the caching sketch after this list).
  2. The container's reverse proxy is unreliable, so cache the archives locally first and then upload the tarballs.
  3. The container image is trimmed down from an NGC image, so the incompatible parts must be disabled.
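
A minimal sketch of the local caching step, assuming the PACE evaluator sources come from the ICAMS GitHub releases (verify against the default PACELIB_URL in cmake/Modules/Packages/ML-PACE.cmake of your LAMMPS tree before relying on this URL); run it on a machine with working network access, then upload the two tarballs to the container:

    # Hypothetical upstream URL for the PACE evaluator library sources
    wget -O lammps-user-pace-v.2023.11.25.fix2.tar.gz \
        https://github.com/ICAMS/lammps-user-pace/archive/refs/tags/v.2023.11.25.fix2.tar.gz
    # LAMMPS sources (same commands as the ones commented out in the environment script below)
    git clone -b stable_22Jul2025_update2 https://github.com/lammps/lammps.git ./lammps-stable_22Jul2025_update2
    tar -zcf lammps-stable_22Jul2025_update2.tar.gz ./lammps-stable_22Jul2025_update2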

Steps

  1. Upload the tarballs.
  2. bash env_create.sh to create the conda environment.
  3. Edit the following files:
    1. nvcc_wrapper
      default_arch="sm_120"
      host_compiler=/usr/bin/x86_64-linux-gnu-g++
      nvcc_compiler=nvcc
    2. ML-PACE.cmake
          set(PACELIB_URL "http://127.0.0.1:9999/lammps-user-pace-v.2023.11.25.fix2.tar.gz" CACHE STRING "URL for PACE evaluator library sources")
  4. bash file_server.sh to start the local file server (a minimal sketch follows this list).
  5. bash build_lammps.sh 2>&1 | tee build_lammps.log to build LAMMPS.
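
A minimal sketch of file_server.sh, assuming the cached tarballs sit next to the script and that ML-PACE.cmake points at port 9999 as in step 3:

    #!/bin/bash
    # file_server.sh (sketch): serve the cached archives so that
    # http://127.0.0.1:9999/lammps-user-pace-v.2023.11.25.fix2.tar.gz resolves during the CMake step
    cd "$(dirname "$0")"
    nohup python -m http.server 9999 > file_server.log 2>&1 &
    # Quick check from another shell:
    #   curl -fI http://127.0.0.1:9999/lammps-user-pace-v.2023.11.25.fix2.tar.gz

build_lammps.sh (step 5):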
#!/bin/bash
# User-defined variables
CONDA_ENV_NAME=lammps_22july2025_u2
PYTHON_VER=3.11
LAMMPS_VER=stable_22Jul2025_update2
ALL_PREFIX=`pwd`
LAMMPS_SRC=${ALL_PREFIX}/lammps-${LAMMPS_VER}
CONDA_ENV_PATH=/root/shared-nvme/.conda/envs
CONDA_ENV_PATH=${CONDA_ENV_PATH}/${CONDA_ENV_NAME}
# Mirror and source URLs
ALI_MIRROR="https://mirrors.aliyun.com/pytorch-wheels/cu128/"
export PIP_INDEX_URL=https://mirrors.cernet.edu.cn/pypi/web/simple
export UV_INDEX_URL=https://mirrors.cernet.edu.cn/pypi/web/simple
export CONDARC=${ALL_PREFIX}/condarc
LAMMPS_GIT=https://github.com/lammps/lammps.git
source /root/miniconda3/etc/profile.d/conda.sh
conda activate ${CONDA_ENV_PATH}
echo CONDA_PREFIX=${CONDA_PREFIX}
# CUDA and cuDNN directory configuration
export CUDNN_ROOT=/usr
export CUDA_HOME=/usr/local/cuda-12.8
export CUDA_ROOT=${CUDA_HOME}
# Clear conflicting environment variables
unset OPAL_PREFIX
unset CMAKE_PREFIX_PATH
# Put nvvm on the path so that cicc can be found
export PATH=$CUDA_HOME/nvvm/bin:$PATH
export LD_LIBRARY_PATH=$CUDA_HOME/nvvm/lib64:$LD_LIBRARY_PATH
export LIBRARY_PATH=$CUDA_HOME/nvvm/lib64:$LIBRARY_PATH
export CPATH=$CUDA_HOME/nvvm/include:$CPATH
# Torch build parameter: target CUDA architecture
export TORCH_CUDA_ARCH_LIST="12.0"
# Configure the compilers used by OpenMPI
export CC=`which x86_64-linux-gnu-gcc`
export CXX=`which x86_64-linux-gnu-g++`
export FC=`which x86_64-linux-gnu-gfortran`
export OMPI_CC=$CC
export OMPI_CXX=$CXX
export OMPI_FC=$FC
# Install the patched compiler wrapper (nvcc_wrapper)
cp nvcc_wrapper ${LAMMPS_SRC}/lib/kokkos/bin/nvcc_wrapper
# Override the ML-PACE remote file URL
cp ML-PACE.cmake ${LAMMPS_SRC}/cmake/Modules/Packages/ML-PACE.cmake
# Set up the build directory
NOW=`date "+%Y%m%d_%H%M"`
cd $LAMMPS_SRC
mkdir -p build_${NOW}
cd build_${NOW}
# Dump the environment variables for inspection
export
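# Configure a KOKKOS + GPU + ML-PACE build targeting Blackwell (sm_120 / CUDA architecture 12.0)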
cmake \
-D CMAKE_BUILD_TYPE=Debug \
-D CMAKE_CXX_STANDARD=17 \
-D CMAKE_CUDA_STANDARD=17 \
-D CMAKE_CXX_COMPILER=$LAMMPS_SRC/lib/kokkos/bin/nvcc_wrapper \
-D BUILD_MPI=ON \
-D PKG_KOKKOS=ON \
-D PKG_GPU=ON \
-D PKG_ML-PACE=ON \
-D CUDA_ARCH_LIST=12.0 \
-D CUDAToolkit_ROOT=$CUDA_HOME \
-D CUDA_TOOLKIT_ROOT_DIR=$CUDA_HOME \
-D FFT=KISS \
-D GPU_API=cuda \
-D CMAKE_CUDA_ARCHITECTURES="120" \
-D GPU_ARCH=sm_120 \
-D Kokkos_ENABLE_CUDA=ON \
-D Kokkos_ARCH_BLACKWELL120=ON \
-D Kokkos_ENABLE_CUDA_LAMBDA=ON \
-D Kokkos_ENABLE_OPENMP=ON \
-D MKL_INCLUDE_DIR="$CONDA_PREFIX/include" \
-D CMAKE_PREFIX_PATH="$CUDA_HOME;$CONDA_PREFIX/lib/python$PYTHON_VER/site-packages/torch/share/cmake" \
-D CMAKE_LIBRARY_PATH=$CUDA_HOME/lib64/stubs \
-D CMAKE_MPI_C_COMPILER=mpicc \
-D CMAKE_MPI_CXX_COMPILER=mpicxx \
-D CMAKE_INSTALL_PREFIX=$CONDA_PREFIX \
-D BUILD_OMP=ON \
-D PKG_OPENMP=ON \
$LAMMPS_SRC/cmake
make -j16 VERBOSE=1
if [ $? -ne 0 ]; then
    echo "Build failed."
    exit 1
fi
echo "Build succeeded."
echo "install path: $CONDA_PREFIX"
make install
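
After make install, a quick smoke test can confirm that the KOKKOS and ML-PACE packages were built in (a sketch; lmp is installed under $CONDA_PREFIX/bin via CMAKE_INSTALL_PREFIX above, and in.example is a placeholder input file):

    # List the compiled-in packages and styles; look for KOKKOS and ML-PACE
    $CONDA_PREFIX/bin/lmp -h | grep -E "KOKKOS|ML-PACE"
    # Run an input on one GPU with the Kokkos suffix enabled
    $CONDA_PREFIX/bin/lmp -k on g 1 -sf kk -in in.example

The environment-setup script follows; its final line starts the local file server used in step 4.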
#!/bin/bash
# User-defined variables
CONDA_ENV_NAME=lammps_22july2025_u2
PYTHON_VER=3.11
LAMMPS_VER=stable_22Jul2025_update2
ALL_PREFIX=`pwd`
LAMMPS_SRC=${ALL_PREFIX}/lammps-${LAMMPS_VER}
CONDA_ENV_PATH=/root/shared-nvme/.conda/envs
mkdir -p ${CONDA_ENV_PATH}
CONDA_ENV_PATH=${CONDA_ENV_PATH}/${CONDA_ENV_NAME}
# Mirror and source URLs
ALI_MIRROR="https://mirrors.aliyun.com/pytorch-wheels/cu128/"
export PIP_INDEX_URL=https://mirrors.cernet.edu.cn/pypi/web/simple
export UV_INDEX_URL=https://mirrors.cernet.edu.cn/pypi/web/simple
export CONDARC=${ALL_PREFIX}/condarc
LAMMPS_GIT=https://github.com/lammps/lammps.git
conda create -p ${CONDA_ENV_PATH} python=${PYTHON_VER} openmpi "libblas=*=*_mkl" "cmake<=3.29.4" -y --override-channels -c conda-forge
conda activate ${CONDA_ENV_PATH}
# Purge cached files
pip cache purge
# Install the torch dependencies via uv
pip install uv
uv pip install --extra-index-url ${ALI_MIRROR} --index-strategy unsafe-best-match \
"numpy<2" \
torch==2.8.0 torchvision torchaudio
# Fetch the source code
# (run the next two commands locally, on a machine with network access)
# git clone -b ${LAMMPS_VER} ${LAMMPS_GIT} ${LAMMPS_SRC}
# tar -zcf lammps-${LAMMPS_VER}.tar.gz ./lammps-${LAMMPS_VER}
tar --no-same-permissions -xf ./lammps-${LAMMPS_VER}.tar.gz
cp ${LAMMPS_SRC}/lib/kokkos/bin/nvcc_wrapper .
cp ${LAMMPS_SRC}/cmake/Modules/Packages/ML-PACE.cmake .
python -m http.server 9999