nvidia-smi topo -m
node17_mlx_rename.sh
#!/bin/bash
# Create a conda environment for building/developing py4vasp.

# Build configuration.
CONDA_ENV_NAME=py4vasp_dev260114
ALL_PREFIX=
PY4VASP_GIT=https://github.com/vasp-dev/py4vasp.git
PY4VASP_GIT_BRANCH=0.9.0

# Load conda into this shell, then create the build environment.
source /data/apps/miniforge/25.3.0-3/etc/profile.d/conda.sh
conda create -n "${CONDA_ENV_NAME}" python=3.10 cmake ninja fftw -c conda-forge -y
#!/bin/bash
# Install a GNOME desktop environment plus remote-desktop support
# (for driving a headless host over remote desktop).

# GNOME desktop installation.
sudo apt update
sudo apt-get install gnome-core -y
# Boot into the graphical target by default (currently disabled).
# sudo systemctl set-default graphical.target

# Install the virtual-display / remote desktop component.
sudo apt install gnome-remote-desktop -y
#!/bin/bash
#SBATCH --gpus=1
#SBATCH
# NOTE(review): the bare #SBATCH directive above looks truncated by extraction — confirm.

# Installation prefix and conda environment name shared by the build scripts.
export all_prefix=/data/run01/scvi905/dev260110
export conda_env_name=phonopy_lammps_dev260110

# Client node and source versions/locations.
CLIENT_NODE=ln08
LAMMPS_TAG=stable_22Jul2025_update2
LAMMPS_GIT=https://github.com/lammps/lammps.git
DEEPMD_TAG=v3.1.1  # 3.1.1 is compatible with TensorFlow 2.19
#!/bin/bash
# Custom (user-adjustable) build variables for the LAMMPS build.
CONDA_ENV_NAME=lammps_22july2025_u2
PYTHON_VER=3.11
LAMMPS_VER=stable_22Jul2025_update2
# $(...) preferred over legacy backticks for command substitution.
ALL_PREFIX=$(pwd)
LAMMPS_SRC=${ALL_PREFIX}/lammps-${LAMMPS_VER}
CONDA_ENV_PATH=/root/shared-nvme/.conda/envs
export http_proxy=http://${proxy_username}:${proxy_password}@${proxy_addr}:${proxy_port}

安装 corkscrew 需要 root 权限
import json
import os
import random

from qwen_agent.llm import get_chat_model

# Connection settings for the chat-model server, taken from the
# environment (raises KeyError if any variable is unset).
api_key = os.environ["APIKEY"]
server_addr = os.environ["SERVER_ADDR"]
server_port = os.environ["SERVER_PORT"]
model_name = os.environ["MODEL_NAME"]
#!/bin/bash
# Slurm job header for a 2-node Megatron-LLaMA run (8 GPUs per node).
#SBATCH -J MEGATRON_LLAMA
#SBATCH -N 2
#SBATCH -p gpu
#SBATCH --qos=gpugpu
#SBATCH --gres=gpu:8
# stdout and stderr are merged into one per-job log file (%j = job id).
#SBATCH -o logs/slurm-%j.log
#SBATCH -e logs/slurm-%j.log
#SBATCH
# NOTE(review): the bare #SBATCH directive above looks truncated by extraction — confirm.
bash env_create.sh
sbatch build_allegro.sh
sbatch build_lammps.sh
sbatch test.sh

#!/bin/bash
#SBATCH --gpus=1
#SBATCH
# NOTE(review): the bare #SBATCH directive above looks truncated by extraction — confirm.

# Conda environment and source tree locations.
CONDA_ENV_NAME=suanpan_dev251206
ABACUS_SRC=~/run/dev251206/abacus-src

# Activate the build environment and the CUDA toolchain.
source /data/apps/miniforge/25.3.0-3/etc/profile.d/conda.sh
conda activate "$CONDA_ENV_NAME"
module load cuda/12.8