services:
  # Anchor the service-name key so container_name / hostname / profiles
  # all reference a single definition (*name).
  &name vllm:
    # Merge shared stanzas defined earlier in the file; with
    # `<<: [*a, *b]` the earlier alias wins on conflicting keys.
    <<: [*ai-common, *gpu]
    container_name: *name
    hostname: *name
    profiles:
      - *name
    # image: vllm/vllm-openai:cu130-nightly
    build: