forked from letta-ai/letta
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathdocker-compose-vllm.yaml
More file actions
35 lines (33 loc) · 837 Bytes
/
docker-compose-vllm.yaml
File metadata and controls
35 lines (33 loc) · 837 Bytes
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
# docker-compose-vllm.yaml — run Letta backed by a local vLLM
# OpenAI-compatible inference server (requires an NVIDIA GPU).
# NOTE(review): the `version` key is ignored by Compose v2; kept only for
# compatibility with older docker-compose binaries.
version: '3.8'

services:
  letta:
    image: letta/letta:latest
    ports:
      - "8283:8283"  # Letta server API
    environment:
      # vLLM is reachable by service name on the default compose network.
      - LETTA_LLM_ENDPOINT=http://vllm:8000
      - LETTA_LLM_ENDPOINT_TYPE=vllm
      # Replace with your model (e.g. set LETTA_LLM_MODEL in a .env file).
      - LETTA_LLM_MODEL=${LETTA_LLM_MODEL}
      - LETTA_LLM_CONTEXT_WINDOW=8192
    depends_on:
      - vllm

  vllm:
    image: vllm/vllm-openai:latest
    runtime: nvidia
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: all
              capabilities: [gpu]
    environment:
      # Needed to download gated/private models from the Hugging Face Hub.
      - HUGGING_FACE_HUB_TOKEN=${HUGGING_FACE_HUB_TOKEN}
    volumes:
      # Persist downloaded model weights across container restarts.
      - ~/.cache/huggingface:/root/.cache/huggingface
    ports:
      - "8000:8000"  # OpenAI-compatible API
    # Replace ${LETTA_LLM_MODEL} with your model.
    # FIX(review): this comment previously sat INSIDE the folded block
    # scalar, so "# Replace with your model" was folded into the command
    # string handed to vLLM. Moved above the key; also switched `>` to `>-`
    # to strip the trailing newline from the single-line command.
    # NOTE(review): --max_model_len=8000 disagrees with
    # LETTA_LLM_CONTEXT_WINDOW=8192 above — confirm the intended context size.
    command: >-
      --model ${LETTA_LLM_MODEL} --max_model_len=8000
    ipc: host