latest
sha256:619c0c35b57b5c2b473ad4f6069f8113f7be361fe19cf1a1c00cfd3304e9e0b9
Last pushed: 3 months ago by chasefrankenfeld611
Type: Compose
Manifest digest: sha256:619c0c35b57b5c2b473ad4f6069f8113f7be361fe19cf1a1c00cfd3304e9e0b9
services:
  backend:
    image: ai/chat-demo-backend:latest
    build: ./backend
    ports:
      - "8000:8000"
    environment:
      - MODEL_HOST=http://ollama:11434
    depends_on:
      ollama:
        condition: service_healthy
  frontend:
    image: ai/chat-demo-frontend:latest
    build: ./frontend
    ports:
      - "3000:3000"
    environment:
      - PORT=3000
      - HOST=0.0.0.0
    command: npm run start
    depends_on:
      - backend
  ollama:
    image: ai/chat-demo-model:latest
    build: ./ollama
    ports:
      - "11434:11434"
    volumes:
      - ollama_data:/root/.ollama
    environment:
      - MODEL=${MODEL:-mistral:latest} # Default to mistral:latest if MODEL is not set
    healthcheck:
      test: ["CMD-SHELL", "curl -s http://localhost:11434/api/tags | jq -e \".models[] | select(.name == \\\"${MODEL:-mistral:latest}\\\")\" > /dev/null"]
      interval: 10s
      timeout: 5s
      retries: 50
      start_period: 600s
    deploy:
      resources:
        limits:
          memory: 8G

volumes:
  ollama_data:
    name: ollama_data
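The healthcheck above polls Ollama's /api/tags endpoint until the configured model appears in the tag listing; the 600s start_period gives the image time to pull the model before failed checks count against the retry limit. A minimal Python sketch of the same readiness probe, assuming the default mistral:latest tag and the standard Ollama tags API:

import json
import os
import sys
import time
import urllib.request

OLLAMA_URL = os.environ.get("MODEL_HOST", "http://localhost:11434")
MODEL = os.environ.get("MODEL", "mistral:latest")

def model_is_ready() -> bool:
    """Return True once the configured model appears in Ollama's /api/tags listing."""
    try:
        with urllib.request.urlopen(f"{OLLAMA_URL}/api/tags", timeout=5) as resp:
            tags = json.load(resp)
    except OSError:
        return False  # Ollama not reachable yet
    return any(m.get("name") == MODEL for m in tags.get("models", []))

if __name__ == "__main__":
    # Mirror the compose healthcheck cadence: retry every 10 seconds, up to 50 times.
    for _ in range(50):
        if model_is_ready():
            sys.exit(0)
        time.sleep(10)
    sys.exit(1)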
FastAPI-based backend service for AI-driven text and chat generation with Ollama model server.
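As a rough illustration of how a FastAPI backend like this can talk to the model server, the sketch below forwards a prompt to Ollama's /api/generate endpoint using the MODEL_HOST and MODEL values wired up in the compose file. The /chat route and request schema are assumptions for the example, not the image's documented API.

import os

import httpx
from fastapi import FastAPI
from pydantic import BaseModel

MODEL_HOST = os.environ.get("MODEL_HOST", "http://ollama:11434")
MODEL = os.environ.get("MODEL", "mistral:latest")

app = FastAPI()

class ChatRequest(BaseModel):
    prompt: str  # hypothetical request shape; the real backend's schema may differ

@app.post("/chat")
async def chat(req: ChatRequest) -> dict:
    """Forward the prompt to Ollama and return the non-streamed completion."""
    async with httpx.AsyncClient(timeout=120.0) as client:
        resp = await client.post(
            f"{MODEL_HOST}/api/generate",
            json={"model": MODEL, "prompt": req.prompt, "stream": False},
        )
        resp.raise_for_status()
    return {"reply": resp.json().get("response", "")}

Served with, for example, uvicorn main:app --host 0.0.0.0 --port 8000, this lines up with the backend service's 8000:8000 port mapping.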
Runtime environment for AI models deployed with Ollama, based on ollama/ollama:0.4.0-rc8.
Responsive frontend for an AI-driven chat application, powered by Remix and Vite.