TradingAgents/docker-compose.yml

version: "3.8"

services:
  # Ollama service for local LLM
  ollama:
    image: ollama/ollama:latest
    container_name: ollama
    network_mode: host
    volumes:
      - ./ollama_data:/root/.ollama
    # Uncomment for GPU support
    # deploy:
    #   resources:
    #     reservations:
    #       devices:
    #         - capabilities: ["gpu"]
    profiles:
      - ollama
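  # Example usage (a sketch; the model name is illustrative, not from this repo):
  #   docker compose --profile ollama up -d ollama
  #   docker exec -it ollama ollama pull llama3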

  # App container for Ollama setup
  app-ollama:
    build:
      context: .
    container_name: trading-agents-ollama
    network_mode: host
    volumes:
      - .:/app
      - ./data:/app/data
    env_file:
      - .env
    environment:
      - LLM_BACKEND_URL=http://localhost:11434/v1
      - LLM_PROVIDER=ollama
    depends_on:
      - ollama
    tty: true
    stdin_open: true
    profiles:
      - ollama
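  # Because both services use network_mode: host, the app reaches the Ollama
  # API at localhost:11434 directly, with no Compose-managed network in between.
  # Example (sketch):
  #   docker compose --profile ollama run --rm app-ollama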

  # App container for OpenAI setup (no Ollama dependency)
  app-openai:
    build:
      context: .
    container_name: trading-agents-openai
    network_mode: host
    volumes:
      - .:/app
      - ./data:/app/data
    env_file:
      - .env
    environment:
      - LLM_PROVIDER=openai
      - LLM_BACKEND_URL=https://api.openai.com/v1
    tty: true
    stdin_open: true
    profiles:
      - openai
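  # Assumes .env supplies the OpenAI credentials (commonly OPENAI_API_KEY;
  # the exact variable name depends on the app code). Example (sketch):
  #   docker compose --profile openai run --rm app-openai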

  # Generic app container (uses .env settings as-is)
  app:
    build:
      context: .
    container_name: trading-agents
    network_mode: host
    volumes:
      - .:/app
      - ./data:/app/data
    env_file:
      - .env
    tty: true
    stdin_open: true
    profiles:
      - default
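
# Profile selection (sketch; profile names as defined above — note that a
# service listed under a profile only starts when that profile is activated):
#   docker compose --profile ollama up -d    # local Ollama server + app
#   docker compose --profile openai up -d    # OpenAI-backed app only
#   docker compose --profile default up      # generic container, .env as-is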