AI/ML Stack Installation
This guide covers the AI/ML stack that powers RavenmaskOS agents, local inference, and LLM observability.
Overview
| Component | Purpose | Port | URL |
|---|---|---|---|
| Norns Agent | LangGraph-based agent API | 8000 | norns-pm.ravenhelm.dev |
| Langfuse | LLM observability and traces | 3000 | langfuse.ravenhelm.dev |
| Ollama | Local embeddings and inference | 11434 | - |
| Neo4j (optional) | Graph memory store | 7474 / 7687 | neo4j.ravenhelm.dev |
Prerequisites
Before deploying the AI/ML stack:
- General prerequisites completed
- Core Stack running (PostgreSQL, Redis, Traefik)
- Identity Stack configured (Zitadel for SSO)
- ravenhelm_net Docker network exists (quick check below)
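If you want to confirm the shared network before continuing, a quick check looks like this (the network itself is created as part of the Core Stack):

```bash
# Confirm the shared Docker network used by all stacks is present.
docker network inspect ravenhelm_net --format '{{.Name}} ({{.Driver}})'
```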
Directory Structure
mkdir -p ~/ravenhelm/services/{norns,langfuse,ollama,neo4j}
mkdir -p ~/ravenhelm/data/langfuse/clickhouse/{data,logs}
mkdir -p ~/ravenhelm/data/langfuse/minio
mkdir -p ~/ravenhelm/data/{ollama,neo4j}
Step 1: Create Langfuse Database
Langfuse uses PostgreSQL for application data.
docker exec -it postgres psql -U ravenhelm -d postgres -c "CREATE DATABASE langfuse OWNER ravenhelm;"
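To confirm the database was created, list it from the same container:

```bash
# Confirm the "langfuse" database exists with the expected owner.
docker exec -it postgres psql -U ravenhelm -d postgres -c "\l langfuse"
```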
Step 2: Configure Langfuse Environment
Create ~/ravenhelm/services/langfuse/.env with the required secrets:
cat > ~/ravenhelm/services/langfuse/.env << 'ENV'
# Database
POSTGRES_USER=ravenhelm
POSTGRES_PASSWORD_ENCODED=<url-encoded-password>
# Langfuse core secrets
LANGFUSE_SALT=<generate-secure-random>
LANGFUSE_ENCRYPTION_KEY=<generate-secure-random>
LANGFUSE_NEXTAUTH_SECRET=<generate-secure-random>
# ClickHouse
CLICKHOUSE_PASSWORD=<generate-secure-random>
# MinIO
MINIO_SECRET_KEY=<generate-secure-random>
# Zitadel SSO
AUTH_CUSTOM_CLIENT_ID=<zitadel-client-id>
AUTH_CUSTOM_CLIENT_SECRET=<zitadel-client-secret>
AUTH_CUSTOM_ISSUER=https://auth.ravenhelm.dev
AUTH_CUSTOM_NAME=Zitadel
AUTH_DISABLE_USERNAME_PASSWORD=true
ENV
If the PostgreSQL password contains special characters, URL-encode it before setting POSTGRES_PASSWORD_ENCODED, since the value is interpolated directly into the DATABASE_URL connection string.
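One way to fill in the placeholders, assuming openssl and python3 are available on the host, is to generate random values and URL-encode your existing PostgreSQL password before pasting the results into the file ('your-postgres-password' below is a placeholder):

```bash
# Generate a random secret for each placeholder above (run once per variable).
openssl rand -hex 32   # LANGFUSE_SALT, LANGFUSE_ENCRYPTION_KEY, LANGFUSE_NEXTAUTH_SECRET,
                       # CLICKHOUSE_PASSWORD, MINIO_SECRET_KEY

# URL-encode the PostgreSQL password for POSTGRES_PASSWORD_ENCODED.
python3 -c 'import urllib.parse, sys; print(urllib.parse.quote(sys.argv[1], safe=""))' 'your-postgres-password'
```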
Step 3: Deploy Langfuse
Create ~/ravenhelm/services/langfuse/docker-compose.yml:
services:
langfuse-worker:
image: docker.io/langfuse/langfuse-worker:3
container_name: langfuse-worker
restart: unless-stopped
depends_on:
langfuse-clickhouse:
condition: service_healthy
langfuse-minio:
condition: service_healthy
networks:
- ravenhelm_net
env_file:
- .env
environment:
- NEXTAUTH_URL=https://langfuse.ravenhelm.dev
- DATABASE_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD_ENCODED}@postgres:5432/langfuse
- SALT=${LANGFUSE_SALT}
- ENCRYPTION_KEY=${LANGFUSE_ENCRYPTION_KEY}
- TELEMETRY_ENABLED=false
- LANGFUSE_ENABLE_EXPERIMENTAL_FEATURES=true
# ClickHouse
- CLICKHOUSE_MIGRATION_URL=clickhouse://langfuse-clickhouse:9000
- CLICKHOUSE_URL=http://langfuse-clickhouse:8123
- CLICKHOUSE_USER=clickhouse
- CLICKHOUSE_PASSWORD=${CLICKHOUSE_PASSWORD}
- CLICKHOUSE_CLUSTER_ENABLED=false
# S3/MinIO
- LANGFUSE_S3_EVENT_UPLOAD_BUCKET=langfuse
- LANGFUSE_S3_EVENT_UPLOAD_REGION=auto
- LANGFUSE_S3_EVENT_UPLOAD_ACCESS_KEY_ID=langfuse
- LANGFUSE_S3_EVENT_UPLOAD_SECRET_ACCESS_KEY=${MINIO_SECRET_KEY}
- LANGFUSE_S3_EVENT_UPLOAD_ENDPOINT=http://langfuse-minio:9000
- LANGFUSE_S3_EVENT_UPLOAD_FORCE_PATH_STYLE=true
- LANGFUSE_S3_EVENT_UPLOAD_PREFIX=events/
- LANGFUSE_S3_MEDIA_UPLOAD_BUCKET=langfuse
- LANGFUSE_S3_MEDIA_UPLOAD_REGION=auto
- LANGFUSE_S3_MEDIA_UPLOAD_ACCESS_KEY_ID=langfuse
- LANGFUSE_S3_MEDIA_UPLOAD_SECRET_ACCESS_KEY=${MINIO_SECRET_KEY}
- LANGFUSE_S3_MEDIA_UPLOAD_ENDPOINT=http://langfuse-minio:9000
- LANGFUSE_S3_MEDIA_UPLOAD_FORCE_PATH_STYLE=true
- LANGFUSE_S3_MEDIA_UPLOAD_PREFIX=media/
- LANGFUSE_S3_BATCH_EXPORT_ENABLED=true
- LANGFUSE_S3_BATCH_EXPORT_BUCKET=langfuse
- LANGFUSE_S3_BATCH_EXPORT_PREFIX=exports/
- LANGFUSE_S3_BATCH_EXPORT_REGION=auto
- LANGFUSE_S3_BATCH_EXPORT_ENDPOINT=http://langfuse-minio:9000
- LANGFUSE_S3_BATCH_EXPORT_EXTERNAL_ENDPOINT=https://langfuse.ravenhelm.dev/minio
- LANGFUSE_S3_BATCH_EXPORT_ACCESS_KEY_ID=langfuse
- LANGFUSE_S3_BATCH_EXPORT_SECRET_ACCESS_KEY=${MINIO_SECRET_KEY}
- LANGFUSE_S3_BATCH_EXPORT_FORCE_PATH_STYLE=true
# Redis
- REDIS_HOST=redis
- REDIS_PORT=6379
- REDIS_AUTH=${REDIS_PASSWORD}
- ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY}
labels:
- "com.ravenhelm.service=langfuse-worker"
langfuse-web:
image: docker.io/langfuse/langfuse:3
container_name: langfuse-web
restart: unless-stopped
depends_on:
langfuse-clickhouse:
condition: service_healthy
langfuse-minio:
condition: service_healthy
networks:
- ravenhelm_net
env_file:
- .env
environment:
- NEXTAUTH_URL=https://langfuse.ravenhelm.dev
- NEXTAUTH_SECRET=${LANGFUSE_NEXTAUTH_SECRET}
- DATABASE_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD_ENCODED}@postgres:5432/langfuse
- SALT=${LANGFUSE_SALT}
- ENCRYPTION_KEY=${LANGFUSE_ENCRYPTION_KEY}
- TELEMETRY_ENABLED=false
- LANGFUSE_ENABLE_EXPERIMENTAL_FEATURES=true
# ClickHouse
- CLICKHOUSE_MIGRATION_URL=clickhouse://langfuse-clickhouse:9000
- CLICKHOUSE_URL=http://langfuse-clickhouse:8123
- CLICKHOUSE_USER=clickhouse
- CLICKHOUSE_PASSWORD=${CLICKHOUSE_PASSWORD}
- CLICKHOUSE_CLUSTER_ENABLED=false
# S3/MinIO
- LANGFUSE_S3_EVENT_UPLOAD_BUCKET=langfuse
- LANGFUSE_S3_EVENT_UPLOAD_REGION=auto
- LANGFUSE_S3_EVENT_UPLOAD_ACCESS_KEY_ID=langfuse
- LANGFUSE_S3_EVENT_UPLOAD_SECRET_ACCESS_KEY=${MINIO_SECRET_KEY}
- LANGFUSE_S3_EVENT_UPLOAD_ENDPOINT=http://langfuse-minio:9000
- LANGFUSE_S3_EVENT_UPLOAD_FORCE_PATH_STYLE=true
- LANGFUSE_S3_EVENT_UPLOAD_PREFIX=events/
- LANGFUSE_S3_MEDIA_UPLOAD_BUCKET=langfuse
- LANGFUSE_S3_MEDIA_UPLOAD_REGION=auto
- LANGFUSE_S3_MEDIA_UPLOAD_ACCESS_KEY_ID=langfuse
- LANGFUSE_S3_MEDIA_UPLOAD_SECRET_ACCESS_KEY=${MINIO_SECRET_KEY}
- LANGFUSE_S3_MEDIA_UPLOAD_ENDPOINT=http://langfuse-minio:9000
- LANGFUSE_S3_MEDIA_UPLOAD_FORCE_PATH_STYLE=true
- LANGFUSE_S3_MEDIA_UPLOAD_PREFIX=media/
- LANGFUSE_S3_BATCH_EXPORT_ENABLED=true
- LANGFUSE_S3_BATCH_EXPORT_BUCKET=langfuse
- LANGFUSE_S3_BATCH_EXPORT_PREFIX=exports/
- LANGFUSE_S3_BATCH_EXPORT_REGION=auto
- LANGFUSE_S3_BATCH_EXPORT_ENDPOINT=http://langfuse-minio:9000
- LANGFUSE_S3_BATCH_EXPORT_EXTERNAL_ENDPOINT=https://langfuse.ravenhelm.dev/minio
- LANGFUSE_S3_BATCH_EXPORT_ACCESS_KEY_ID=langfuse
- LANGFUSE_S3_BATCH_EXPORT_SECRET_ACCESS_KEY=${MINIO_SECRET_KEY}
- LANGFUSE_S3_BATCH_EXPORT_FORCE_PATH_STYLE=true
# Redis
- REDIS_HOST=redis
- REDIS_PORT=6379
- REDIS_AUTH=${REDIS_PASSWORD}
- ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY}
# Initial setup (optional)
- LANGFUSE_INIT_ORG_NAME=Ravenhelm
- LANGFUSE_INIT_PROJECT_NAME=LangGraph Agents
# Zitadel SSO
- AUTH_CUSTOM_CLIENT_ID=${AUTH_CUSTOM_CLIENT_ID}
- AUTH_CUSTOM_CLIENT_SECRET=${AUTH_CUSTOM_CLIENT_SECRET}
- AUTH_CUSTOM_ISSUER=${AUTH_CUSTOM_ISSUER}
- AUTH_CUSTOM_NAME=${AUTH_CUSTOM_NAME}
- AUTH_DISABLE_USERNAME_PASSWORD=${AUTH_DISABLE_USERNAME_PASSWORD}
labels:
- "com.ravenhelm.service=langfuse-web"
- "traefik.enable=true"
- "traefik.http.routers.langfuse.rule=Host(`langfuse.ravenhelm.dev`)"
- "traefik.http.routers.langfuse.entrypoints=websecure"
- "traefik.http.routers.langfuse.tls.certresolver=letsencrypt"
- "traefik.http.services.langfuse.loadbalancer.server.port=3000"
langfuse-clickhouse:
image: docker.io/clickhouse/clickhouse-server:24
container_name: langfuse-clickhouse
restart: unless-stopped
user: "101:101"
networks:
- ravenhelm_net
environment:
- CLICKHOUSE_DB=default
- CLICKHOUSE_USER=clickhouse
- CLICKHOUSE_PASSWORD=${CLICKHOUSE_PASSWORD}
volumes:
- /Users/ravenhelm/ravenhelm/data/langfuse/clickhouse/data:/var/lib/clickhouse
- /Users/ravenhelm/ravenhelm/data/langfuse/clickhouse/logs:/var/log/clickhouse-server
healthcheck:
test: wget --no-verbose --tries=1 --spider http://localhost:8123/ping || exit 1
interval: 5s
timeout: 5s
retries: 10
start_period: 10s
labels:
- "com.ravenhelm.service=langfuse-clickhouse"
langfuse-minio:
image: minio/minio:latest
container_name: langfuse-minio
restart: unless-stopped
networks:
- ravenhelm_net
entrypoint: sh
command: -c 'mkdir -p /data/langfuse && minio server /data --console-address ":9001"'
environment:
- MINIO_ROOT_USER=langfuse
- MINIO_ROOT_PASSWORD=${MINIO_SECRET_KEY}
volumes:
- /Users/ravenhelm/ravenhelm/data/langfuse/minio:/data
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"]
interval: 5s
timeout: 5s
retries: 5
start_period: 10s
labels:
- "com.ravenhelm.service=langfuse-minio"
networks:
ravenhelm_net:
external: true
Start Langfuse:
cd ~/ravenhelm/services/langfuse
docker compose --env-file ~/ravenhelm/secrets/.env up -d
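Once the containers are up, confirm they report healthy and that the API responds. The health route used here is Langfuse's public health endpoint; if Traefik or DNS is not in place yet, rely on the container logs instead:

```bash
cd ~/ravenhelm/services/langfuse
docker compose ps                      # all four containers should be running/healthy
docker compose logs -f langfuse-web    # watch for database migrations and startup messages

# Health check through Traefik (assumes DNS and TLS are already working).
curl -fsS https://langfuse.ravenhelm.dev/api/public/health
```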
Step 4: Deploy Ollama
Create ~/ravenhelm/services/ollama/docker-compose.yml:
services:
ollama:
image: ollama/ollama:latest
container_name: ollama
restart: unless-stopped
networks:
- ravenhelm_net
environment:
- TZ=America/Chicago
volumes:
- ../../data/ollama:/root/.ollama
# Internal only - no Traefik exposure
labels:
- "com.ravenhelm.service=ollama"
- "com.ravenhelm.internal=true"
networks:
ravenhelm_net:
external: true
Start Ollama:
cd ~/ravenhelm/services/ollama
docker compose up -d
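Ollama starts with no models. Since the Norns compose file below points at the nomic-embed-text embedding model, pull it before starting the agent:

```bash
# Pull the embedding model referenced by OLLAMA_EMBED_MODEL in the Norns config.
docker exec -it ollama ollama pull nomic-embed-text

# Confirm the model is available.
docker exec -it ollama ollama list
```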
Step 5: Deploy Norns Agent
Norns reads secrets from ~/ravenhelm/secrets/.env. Ensure the following are set (a quick sanity check follows the listing):
# Core dependencies
POSTGRES_PASSWORD=<postgres-password>
REDIS_PASSWORD=<redis-password>
NORNS_API_KEY=<generate-secure-random>
# LLM providers (at least one)
ANTHROPIC_API_KEY=<optional>
OPENAI_API_KEY=<optional>
# Observability
LANGFUSE_PUBLIC_KEY=<langfuse-public-key>
LANGFUSE_SECRET_KEY=<langfuse-secret-key>
# Vault (recommended)
VAULT_ENABLED=true
VAULT_ADDR=https://vault.ravenhelm.dev
VAULT_ROLE_ID=<role-id>
VAULT_SECRET_ID=<secret-id>
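Before building, a quick check that the required keys are present in the secrets file can save a failed startup; the variable names below are the ones the compose file interpolates:

```bash
# Warn about any required variable missing from the shared secrets file.
for key in POSTGRES_PASSWORD REDIS_PASSWORD NORNS_API_KEY LANGFUSE_PUBLIC_KEY LANGFUSE_SECRET_KEY; do
  grep -q "^${key}=" ~/ravenhelm/secrets/.env || echo "missing: ${key}"
done
```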
Create ~/ravenhelm/docs/AI-ML-Platform/norns-agent/docker-compose.yml:
services:
norns-agent:
build:
context: ./agent
dockerfile: Dockerfile
container_name: norns-agent
entrypoint: ["/bin/bash", "/app/vault-entrypoint.sh"]
command: ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"]
group_add:
- "991"
restart: unless-stopped
ports:
- "3901:8000"
volumes:
- /Users/ravenhelm/ravenhelm/docs/AI-ML-Platform/norns-agent/agent/vault-entrypoint.sh:/app/vault-entrypoint.sh:ro
# Backup access (read-only for status checks)
- /Users/ravenhelm/ravenhelm/backups:/backups:ro
- /Users/ravenhelm/.config/restic:/root/.config/restic:ro
- /Volumes/T9:/Volumes/T9:ro
# Docker socket for backup execution
- /var/run/docker.sock:/var/run/docker.sock
environment:
# Database (PostgreSQL)
- DB_HOST=postgres
- DB_PORT=5432
- DB_NAME=ravenmaskos
- DB_USER=ravenhelm
- DB_PASSWORD=${POSTGRES_PASSWORD}
# Redis (Huginn state + Context caching)
- REDIS_HOST=redis
- REDIS_PORT=6379
- REDIS_DB=0
- REDIS_PASSWORD=${REDIS_PASSWORD}
# Embeddings (Ollama preferred, falls back to OpenAI)
- OLLAMA_URL=http://ollama:11434
- OLLAMA_EMBED_MODEL=nomic-embed-text
- EMBEDDING_PROVIDER=auto
# Slack
- SLACK_BOT_TOKEN=${SLACK_BOT_TOKEN}
- SLACK_SIGNING_SECRET=${SLACK_SIGNING_SECRET}
# AI APIs
- ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY}
- OPENAI_API_KEY=${OPENAI_API_KEY}
# Google Services
- GOOGLE_MAPS_API_KEY=${GOOGLE_MAPS_API_KEY}
- GOOGLE_CALENDAR_CLIENT_ID=${GOOGLE_CALENDAR_CLIENT_ID}
- GOOGLE_CALENDAR_CLIENT_SECRET=${GOOGLE_CALENDAR_CLIENT_SECRET}
- GOOGLE_CALENDAR_REFRESH_TOKEN=${GOOGLE_CALENDAR_REFRESH_TOKEN}
- RAVENHELM_CALENDAR_ID=${RAVENHELM_CALENDAR_ID}
# Home Automation
- HOMEASSISTANT_URL=http://homeassistant:8123
- HOMEASSISTANT_TOKEN=${HOMEASSISTANT_TOKEN}
# Twilio (SMS/Voice)
- TWILIO_ACCOUNT_SID=${TWILIO_ACCOUNT_SID}
- TWILIO_AUTH_TOKEN=${TWILIO_AUTH_TOKEN}
- TWILIO_FROM_NUMBER=+17372143330
# Telephony Service
- TELEPHONY_URL=http://telephony:8000
- GROCY_URL=http://grocy:80
- GROCY_API_KEY=${GROCY_API_KEY}
# Norns
- NORNS_API_KEY=${NORNS_API_KEY}
# OpenBao / Vault
- VAULT_ENABLED=${VAULT_ENABLED:-true}
- VAULT_ADDR=${VAULT_ADDR:-https://vault.ravenhelm.dev}
- VAULT_ROLE_ID=${VAULT_ROLE_ID}
- VAULT_SECRET_ID=${VAULT_SECRET_ID}
- VAULT_NAMESPACE=${VAULT_NAMESPACE}
- VAULT_KV_MOUNT=${VAULT_KV_MOUNT:-secret}
- VAULT_TRANSIT_MOUNT=${VAULT_TRANSIT_MOUNT:-transit}
- VAULT_TOKEN_TTL=${VAULT_TOKEN_TTL:-300}
- LOG_LEVEL=INFO
# Langfuse Observability
- LANGFUSE_PUBLIC_KEY=${LANGFUSE_PUBLIC_KEY}
- LANGFUSE_SECRET_KEY=${LANGFUSE_SECRET_KEY}
- LANGFUSE_HOST=https://langfuse.ravenhelm.dev
- LANGFUSE_URL=http://langfuse-web:3000
# Knowledge APIs
- TAVILY_API_KEY=${TAVILY_API_KEY}
- OPENWEATHER_API_KEY=${OPENWEATHER_API_KEY}
- BRAVE_API_KEY=${BRAVE_API_KEY}
- DEFAULT_LOCATION=${DEFAULT_LOCATION:-San Marcos, TX}
# GitLab
- GITLAB_URL=http://gitlab
- GITLAB_TOKEN=${GITLAB_TOKEN}
- N8N_URL=http://n8n:5678
- LINEAR_API_KEY=${LINEAR_API_KEY}
- FIRECRAWL_API_KEY=${FIRECRAWL_API_KEY}
# Backup paths (container paths)
- BACKUP_ROOT=/backups
- RESTIC_ENV_FILE=/root/.config/restic/homelab.env
- T9_MOUNT=/Volumes/T9
networks:
- ravenhelm_net
labels:
- "traefik.enable=true"
- "traefik.http.routers.norns-pm.rule=Host(`norns-pm.ravenhelm.dev`)"
- "traefik.http.routers.norns-pm.entrypoints=websecure"
- "traefik.http.routers.norns-pm.tls.certresolver=letsencrypt"
- "traefik.http.services.norns-pm.loadbalancer.server.port=8000"
networks:
ravenhelm_net:
external: true
Start Norns:
cd ~/ravenhelm/docs/AI-ML-Platform/norns-agent
docker compose --env-file ~/ravenhelm/secrets/.env up -d
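To verify the agent came up, watch the logs and hit the service on the published host port. The exact routes depend on the FastAPI app; /docs is FastAPI's default interactive docs page and is only a reasonable guess here, so substitute your actual health route if one exists:

```bash
cd ~/ravenhelm/docs/AI-ML-Platform/norns-agent
docker compose logs -f norns-agent     # watch Vault login and uvicorn startup

# The service is published on host port 3901 (container port 8000).
curl -fsS -o /dev/null -w '%{http_code}\n' http://localhost:3901/docs
```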
Step 6: Optional Neo4j (Memory Graph)
If you use graph memory, deploy Neo4j:
services:
neo4j:
image: neo4j:5.26-community
container_name: neo4j
restart: unless-stopped
networks:
- ravenhelm_net
environment:
- TZ=America/Chicago
- NEO4J_AUTH=neo4j/${NEO4J_PASSWORD}
- NEO4J_PLUGINS=["apoc"]
- NEO4J_dbms_security_procedures_unrestricted=apoc.*
- NEO4J_dbms_security_procedures_allowlist=apoc.*
- NEO4J_apoc_export_file_enabled=true
- NEO4J_apoc_import_file_enabled=true
- NEO4J_apoc_import_file_use__neo4j__config=true
- NEO4J_server_memory_heap_initial__size=512m
- NEO4J_server_memory_heap_max__size=1G
- NEO4J_server_memory_pagecache_size=512m
volumes:
- /Users/ravenhelm/ravenhelm/data/neo4j/data:/data
- /Users/ravenhelm/ravenhelm/data/neo4j/logs:/logs
- /Users/ravenhelm/ravenhelm/data/neo4j/plugins:/plugins
ports:
- "7687:7687"
healthcheck:
test: ["CMD", "wget", "-q", "--spider", "http://localhost:7474"]
interval: 30s
timeout: 10s
retries: 5
start_period: 60s
labels:
- "com.ravenhelm.service=neo4j"
- "traefik.enable=true"
- "traefik.http.routers.neo4j.rule=Host(`neo4j.ravenhelm.dev`)"
- "traefik.http.routers.neo4j.entrypoints=websecure"
- "traefik.http.routers.neo4j.tls.certresolver=letsencrypt"
- "traefik.http.routers.neo4j.service=neo4j"
- "traefik.http.services.neo4j.loadbalancer.server.port=7474"
networks:
ravenhelm_net:
external: true
Start Neo4j:
cd ~/ravenhelm/services/neo4j
docker compose --env-file ~/ravenhelm/secrets/.env up -d
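A quick way to confirm Neo4j is accepting Bolt connections is cypher-shell, which ships in the official image; substitute the password you set in NEO4J_AUTH:

```bash
# Run a trivial query over Bolt to confirm authentication and connectivity.
docker exec -it neo4j cypher-shell -u neo4j -p "$NEO4J_PASSWORD" "RETURN 1 AS ok;"
```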
Verification
- Norns API: https://norns-pm.ravenhelm.dev
- Langfuse UI: https://langfuse.ravenhelm.dev
- Ollama (internal): http://ollama:11434 from within the Docker network
- Neo4j (optional): https://neo4j.ravenhelm.dev
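If you prefer a single pass over the public endpoints, a small curl loop covers the three externally routed services (Ollama is internal-only; use the ollama list check from Step 4 instead):

```bash
# Report the HTTP status code for each externally routed service.
for url in https://norns-pm.ravenhelm.dev https://langfuse.ravenhelm.dev https://neo4j.ravenhelm.dev; do
  printf '%-40s %s\n' "$url" "$(curl -skso /dev/null -w '%{http_code}' "$url")"
done
```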