# docetl2/.env.example

# =====================================================
# BACKEND ENVIRONMENT CONFIGURATION
# =====================================================
# This .env file is used by the backend Python server that executes DocETL pipelines.
# It controls document processing and the LLM calls made while a pipeline runs.
#
# To use: Copy this file to .env and fill in your actual values.
# =====================================================
# Required: API key for LLM operations in pipeline execution
# This is used when DocETL operations make calls to language models
# DocETL uses LiteLLM which supports 100+ models - see https://docs.litellm.ai/docs/providers
#
# Use the appropriate environment variable for your provider:
# - OpenAI: OPENAI_API_KEY=sk-...
# - Anthropic: ANTHROPIC_API_KEY=sk-ant-...
# - Google: GEMINI_API_KEY=...
# - Cohere: COHERE_API_KEY=...
# - And many more - check LiteLLM docs for your provider
OPENAI_API_KEY=your_api_key_here
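#
# Example (illustrative): if your pipeline's operations reference an Anthropic model
# such as claude-3-5-sonnet-20240620, set the Anthropic key instead of (or alongside)
# the OpenAI key:
# ANTHROPIC_API_KEY=sk-ant-your_key_here
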
# Backend server configuration
BACKEND_ALLOW_ORIGINS=http://localhost:3000,http://127.0.0.1:3000
BACKEND_HOST=localhost
BACKEND_PORT=8000
BACKEND_RELOAD=True
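# Note: BACKEND_ALLOW_ORIGINS is a CORS allow-list and must include the exact origin the
# frontend is served from (scheme + host + port), e.g. http://localhost:3000 above.
# BACKEND_RELOAD presumably maps to the server's auto-reload option; keep it True only in development.
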
# Frontend configuration (for docker compose)
FRONTEND_HOST=0.0.0.0
FRONTEND_PORT=3000
# Host port mapping for docker-compose (if not set, defaults are used in docker-compose.yml)
FRONTEND_DOCKER_COMPOSE_PORT=3031
BACKEND_DOCKER_COMPOSE_PORT=8081
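# Sketch of how docker-compose.yml typically consumes these values (illustrative, not the exact file):
#   backend:
#     ports:
#       - "${BACKEND_DOCKER_COMPOSE_PORT:-8081}:8000"
#   frontend:
#     ports:
#       - "${FRONTEND_DOCKER_COMPOSE_PORT:-3031}:3000"
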
# Supported text file encodings for document processing
TEXT_FILE_ENCODINGS=utf-8,latin1,cp1252,iso-8859-1
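# The listed encodings are presumably tried in order when decoding uploaded files; to accept
# another encoding, append any valid Python codec name, e.g.:
# TEXT_FILE_ENCODINGS=utf-8,latin1,cp1252,iso-8859-1,shift_jis
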
# Optional: AWS configuration for Bedrock models
# AWS_PROFILE=default
# AWS_REGION=us-west-2
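#
# Per LiteLLM's conventions, Bedrock models are referenced in pipelines with a "bedrock/" prefix,
# e.g. bedrock/anthropic.claude-3-sonnet-20240229-v1:0 (model ID shown is illustrative); the
# profile and region above are then used for AWS authentication.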