import os
from dotenv import load_dotenv


class Config:
    def __init__(self):
        load_dotenv()
        self.hf_token = os.getenv("HF_TOKEN")
        self.hf_api_url = os.getenv("HF_API_ENDPOINT_URL", "https://api-inference.huggingface.co/v1/")
        self.use_fallback = os.getenv("USE_FALLBACK", "false").lower() == "true"
        self.tavily_api_key = os.getenv("TAVILY_API_KEY")
        self.openweather_api_key = os.getenv("OPENWEATHER_API_KEY")
        self.nasa_api_key = os.getenv("NASA_API_KEY")

        # Redis configuration with proper defaults for local development
        self.redis_host = os.getenv("REDIS_HOST", "localhost")
        self.redis_port = int(os.getenv("REDIS_PORT", "6379"))
        self.redis_username = os.getenv("REDIS_USERNAME", "")
        self.redis_password = os.getenv("REDIS_PASSWORD", "")
        self.redis_retries = int(os.getenv("REDIS_RETRIES", "3"))
        self.redis_retry_delay = int(os.getenv("REDIS_RETRY_DELAY", "1"))

        # Local model configuration
        self.local_model_name = os.getenv("LOCAL_MODEL_NAME", "mistral")
        # Update to use the working ngrok URL from your logs
        self.ollama_host = os.getenv("OLLAMA_HOST", "https://f943b91f0a0c.ngrok-free.app")


config = Config()
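
# Illustrative sketch (not part of the original file): one way the Redis settings
# above could be consumed. Assumes the `redis` package is installed; the helper
# name `create_redis_client` is hypothetical, and callers may handle retries
# differently (e.g. via redis-py's built-in Retry object).
import time

import redis


def create_redis_client():
    """Build a Redis client from the config above, retrying on connection failure."""
    for attempt in range(config.redis_retries):
        try:
            client = redis.Redis(
                host=config.redis_host,
                port=config.redis_port,
                username=config.redis_username or None,
                password=config.redis_password or None,
                decode_responses=True,
            )
            client.ping()  # raises redis.ConnectionError if the server is unreachable
            return client
        except redis.ConnectionError:
            time.sleep(config.redis_retry_delay)
    return None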