Fix logger definition and provider fallback in LLMClient (2aaa8e9)
import os
from dotenv import load_dotenv
load_dotenv()
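# API credentials read from the environment (or a local .env file).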
HF_TOKEN = os.getenv("HF_TOKEN")
GROQ_API_KEY = os.getenv("GROQ_API_KEY")
# Falls back to "groq" when RAGBENCH_LLM_PROVIDER is unset or set to an empty string.
LLM_PROVIDER = (os.getenv("RAGBENCH_LLM_PROVIDER") or "groq").lower()
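# Generator and judge model defaults; override via RAGBENCH_GEN_MODEL / RAGBENCH_JUDGE_MODEL.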
GEN_MODEL = os.getenv("RAGBENCH_GEN_MODEL", "llama-3.1-8b-instant")
JUDGE_MODEL = os.getenv("RAGBENCH_JUDGE_MODEL", "llama-3.1-70b-versatile")
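# Sentence-transformers checkpoint used for embeddings; override via RAGBENCH_EMBEDDING_MODEL.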
EMBEDDING_MODEL = os.getenv(
"RAGBENCH_EMBEDDING_MODEL",
"sentence-transformers/all-MiniLM-L6-v2",
)
RAGBENCH_DATASET = os.getenv("RAGBENCH_DATASET", "galileo-ai/ragbench")
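# Maps each evaluation domain to its RAGBench subset names.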
DOMAIN_TO_SUBSETS = {
"biomedical": ["pubmedqa", "covidqa"],
"general_knowledge": ["hotpotqa", "msmarco", "hagrid", "expertqa"],
"legal": ["cuad"],
"customer_support": ["delucionqa", "emanual", "techqa"],
"finance": ["finqa", "tatqa"],
}