Fix logger definition and provider fallback in LLMClient
ragbench_eval/config.py  +2 -3  (CHANGED)

@@ -10,9 +10,8 @@ RAGBENCH_DATASET = "galileo-ai/ragbench"
 
 #Will be "groq" if env var is missing OR empty
 LLM_PROVIDER = (os.getenv("RAGBENCH_LLM_PROVIDER") or "groq").lower()
-
-
-JUDGE_MODEL = os.getenv("RAGBENCH_JUDGE_MODEL", "llama3-70b-8192")
+GEN_MODEL = os.getenv("RAGBENCH_GEN_MODEL", "llama-3.1-8b-instant")
+JUDGE_MODEL = os.getenv("RAGBENCH_JUDGE_MODEL", "llama-3.1-70b-versatile")
 
 EMBEDDING_MODEL = os.getenv(
     "RAGBENCH_EMBEDDING_MODEL",