From 1ea387413d2c3a0f9abc2a5a946983d5861359f8 Mon Sep 17 00:00:00 2001
From: pjavanrood
Date: Wed, 7 Jan 2026 21:17:45 -0800
Subject: [PATCH] refactor: adding api_key param to litellm

---
 src/lighteval/metrics/metrics_sample.py     | 9 ++++-----
 src/lighteval/metrics/utils/llm_as_judge.py | 4 ++++
 2 files changed, 8 insertions(+), 5 deletions(-)

diff --git a/src/lighteval/metrics/metrics_sample.py b/src/lighteval/metrics/metrics_sample.py
index d83e64e22..db14b9bf6 100644
--- a/src/lighteval/metrics/metrics_sample.py
+++ b/src/lighteval/metrics/metrics_sample.py
@@ -951,29 +951,28 @@ def __init__(
         short_judge_name: str | None = None,
         response_format: BaseModel | None = None,
         url: str | None = None,
+        api_key: str | None = None,
         hf_provider: str | None = None,
         max_tokens: int | None = None,
         backend_options: dict | None = None,
     ) -> None:
         logger.debug(f"Initializing JudgeLLM with backend: {judge_backend}, model: {judge_model_name}")
 
-        api_key = None
-
         match judge_backend:
             case "openai":
                 if judge_model_name not in self.available_models_openai:
                     raise ValueError(f"{judge_model_name} not in available models for llm as a judge metric")
-                api_key = os.getenv("OPENAI_API_KEY")
+                api_key = api_key or os.getenv("OPENAI_API_KEY")
                 logger.debug("Using OpenAI backend for llm as a judge metric")
             case "tgi":
-                api_key = os.getenv("HF_TOKEN")
+                api_key = api_key or os.getenv("HF_TOKEN")
                 if url is None:
                     url = "https://api-inference.huggingface.co/v1/"
                 logger.debug("Using TGI backend")
             case "inference-providers":
-                api_key = os.getenv("HF_TOKEN")
+                api_key = api_key or os.getenv("HF_TOKEN")
                 logger.debug("Using Hugging Face Inference backend")
             case "litellm":
diff --git a/src/lighteval/metrics/utils/llm_as_judge.py b/src/lighteval/metrics/utils/llm_as_judge.py
index e30ec0449..79c7d0da7 100644
--- a/src/lighteval/metrics/utils/llm_as_judge.py
+++ b/src/lighteval/metrics/utils/llm_as_judge.py
@@ -329,6 +329,10 @@ def __call_api(prompt):
             }
             if max_new_tokens is not None:
                 kwargs["max_tokens"] = (max_new_tokens,)
+            if self.api_key is not None:
+                kwargs["api_key"] = self.api_key
+            if self.url is not None:
+                kwargs["base_url"] = self.url
             response = litellm.completion(**kwargs)
             text = response.choices[0].message.content