From ae10f01e6a3f21e2c2861c778235cc52f27aec99 Mon Sep 17 00:00:00 2001
From: Muhammad Waqas <47238077+smwaqas89@users.noreply.github.com>
Date: Wed, 4 Mar 2026 01:08:27 -0600
Subject: [PATCH] fix: set litellm.drop_params to support non-OpenAI providers

Fixes #63. Also addresses the issue in #32.

When using providers like Ollama or Groq as the weak model, litellm
raises UnsupportedParamsError for parameters like presence_penalty that
these providers don't support. Setting litellm.drop_params = True causes
litellm to silently drop unsupported parameters, enabling seamless
routing to any provider supported by litellm.

The flag is set after the import block (rather than between imports) so
the module stays PEP 8 compliant (no statements interleaved with
imports / E402), and a comment documents why it is needed.
---
 routellm/controller.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/routellm/controller.py b/routellm/controller.py
index 8a02a05..a46d7d2 100644
--- a/routellm/controller.py
+++ b/routellm/controller.py
@@ -4,7 +4,11 @@
 from typing import Any, Optional
 
 import pandas as pd
+import litellm
 from litellm import acompletion, completion
 from tqdm import tqdm
 
 from routellm.routers.routers import ROUTER_CLS
+
+# Drop request params the target provider rejects (e.g. presence_penalty
+# on Ollama/Groq) instead of raising UnsupportedParamsError. Fixes #63, #32.
+litellm.drop_params = True