1 file changed: +4 −2 lines changed

litellm/proxy/batches_endpoints

@@ -31,7 +31,6 @@
     get_original_file_id,
     prepare_data_with_credentials,
 )
-
 from litellm.proxy.utils import handle_exception_on_proxy, is_known_model
 from litellm.types.llms.openai import LiteLLMBatchCreateRequest
 
@@ -112,7 +111,10 @@ async def create_batch(  # noqa: PLR0915
     is_router_model = is_known_model(model=router_model, llm_router=llm_router)
 
     custom_llm_provider = (
-        provider or data.pop("custom_llm_provider", None) or "openai"
+        provider
+        or data.pop("custom_llm_provider", None)
+        or get_custom_llm_provider_from_request_headers(request=request)
+        or "openai"
     )
     _create_batch_data = LiteLLMBatchCreateRequest(**data)
     input_file_id = _create_batch_data.get("input_file_id", None)
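For context, the second hunk extends the provider fallback chain in `create_batch`: an explicit route `provider` still wins, then a `custom_llm_provider` field popped from the request body, then (new in this change) a provider derived from the request headers via `get_custom_llm_provider_from_request_headers`, and finally the `"openai"` default. Below is a minimal, self-contained sketch of that ordering; the header name `x-litellm-custom-llm-provider` and the standalone `provider_from_headers` helper are illustrative assumptions, not the proxy's actual header-parsing logic.

```python
# Sketch of the provider fallback order this PR establishes.
# Assumption: the hypothetical "x-litellm-custom-llm-provider" header and
# provider_from_headers() stand in for the real proxy helper,
# get_custom_llm_provider_from_request_headers.
from typing import Optional


def provider_from_headers(headers: dict) -> Optional[str]:
    # Hypothetical stand-in: read the provider from a request header,
    # treating a missing or blank value as "not supplied".
    value = headers.get("x-litellm-custom-llm-provider", "").strip()
    return value or None


def resolve_provider(provider: Optional[str], data: dict, headers: dict) -> str:
    # Mirrors the new chain in create_batch, in precedence order:
    #   1. explicit `provider` argument
    #   2. `custom_llm_provider` popped from the request body
    #   3. provider taken from the request headers (the new step)
    #   4. the "openai" default
    return (
        provider
        or data.pop("custom_llm_provider", None)
        or provider_from_headers(headers)
        or "openai"
    )


if __name__ == "__main__":
    # No provider in the path or body, so the header now decides.
    headers = {"x-litellm-custom-llm-provider": "azure"}
    assert resolve_provider(None, {"input_file_id": "file-abc"}, headers) == "azure"
    # Without the header, behavior is unchanged: fall back to "openai".
    assert resolve_provider(None, {"input_file_id": "file-abc"}, {}) == "openai"
    # An explicit provider still overrides everything.
    assert resolve_provider("vertex_ai", {}, headers) == "vertex_ai"
    print("fallback order verified")
```

Because the header lookup sits after the body field and before the default, existing requests that pass `custom_llm_provider` in the body, or no provider at all, resolve exactly as before.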