Skip to content

Commit 963e943

Browse files
anthonyivn2 and Sameerlite
authored and committed
init commit (#16200)
1 parent c49084b commit 963e943

File tree

3 files changed

+300
-35
lines changed

3 files changed

+300
-35
lines changed

litellm/litellm_core_utils/prompt_templates/common_utils.py

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -94,6 +94,15 @@ def handle_messages_with_content_list_to_str_conversion(
9494
return messages
9595

9696

97+
def strip_name_from_message(message: AllMessageValues, allowed_name_roles: List[str] = ["user"]) -> AllMessageValues:
    """
    Return a shallow copy of `message` with its 'name' field dropped when the
    message's role is not permitted to carry one.

    Args:
        message: The chat message to sanitize.
        allowed_name_roles: Roles allowed to keep a 'name' field.

    Returns:
        A shallow copy of `message`; messages whose role is allowed are
        returned with 'name' intact, all others have it removed.
    """
    sanitized = message.copy()
    role = sanitized.get("role")
    if role in allowed_name_roles:
        # Role may carry a 'name' — nothing to strip.
        return sanitized
    # pop with a default so messages without 'name' pass through untouched
    sanitized.pop("name", None)  # type: ignore
    return sanitized
105+
97106
def strip_name_from_messages(
98107
messages: List[AllMessageValues], allowed_name_roles: List[str] = ["user"]
99108
) -> List[AllMessageValues]:

litellm/llms/databricks/chat/transformation.py

Lines changed: 31 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@
2626
_should_convert_tool_call_to_json_mode,
2727
)
2828
from litellm.litellm_core_utils.prompt_templates.common_utils import (
29-
strip_name_from_messages,
29+
strip_name_from_message
3030
)
3131
from litellm.llms.base_llm.base_model_iterator import BaseModelResponseIterator
3232
from litellm.types.llms.anthropic import AllAnthropicToolsValues
@@ -332,8 +332,11 @@ def _transform_messages(
332332
_message = message.model_dump(exclude_none=True)
333333
else:
334334
_message = message
335+
_message = strip_name_from_message(_message, allowed_name_roles=["user"])
336+
# Move message-level cache_control into a content block when content is a string.
337+
if "cache_control" in _message and isinstance(_message.get("content"), str):
338+
_message = self._move_cache_control_into_string_content_block(_message)
335339
new_messages.append(_message)
336-
new_messages = strip_name_from_messages(new_messages)
337340

338341
if is_async:
339342
return super()._transform_messages(
@@ -344,6 +347,32 @@ def _transform_messages(
344347
messages=new_messages, model=model, is_async=cast(Literal[False], False)
345348
)
346349

350+
def _move_cache_control_into_string_content_block(self, message: AllMessageValues) -> AllMessageValues:
351+
"""
352+
Moves message-level cache_control into a content block when content is a string.
353+
354+
Transforms:
355+
{"role": "user", "content": "text", "cache_control": {...}}
356+
Into:
357+
{"role": "user", "content": [{"type": "text", "text": "text", "cache_control": {...}}]}
358+
359+
This is required for Anthropic's prompt caching API when cache_control is specified
360+
at the message level but content is a simple string (not already an array of content blocks).
361+
"""
362+
content = message.get("content")
363+
# Create new message with cache_control moved into content block
364+
transformed_message = cast(dict[str, Any], message.copy())
365+
cache_control = transformed_message.pop("cache_control")
366+
transformed_message["content"] = [
367+
{
368+
"type": "text",
369+
"text": content,
370+
"cache_control": cache_control,
371+
}
372+
]
373+
return cast(AllMessageValues, transformed_message)
374+
375+
347376
@staticmethod
348377
def extract_content_str(
349378
content: Optional[AllDatabricksContentValues],

0 commit comments

Comments
 (0)