Update models.py
models.py
CHANGED
@@ -44,12 +44,24 @@ def get_anthropic(model: str,
     # Add conversation history, removing cache_control from all but the last two user messages
     for i, message in enumerate(chat_history):
         if message["role"] == "user":
+            content = message["content"][0]["text"] if isinstance(message["content"], list) else message["content"]
+
             if i >= len(chat_history) - 3:  # Last two user messages
-                messages.append(
+                messages.append({
+                    "role": "user",
+                    "content": [
+                        {
+                            "type": "text",
+                            "text": content,
+                            "cache_control": {"type": "ephemeral"}
+                        }
+                    ]
+                })
+
             else:
                 messages.append({
                     "role": "user",
-                    "content": [{"type": "text", "text":
+                    "content": [{"type": "text", "text": content}]
                 })
         else:
             messages.append(message)
@@ -532,4 +544,4 @@ def get_model_api(model: str):
         return get_openai_azure
     else:
         raise ValueError(
-            f"Unsupported model: {model}. Ensure to add prefix (e.g. openai:, google:, groq:, cerebras:, azure:, ollama:, anthropic:)")
+            f"Unsupported model: {model}. Ensure to add prefix (e.g. openai:, google:, groq:, cerebras:, azure:, ollama:, anthropic:)")
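
The added loop normalizes each user turn to a plain string and attaches cache_control: {"type": "ephemeral"} only to the most recent user turns, which is the content-block field Anthropic's prompt caching uses; older turns are sent as plain text blocks without it. The sketch below isolates that rewriting step so it can be run on its own. It is a minimal illustration rather than the full get_anthropic from models.py: the helper name build_anthropic_messages and the sample chat_history are assumptions made for the example.

# Minimal sketch of the history-rewriting logic added in this commit.
# Assumptions: chat_history is a list of {"role": ..., "content": ...} dicts,
# where user content is either a plain string or an Anthropic-style
# [{"type": "text", "text": ...}] block list. The helper name is illustrative.

def build_anthropic_messages(chat_history):
    messages = []
    for i, message in enumerate(chat_history):
        if message["role"] == "user":
            # Normalize user content to a plain string.
            content = message["content"][0]["text"] if isinstance(message["content"], list) else message["content"]

            if i >= len(chat_history) - 3:  # Last two user messages
                # Recent user turns carry cache_control so the prompt prefix
                # can be reused via Anthropic's ephemeral prompt cache.
                messages.append({
                    "role": "user",
                    "content": [
                        {
                            "type": "text",
                            "text": content,
                            "cache_control": {"type": "ephemeral"}
                        }
                    ]
                })
            else:
                # Older user turns go out as plain text blocks, no caching.
                messages.append({
                    "role": "user",
                    "content": [{"type": "text", "text": content}]
                })
        else:
            # Assistant (and any other) messages are passed through unchanged.
            messages.append(message)
    return messages


if __name__ == "__main__":
    history = [
        {"role": "user", "content": "First question"},
        {"role": "assistant", "content": "First answer"},
        {"role": "user", "content": [{"type": "text", "text": "Follow-up"}]},
    ]
    for m in build_anthropic_messages(history):
        print(m)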