|
|
|
|
|
|
|
import sys, os |
|
import traceback |
|
|
|
sys.path.insert( |
|
0, os.path.abspath("../..") |
|
) |
|
import litellm |
|
from litellm import embedding, completion |
|
import pytest |
|
|
|
# Turn on verbose logging so request/response details are printed while the test runs.
litellm.set_verbose = True

# Map the friendly alias "good-model" onto the concrete Anyscale-hosted
# Llama-2 chat model; litellm resolves aliases through `model_alias_map`.
model_alias_map = {"good-model": "anyscale/meta-llama/Llama-2-7b-chat-hf"}
litellm.model_alias_map = model_alias_map
|
|
|
def test_model_alias_map():
    """A completion issued against the "good-model" alias should be routed to
    the aliased Llama-2 chat model, whose name must appear in the response."""
    chat = [{"role": "user", "content": "Hey, how's it going?"}]
    try:
        result = completion(
            "good-model",
            messages=chat,
            top_p=0.1,
            temperature=0.01,
            max_tokens=10,
        )
        print(result.model)
        # The resolved model name (not the alias) is what the provider reports back.
        assert "Llama-2-7b-chat-hf" in result.model
    except Exception as e:
        # Any failure (network, auth, bad routing) is surfaced as a test failure.
        pytest.fail(f"Error occurred: {e}")
|
|
|
|
|
test_model_alias_map() |