# metisllm-dashboard/llm_handler/llm_interface.py
from __future__ import annotations
from typing import List, Protocol, Type, Dict
import enum


class DefaultEnumMeta(enum.EnumMeta):
    """EnumMeta that returns the first declared member when called without a value."""

    def __call__(cls, value=None, *args, **kwargs) -> enum.Enum:
        if value is None:
            # No value supplied: default to the first declared member.
            return next(iter(cls))
        return super().__call__(value, *args, **kwargs)  # type: ignore
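

# Hypothetical usage sketch, not part of the original module: an enum that opts
# into DefaultEnumMeta so that calling the class with no value yields its first
# member. The _ExampleModel name and its values are assumptions for illustration.
class _ExampleModel(enum.Enum, metaclass=DefaultEnumMeta):
    FIRST = "first-model"
    SECOND = "second-model"


# _ExampleModel() evaluates to _ExampleModel.FIRST, while the normal lookup
# _ExampleModel("second-model") still returns _ExampleModel.SECOND.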


class LLMInterface(Protocol):
    """Structural interface for LLM backends: chat completion and text embedding."""

    def get_chat_completion(self, messages: List, model: enum.Enum, temperature: float,
                            **kwargs) -> str:
        ...

    def get_text_embedding(self, input: str, model: enum.Enum) -> List[float]:
        ...
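

# Hypothetical sketch, not part of the original module: a minimal in-memory
# implementation that satisfies LLMInterface structurally (no inheritance is
# required for a Protocol). EchoLLM and its behaviour are assumptions for
# illustration; a real handler would forward the arguments to a provider SDK.
class EchoLLM:
    def get_chat_completion(self, messages: List, model: enum.Enum, temperature: float,
                            **kwargs) -> str:
        # Echo the last message; real code would send `messages`, `model.value`,
        # and `temperature` to a chat-completion endpoint.
        return str(messages[-1]) if messages else ""

    def get_text_embedding(self, input: str, model: enum.Enum) -> List[float]:
        # Return a fixed-size dummy vector; real code would call an embeddings
        # endpoint and return its result.
        return [0.0] * 8


# Usage: because LLMInterface is a Protocol, a type checker accepts
#     handler: LLMInterface = EchoLLM()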