Spaces:
Sleeping
Sleeping
File size: 2,538 Bytes
50cb379 2963935 50cb379 2963935 50cb379 0a2cb3b 50cb379 0a2cb3b 50cb379 0a2cb3b 50cb379 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 |
from langchain_anthropic import ChatAnthropic
from langchain_openai import ChatOpenAI
from langchain_ollama import ChatOllama
from langchain_core.language_models.base import BaseLanguageModel
from langchain_core.messages import BaseMessage, HumanMessage, AIMessage
from typing import Optional, Dict, List, Any, Sequence
from langchain.tools import BaseTool
import os
import requests
import json
from dotenv import load_dotenv
from dataclasses import dataclass
import ollama
import copy
from chat_gemini import ChatGemini
load_dotenv()
class LLMProvider:
    """Registry of chat-LLM backends (Gemini, Claude, ChatGPT, local Ollama).

    Each backend is registered under a display name in ``self.providers``
    only when a usable credential is found (environment variable first,
    then the ``api_keys`` mapping) or, for Ollama, when a local server
    exposes installed models.
    """

    def __init__(self, api_keys: Optional[Dict[str, str]] = None):
        """Initialize the registry and eagerly set up all available providers.

        Args:
            api_keys: Optional mapping of provider display name
                ('Gemini', 'Claude', 'ChatGPT') to API key, used as a
                fallback when the corresponding environment variable is unset.
        """
        self.api_keys: Dict[str, str] = api_keys or {}
        self.providers: Dict[str, Any] = {}
        self._setup_providers()

    def _get_ollama_models(self) -> List[str]:
        """Return names of locally installed Ollama models ([] if unavailable)."""
        try:
            models = ollama.list()
            return [model.model for model in models['models']]
        except Exception:
            # Ollama daemon not running or listing failed — treat as none installed.
            return []

    def _setup_providers(self):
        """Populate ``self.providers`` for every backend with usable credentials.

        For each cloud backend the environment variable takes precedence
        over the ``api_keys`` mapping.
        """
        # Google Gemini
        # BUGFIX: lookup key was misspelled 'Gemimi', so a Gemini key passed
        # via the api_keys dict was silently ignored.
        if google_key := os.getenv('GOOGLE_API_KEY') or self.api_keys.get('Gemini'):
            self.providers['Gemini'] = ChatGemini(api_key=google_key)
        # Anthropic Claude
        if anthropic_key := os.getenv('ANTHROPIC_API_KEY') or self.api_keys.get('Claude'):
            self.providers['Claude'] = ChatAnthropic(
                api_key=anthropic_key,
                model_name="claude-3-5-sonnet-20241022",
            )
        # OpenAI
        if openai_key := os.getenv('OPENAI_API_KEY') or self.api_keys.get('ChatGPT'):
            self.providers['ChatGPT'] = ChatOpenAI(
                api_key=openai_key,
                model_name="gpt-4o-2024-11-20",
                max_completion_tokens=4096,
            )
        # Ollama (local): one provider entry per installed model.
        try:
            for model in self._get_ollama_models():
                self.providers[f'Ollama-{model}'] = ChatOllama(model=model)
        except Exception:
            pass  # Ollama not available — keep cloud providers only

    def get_available_providers(self) -> list[str]:
        """Return the display names of all configured providers."""
        return list(self.providers.keys())

    def get_provider(self, name: str) -> Optional[Any]:
        """Return the LLM provider registered under ``name``, or None."""
        return self.providers.get(name)
|