from typing import Any, List, Mapping, Optional
from g4f.Provider import (
    Ails,
    You,
    Bing,
    Yqcloud,
    Theb,
    Aichat,
    Bard,
    Vercel,
    Forefront,
    Lockchat,
    Liaobots,
    H2o,
    ChatgptLogin,
    DeepAi,
    GetGpt,
    AItianhu,
    EasyChat,
    Acytoo,
    DfeHub,
    AiService,
    Wewordle,
    ChatgptAi,
)

from g4f import Provider
import g4f
from langchain.callbacks.manager import CallbackManagerForLLMRun
from langchain.llms.base import LLM

# Map provider names to their g4f provider classes so a provider can be
# selected by string via CustomLLM.provider_name.
provider_dict = {
    'Ails': Ails,
    'You': You,
    'Bing': Bing,
    'Yqcloud': Yqcloud,
    'Theb': Theb,
    'Aichat': Aichat,
    'Bard': Bard,
    'Vercel': Vercel,
    'Forefront': Forefront,
    'Lockchat': Lockchat,
    'Liaobots': Liaobots,
    'H2o': H2o,
    'ChatgptLogin': ChatgptLogin,
    'DeepAi': DeepAi,
    'GetGpt': GetGpt,
    'AItianhu': AItianhu,
    'EasyChat': EasyChat,
    'Acytoo': Acytoo,
    'DfeHub': DfeHub,
    'AiService': AiService,
    'Wewordle': Wewordle,
    'ChatgptAi': ChatgptAi,
}

# Per-provider credentials for providers that require authentication; fill in
# real values (e.g. the Bard cookie) before selecting such a provider.
provider_auth_settings = {
    'Bard': {
        'cookie': ""
    }
}


class CustomLLM(LLM):
    """LangChain LLM wrapper that routes completions through a g4f provider."""

    model_name: str = "gpt-3.5-turbo"
    provider_name: str = "GetGpt"

    @property
    def _llm_type(self) -> str:
        return "custom"

    def _call(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        # if stop is not None:
        #     raise ValueError("stop kwargs are not permitted.")
        provider_llm = getattr(Provider, self.provider_name)
        provider_llm.working = True

        bot_msg = g4f.ChatCompletion.create(
            model=self.model_name,
            provider=provider_dict[self.provider_name],
            messages=[{"role": "user", "content": prompt}],
            stream=provider_llm.supports_stream,
            # Providers that need auth must have an entry in provider_auth_settings.
            auth=provider_auth_settings[self.provider_name] if provider_llm.needs_auth else None,
        )
        # Streaming providers return a generator of text chunks; join them so
        # _call always returns a plain string.
        if provider_llm.supports_stream:
            bot_msg = "".join(bot_msg)
        return bot_msg

    @property
    def _identifying_params(self) -> Mapping[str, Any]:
        """Get the identifying parameters."""
        return {"model_name": self.model_name, "provider_name": self.provider_name}