import g4f
import gradio as gr
from g4f.Provider import (
    Ails,
    You,
    Bing,
    Yqcloud,
    Theb,
    Aichat,
    Bard,
    Vercel,
    Forefront,
    Lockchat,
    Liaobots,
    H2o,
    ChatgptLogin,
    DeepAi,
    GetGpt
)
from revChatGPT.V1 import Chatbot
import os

from g4f.models import Model

# Map each known model's name to its best provider so the UI dropdown
# can resolve the provider argument for g4f.ChatCompletion.create().
# vars(Model) also contains dunders and other class attributes; the
# hasattr(..., 'name') check keeps only the model descriptor objects.
model_provider_dict = {
    model.name: model.best_provider
    for model in vars(Model).values()
    if hasattr(model, 'name')
}

# os.environ["no_proxy"] = "localhost,127.0.0.1,:1"

with gr.Blocks() as demo:

    # Chat transcript; seeded with one empty exchange so the widget renders.
    chatbot = gr.Chatbot([[None, None]], label='AI')
    msg = gr.Textbox(value="", label='')
    clear = gr.Button("Clear")
    # No default value: the user must pick a model before chatting.
    model_name = gr.Dropdown(model_provider_dict.keys())

    def user(user_message, history):
        """Record the user's message and lock the textbox while the bot replies.

        Returns the cleared (and disabled) textbox update plus the history
        extended with a new [user_message, pending-reply] pair.
        """
        return gr.update(value="", interactive=False), history + [[user_message, None]]

    def bot(history, model_name):
        """Stream the model's reply into the last history entry.

        Yields the updated history after every received chunk so the UI
        renders the answer incrementally.
        """
        history[-1][1] = ''
        # Guard: the dropdown starts without a selection; without this check
        # model_provider_dict[model_name] raises KeyError and the chat hangs.
        if model_name not in model_provider_dict:
            history[-1][1] = 'Please select a model from the dropdown first.'
            yield history
            return
        bot_msg = g4f.ChatCompletion.create(
            model=model_name,
            provider=model_provider_dict[model_name],
            messages=[{"role": "user", "content": history[-1][0]}],
            stream=True,
        )
        for chunk in bot_msg:
            history[-1][1] += chunk
            yield history

    # Submit pipeline: append user turn (non-queued), then stream the reply.
    response = msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
        bot, [chatbot, model_name], chatbot
    )
    # Re-enable the textbox once streaming finishes.
    response.then(lambda: gr.update(interactive=True), None, [msg], queue=False)
    clear.click(lambda: None, None, chatbot, queue=False)

# NOTE(review): Blocks normally takes title= at construction; setting the
# attribute afterwards may not reach the page <title> — confirm against the
# installed gradio version.
demo.title = "AI Chat"
demo.queue()
demo.launch()