Xmaster6y committed
Commit 1a02700
1 Parent(s): 009d25b

gh transfer

Files changed (4)
  1. app.py +70 -0
  2. requirements.txt +1 -0
  3. src/__init__.py +0 -0
  4. src/requests.py +31 -0
app.py ADDED
@@ -0,0 +1,70 @@
+ """
+ Main Gradio module.
+ """
+
+ import gradio as gr
+
+ from src import requests
+
+
+ def respond_stream(
+     message,
+     chat_history,
+     api_key,
+     model,
+     temperature,
+     top_p,
+     max_tokens,
+     system,
+ ):
+     response = ""
+     received_anything = False
+     for chunk in requests.get_stream_chat_completion(
+         message=message,
+         chat_history=chat_history,
+         model=model,
+         api_key=api_key,
+         temperature=temperature,
+         top_p=top_p,
+         max_tokens=int(max_tokens),
+         system=system if system else None,
+     ):
+         response += chunk
+         yield response
+         received_anything = True
+     if not received_anything:
+         gr.Warning("Error: Invalid API Key")
+         yield ""
+
+
+ with gr.Blocks(title="Mistral Playground") as demo:
+     with gr.Row():
+         api_key = gr.Textbox(lines=1, label="Mistral API Key")
+         model = gr.Radio(
+             choices=["mistral-tiny", "mistral-small", "mistral-medium"],
+             value="mistral-tiny",
+         )
+     with gr.Row():
+         temperature = gr.Slider(
+             minimum=0.01, maximum=1.0, step=0.01, label="Temperature"
+         )
+         top_p = gr.Slider(minimum=0.01, maximum=1.0, step=0.01, label="Top P")
+         max_tokens = gr.Slider(
+             minimum=1, maximum=4000, step=1, label="Max Tokens", value=100
+         )
+
+     with gr.Row():
+         system = gr.Textbox(lines=10, label="System Message")
+     gr.ChatInterface(
+         respond_stream,
+         additional_inputs=[
+             api_key,
+             model,
+             temperature,
+             top_p,
+             max_tokens,
+             system,
+         ],
+     )
+
+ demo.launch()
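
Note: respond_stream is a generator that yields the accumulated reply after every chunk, and gr.ChatInterface re-renders the bot message on each yield, which is what produces the streaming effect in the UI. A minimal sketch of that contract (not part of this commit; fake_stream is a hypothetical stand-in for requests.get_stream_chat_completion):

# Sketch only: the cumulative-yield pattern that ChatInterface consumes above.
# `fake_stream` is a made-up stand-in for the Mistral backend.
import time

import gradio as gr


def fake_stream(message):
    for token in ("Hello", ", ", "world", "!"):
        time.sleep(0.1)
        yield token


def respond(message, chat_history):
    response = ""
    for chunk in fake_stream(message):
        response += chunk
        yield response  # yield the full text so far, not just the new chunk


gr.ChatInterface(respond).launch()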
requirements.txt ADDED
@@ -0,0 +1 @@
+ mistralai @ git+https://github.com/mistralai/client-python@be8bfb5010462996e289ff6f66e10e101f341197
src/__init__.py ADDED
File without changes
src/requests.py ADDED
@@ -0,0 +1,31 @@
+ """
+ Module to perform requests to Mistral API.
+ """
+
+ from mistralai.client import MistralClient
+ from mistralai.models.chat_completion import ChatMessage
+
+
+ def get_stream_chat_completion(
+     message, chat_history, model, api_key, system=None, **kwargs
+ ):
+     messages = []
+     if system is not None:
+         messages.append(ChatMessage(role="system", content=system))
+     for chat in chat_history:
+         human_message, bot_message = chat
+         messages.extend(
+             (
+                 ChatMessage(role="user", content=human_message),
+                 ChatMessage(role="assistant", content=bot_message),
+             )
+         )
+     messages.append(ChatMessage(role="user", content=message))
+     client = MistralClient(api_key=api_key)
+     for chunk in client.chat_stream(
+         model=model,
+         messages=messages,
+         **kwargs,
+     ):
+         if chunk.choices[0].delta.content is not None:
+             yield chunk.choices[0].delta.content
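
For reference, a standalone sketch (not part of the commit) of how get_stream_chat_completion could be driven without the Gradio UI; the prompt, model choice, sampling parameters, and the MISTRAL_API_KEY environment variable are illustrative assumptions:

# Sketch only: exercises src.requests.get_stream_chat_completion directly.
# Assumes MISTRAL_API_KEY is set; prompt and parameters are illustrative.
import os

from src import requests

for chunk in requests.get_stream_chat_completion(
    message="Say hello in one sentence.",
    chat_history=[],  # list of (user_message, bot_message) pairs, as ChatInterface supplies
    model="mistral-tiny",
    api_key=os.environ["MISTRAL_API_KEY"],
    temperature=0.7,
    top_p=1.0,
    max_tokens=100,
):
    print(chunk, end="", flush=True)
print()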