santhosh1234 committed (verified)
Commit 95c6fbb · 1 Parent(s): 35f37a7

Upload chatgptmodelapi.py

Files changed (1)
  1. chatgptmodelapi.py +69 -0
chatgptmodelapi.py ADDED
@@ -0,0 +1,69 @@
+ # -*- coding: utf-8 -*-
+ """chatGptModelApi.ipynb
+
+ Automatically generated by Colab.
+
+ Original file is located at
+     https://colab.research.google.com/drive/17PFsIT6q1FbnJOGfEOZlrbTBw7L8ABkW
+ """
+
+ !pip install -q gradio
+
+ import gradio as gr
+ from huggingface_hub import InferenceClient
+
+ """
+ For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
+ """
+ client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
+
+ def respond(
+     message,
+     history: list[tuple[str, str]],
+     system_message,
+     max_tokens,
+     temperature,
+     top_p,
+ ):
+     messages = [{"role": "system", "content": system_message}]
+
+     for val in history:
+         if val[0]:
+             messages.append({"role": "user", "content": val[0]})
+         if val[1]:
+             messages.append({"role": "assistant", "content": val[1]})
+
+     messages.append({"role": "user", "content": message})
+
+     response = ""
+
+     for message in client.chat_completion(
+         messages,
+         max_tokens=max_tokens,
+         stream=True,
+         temperature=temperature,
+         top_p=top_p,
+     ):
+         token = message.choices[0].delta.content
+
+         response += token
+         yield response
+
+ demo = gr.ChatInterface(
+     respond,
+     additional_inputs=[
+         gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
+         gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
+         gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
+         gr.Slider(
+             minimum=0.1,
+             maximum=1.0,
+             value=0.95,
+             step=0.05,
+             label="Top-p (nucleus sampling)",
+         ),
+     ],
+ )
+
+ if __name__ == "__main__":
+     demo.launch()
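
For reference, the same `InferenceClient.chat_completion` call that the `respond` generator streams from can also be made without streaming. The sketch below is a minimal, hypothetical usage example and is not part of the committed file; the single-turn prompt and parameter values are illustrative, chosen to mirror the defaults in the Gradio sliders above.

from huggingface_hub import InferenceClient

# Same model endpoint used in chatgptmodelapi.py.
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")

# Non-streaming call: returns one ChatCompletionOutput instead of a stream of deltas.
output = client.chat_completion(
    [
        {"role": "system", "content": "You are a friendly Chatbot."},
        {"role": "user", "content": "Hello!"},  # illustrative single-turn message
    ],
    max_tokens=512,
    temperature=0.7,
    top_p=0.95,
)

# The full reply is on choices[0].message.content rather than delta.content.
print(output.choices[0].message.content)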