alpdk1394 committed on
Commit
02f2a44
·
verified ·
1 Parent(s): 89d5d70

Return echo

Browse files
Files changed (1) hide show
  1. app.py +59 -61
app.py CHANGED
@@ -62,69 +62,67 @@
62
  # if __name__ == "__main__":
63
  # demo.launch()
64
 
 
 
 
 
 
 
 
 
 
65
  # import gradio as gr
66
 
67
- # def echo(message, history):
68
- # if not history:
69
- # return message
70
- # return str(history[-1][0] + history[-1][1])
71
 
72
- # demo = gr.ChatInterface(fn=echo, examples=["hello", "hola", "merhaba"], title="Echo Bot")
73
- # demo.launch()
 
 
 
 
 
 
 
74
 
75
- from huggingface_hub import InferenceClient
76
- import gradio as gr
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
77
 
78
- # client = InferenceClient("meta-llama/Meta-Llama-3-8B")
79
- client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
80
-
81
- def respond(
82
- message,
83
- history: list[tuple[str, str]],
84
- system_message,
85
- max_tokens,
86
- temperature,
87
- top_p,
88
- ):
89
- messages = [{"role": "system", "content": system_message}]
90
-
91
- for val in history:
92
- if val[0]:
93
- messages.append({"role": "user", "content": val[0]})
94
- if val[1]:
95
- messages.append({"role": "assistant", "content": val[1]})
96
-
97
- messages.append({"role": "user", "content": message})
98
-
99
- response = ""
100
-
101
- for message in client.chat_completion(
102
- messages,
103
- max_tokens=max_tokens,
104
- stream=True,
105
- temperature=temperature,
106
- top_p=top_p,
107
- ):
108
- token = message.choices[0].delta.content
109
-
110
- response += token
111
- yield response
112
-
113
- demo = gr.ChatInterface(
114
- respond,
115
- additional_inputs=[
116
- gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
117
- gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
118
- gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
119
- gr.Slider(
120
- minimum=0.1,
121
- maximum=1.0,
122
- value=0.95,
123
- step=0.05,
124
- label="Top-p (nucleus sampling)",
125
- ),
126
- ],
127
- )
128
-
129
- if __name__ == "__main__":
130
- demo.launch()
 
62
  # if __name__ == "__main__":
63
  # demo.launch()
64
 
65
+ import gradio as gr
66
+
67
+ def echo(message, history):
68
+ return message
69
+
70
+ demo = gr.ChatInterface(fn=echo, examples=["hello", "hola", "merhaba"], title="Echo Bot")
71
+ demo.launch()
72
+
73
+ # from huggingface_hub import InferenceClient
74
  # import gradio as gr
75
 
76
+ # # client = InferenceClient("meta-llama/Meta-Llama-3-8B")
77
+ # client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
 
 
78
 
79
+ # def respond(
80
+ # message,
81
+ # history: list[tuple[str, str]],
82
+ # system_message,
83
+ # max_tokens,
84
+ # temperature,
85
+ # top_p,
86
+ # ):
87
+ # messages = [{"role": "system", "content": system_message}]
88
 
89
+ # for val in history:
90
+ # if val[0]:
91
+ # messages.append({"role": "user", "content": val[0]})
92
+ # if val[1]:
93
+ # messages.append({"role": "assistant", "content": val[1]})
94
+
95
+ # messages.append({"role": "user", "content": message})
96
+
97
+ # response = ""
98
+
99
+ # for message in client.chat_completion(
100
+ # messages,
101
+ # max_tokens=max_tokens,
102
+ # stream=True,
103
+ # temperature=temperature,
104
+ # top_p=top_p,
105
+ # ):
106
+ # token = message.choices[0].delta.content
107
 
108
+ # response += token
109
+ # yield response
110
+
111
+ # demo = gr.ChatInterface(
112
+ # respond,
113
+ # additional_inputs=[
114
+ # gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
115
+ # gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
116
+ # gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
117
+ # gr.Slider(
118
+ # minimum=0.1,
119
+ # maximum=1.0,
120
+ # value=0.95,
121
+ # step=0.05,
122
+ # label="Top-p (nucleus sampling)",
123
+ # ),
124
+ # ],
125
+ # )
126
+
127
+ # if __name__ == "__main__":
128
+ # demo.launch()