AdnanRiaz107 committed
Commit 58ef48c · verified · 1 Parent(s): 4d0d6e8

Update app.py

Files changed (1)
  1. app.py +2 -12
app.py CHANGED
@@ -1,14 +1,9 @@
 import gradio as gr
 from huggingface_hub import InferenceClient
 
-"""
-For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
-"""
+# Correctly initialize the InferenceClient with your fine-tuned model
 client = InferenceClient("AdnanRiaz107/CodePhi-3-Mini-1K4bit")
 
-")
-
-
 def respond(
     message,
     history: list[tuple[str, str]],
@@ -37,13 +32,9 @@ def respond(
         top_p=top_p,
     ):
         token = message.choices[0].delta.content
-
        response += token
        yield response
 
-"""
-For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
-"""
 demo = gr.ChatInterface(
     respond,
     additional_inputs=[
@@ -60,6 +51,5 @@ demo = gr.ChatInterface(
     ],
 )
 
-
 if __name__ == "__main__":
-    demo.launch()
+    demo.launch()
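
For context, the hunks above show only the edges of respond(): the lines `top_p=top_p,`, `):` and `token = message.choices[0].delta.content` belong to a token-streaming loop that is not changed by this commit. Below is a minimal sketch of what that loop likely looks like, assuming app.py otherwise follows the stock Gradio ChatInterface template; the middle of the file is not part of this diff, so the history-rebuilding code here is an illustrative assumption, not the author's exact source.

import gradio as gr
from huggingface_hub import InferenceClient

client = InferenceClient("AdnanRiaz107/CodePhi-3-Mini-1K4bit")


def respond(message, history: list[tuple[str, str]],
            system_message, max_tokens, temperature, top_p):
    # Assumed from the standard template: rebuild an OpenAI-style message list
    # from the chat history before calling the Inference API.
    messages = [{"role": "system", "content": system_message}]
    for user_msg, assistant_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": message})

    response = ""
    # Stream completion chunks and yield the growing reply; the context lines
    # visible in the diff (`token = ...`, `response += token`, `yield response`)
    # come from this loop. The loop variable is named `message` in the template,
    # which shadows the parameter above.
    for message in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = message.choices[0].delta.content
        response += token
        yield response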