WICKED4950 committed on
Commit
cbc3cf5
·
verified ·
1 Parent(s): 735ec05

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +13 -0
app.py CHANGED
@@ -1,6 +1,8 @@
1
  import gradio as gr
2
  from transformers import AutoTokenizer, TFBlenderbotForConditionalGeneration
3
  import tensorflow as tf
 
 
4
  print("Loading the model......")
5
  model_name = "WICKED4950/Irisonego5"
6
  strategy = tf.distribute.MirroredStrategy()
@@ -9,6 +11,16 @@ tokenizer = AutoTokenizer.from_pretrained(model_name)
9
  with strategy.scope():
10
  model = TFBlenderbotForConditionalGeneration.from_pretrained(model_name)
11
 
 
 
 
 
 
 
 
 
 
 
12
  print("Interface getting done....")
13
  # Define the chatbot function
14
  def predict(user_input):
@@ -27,6 +39,7 @@ def predict(user_input):
27
 
28
  # Decode the response
29
  response = tokenizer.decode(response_id[0], skip_special_tokens=True)
 
30
  return response
31
 
32
  # Gradio interface
 
1
  import gradio as gr
2
  from transformers import AutoTokenizer, TFBlenderbotForConditionalGeneration
3
  import tensorflow as tf
4
+ import json
5
+
6
  print("Loading the model......")
7
  model_name = "WICKED4950/Irisonego5"
8
  strategy = tf.distribute.MirroredStrategy()
 
11
  with strategy.scope():
12
  model = TFBlenderbotForConditionalGeneration.from_pretrained(model_name)
13
 
14
def save_question(question, answer, path="question_answer.json"):
    """Append a question/answer pair to a JSON log file (best-effort).

    Fixes the original signature, which was a SyntaxError: non-default
    parameters (`question`, `answer`) followed a default one (`path`).
    Callers use keyword arguments (save_question(question=..., answer=...)),
    so this reordering is backward-compatible.

    Parameters:
        question: the user's input string to log.
        answer: the model's response string to log.
        path: JSON file holding a list of {"Question:", "Answer:"} dicts.

    Any I/O or parse failure is printed and swallowed so that logging
    never breaks the chatbot's reply path.
    """
    entry = {"Question:": question, "Answer:": answer}
    try:
        try:
            with open(path, "r") as file:
                data = json.load(file)
        except FileNotFoundError:
            # First call: no log file yet — start a fresh list instead of
            # failing (the original printed an error and logged nothing).
            data = []
        data.append(entry)
        with open(path, "w") as file:
            json.dump(data, file, indent=4)
    except (OSError, json.JSONDecodeError) as e:
        # Best-effort logging: report and continue, never raise to caller.
        print(f"Error with {e}")
23
+
24
  print("Interface getting done....")
25
  # Define the chatbot function
26
  def predict(user_input):
 
39
 
40
  # Decode the response
41
  response = tokenizer.decode(response_id[0], skip_special_tokens=True)
42
+ save_question(question = user_input,answer=response)
43
  return response
44
 
45
  # Gradio interface