oceansweep committed
Commit 4c1add8
Parent: 0c16438

Upload 3 files

App_Function_Libraries/Gradio_UI/Character_Interaction_tab.py CHANGED
@@ -4,6 +4,7 @@
 # Imports
 import base64
 import io
+import uuid
 from datetime import datetime as datetime
 import logging
 import json
@@ -16,7 +17,7 @@ import gradio as gr
 from PIL import Image
 #
 # Local Imports
-from App_Function_Libraries.Chat import chat, load_characters
+from App_Function_Libraries.Chat import chat, load_characters, save_chat_history_to_db_wrapper
 from App_Function_Libraries.Gradio_UI.Chat_ui import chat_wrapper
 from App_Function_Libraries.Gradio_UI.Writing_tab import generate_writing_feedback
 #
@@ -165,7 +166,8 @@ def create_character_card_interaction_tab():
 send_message_button = gr.Button("Send Message")
 regenerate_button = gr.Button("Regenerate Last Message")
 clear_chat_button = gr.Button("Clear Chat")
-save_chat_button = gr.Button("Save This Chat")
+chat_media_name = gr.Textbox(label="Custom Chat Name(optional)", visible=True)
+save_chat_history_to_db = gr.Button("Save Chat History to DataBase")
 save_status = gr.Textbox(label="Save Status", interactive=False)

 character_data = gr.State(None)
@@ -403,6 +405,10 @@ def create_character_card_interaction_tab():
 finally:
 logging.debug("Exiting update_character_info_with_error_handling")

+# Define States for conversation_id and media_content, which are required for saving chat history
+conversation_id = gr.State(str(uuid.uuid4()))
+media_content = gr.State({})
+
 import_card_button.click(
 fn=import_character,
 inputs=[character_card_upload],
@@ -445,10 +451,11 @@ def create_character_card_interaction_tab():
 outputs=[user_name]
 )

-save_chat_button.click(
-fn=save_current_chat,
-inputs=[chat_history, character_data],
-outputs=[save_status]
+# FIXME - Implement saving chat history to database; look at Chat_UI.py for reference
+save_chat_history_to_db.click(
+save_chat_history_to_db_wrapper,
+inputs=[chat_history, conversation_id, media_content, chat_media_name],
+outputs=[conversation_id, gr.Textbox(label="Save Status")]
 )

 return character_data, chat_history, user_input, user_name, character_image
@@ -598,6 +605,8 @@ def create_multiple_character_chat_tab():
 add_narration_btn = gr.Button("Add Narration")
 error_box = gr.Textbox(label="Error Messages", visible=False)
 reset_btn = gr.Button("Reset Conversation")
+chat_media_name = gr.Textbox(label="Custom Chat Name(optional)", visible=True)
+save_chat_history_to_db = gr.Button("Save Chat History to DataBase")

 def update_character_selectors(num):
 return [gr.update(visible=True) if i < int(num) else gr.update(visible=False) for i in range(4)]
@@ -667,6 +676,10 @@ def create_multiple_character_chat_tab():
 error_message = f"An error occurred: {str(e)}"
 return conversation, current_index, gr.update(visible=True, value=error_message)

+# Define States for conversation_id and media_content, which are required for saving chat history
+media_content = gr.State({})
+conversation_id = gr.State(str(uuid.uuid4()))
+
 next_turn_btn.click(
 take_turn_with_error_handling,
 inputs=[chat_display, current_index] + character_selectors + [api_endpoint, api_key, temperature,
@@ -685,6 +698,13 @@ def create_multiple_character_chat_tab():
 outputs=[chat_display, current_index, scenario, narrator_input]
 )

+# FIXME - Implement saving chat history to database; look at Chat_UI.py for reference
+save_chat_history_to_db.click(
+save_chat_history_to_db_wrapper,
+inputs=[chat_display, conversation_id, media_content, chat_media_name],
+outputs=[conversation_id, gr.Textbox(label="Save Status")]
+)
+
 return character_interaction

 #
@@ -728,9 +748,15 @@ def create_narrator_controlled_conversation_tab():
 with gr.Row():
 generate_btn = gr.Button("Generate Next Interaction")
 reset_btn = gr.Button("Reset Conversation")
+chat_media_name = gr.Textbox(label="Custom Chat Name(optional)", visible=True)
+save_chat_history_to_db = gr.Button("Save Chat History to DataBase")

 error_box = gr.Textbox(label="Error Messages", visible=False)

+# Define States for conversation_id and media_content, which are required for saving chat history
+conversation_id = gr.State(str(uuid.uuid4()))
+media_content = gr.State({})
+
 def generate_interaction(conversation, narrator_text, user_text, api_endpoint, api_key, temperature,
 *character_data):
 try:
@@ -790,6 +816,14 @@ def create_narrator_controlled_conversation_tab():
 outputs=[conversation_display, narrator_input, user_input, error_box]
 )

+# FIXME - Implement saving chat history to database; look at Chat_UI.py for reference
+save_chat_history_to_db.click(
+save_chat_history_to_db_wrapper,
+inputs=[conversation_display, conversation_id, media_content, chat_media_name],
+outputs=[conversation_id, gr.Textbox(label="Save Status")]
+)
+
+
 return api_endpoint, api_key, temperature, narrator_input, conversation_display, user_input, generate_btn, reset_btn, error_box

 #
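
For reference, each of the three tabs above now wires its save button as save_chat_history_to_db_wrapper with inputs (history, conversation_id, media_content, chat_media_name) and outputs (conversation_id, status message). The wrapper itself lives in App_Function_Libraries.Chat and is not part of this diff, so the sketch below only mirrors that call shape to make the wiring easier to follow; it is an assumption-labelled stand-in, with an in-memory dict in place of the real database call.

    # Sketch only -- NOT the real implementation in App_Function_Libraries.Chat. It just mirrors the
    # click() wiring above: inputs (chat_history, conversation_id, media_content, chat_media_name),
    # outputs (conversation_id, status message). An in-memory dict stands in for the database.
    import uuid

    _FAKE_DB = {}  # hypothetical stand-in store

    def save_chat_history_to_db_wrapper(chat_history, conversation_id, media_content, chat_media_name=None):
        try:
            if not chat_history:
                return conversation_id, "No messages to save."
            if not conversation_id:
                conversation_id = str(uuid.uuid4())
            title = chat_media_name or f"Chat {conversation_id[:8]}"
            _FAKE_DB[conversation_id] = {
                "title": title,
                "messages": list(chat_history),
                "media": media_content,
            }
            return conversation_id, f"Chat saved as '{title}'."
        except Exception as e:
            return conversation_id, f"Error saving chat: {e}"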
App_Function_Libraries/Gradio_UI/Chat_Workflows.py CHANGED
@@ -9,7 +9,9 @@ from pathlib import Path
 # External Imports
 import gradio as gr
 #
-from App_Function_Libraries.Gradio_UI.Chat_ui import process_with_llm
+from App_Function_Libraries.Gradio_UI.Chat_ui import chat_wrapper, search_conversations, \
+load_conversation
+from App_Function_Libraries.Chat import save_chat_history_to_db_wrapper
 #
 ############################################################################################################
 #
@@ -25,164 +27,293 @@ with json_path.open('r') as f:
 def chat_workflows_tab():
 with gr.TabItem("Chat Workflows"):
 gr.Markdown("# Workflows using LLMs")
+chat_history = gr.State([])
+media_content = gr.State({})
+selected_parts = gr.State([])
+conversation_id = gr.State(None)
+workflow_state = gr.State({"current_step": 0, "max_steps": 0, "conversation_id": None})

 with gr.Row():
-workflow_selector = gr.Dropdown(label="Select Workflow", choices=[wf['name'] for wf in workflows])
-api_selector = gr.Dropdown(
-label="Select API Endpoint",
-choices=["OpenAI", "Anthropic", "Cohere", "Groq", "DeepSeek", "Mistral", "OpenRouter",
-"Llama.cpp", "Kobold", "Ooba", "Tabbyapi", "VLLM", "ollama", "HuggingFace"],
-value="OpenAI"
-)
-api_key_input = gr.Textbox(label="API Key (optional)", type="password")
-
-context_input = gr.Textbox(label="Initial Context (optional)", lines=5)
-
-# Create a container for dynamic components
-with gr.Column() as dynamic_components:
-prompt_displays = []
-user_inputs = []
-output_boxes = []
-process_buttons = []
-regenerate_buttons = []
-
-# Create the maximum number of components needed
-max_steps = max(len(wf['prompts']) for wf in workflows)
-for i in range(max_steps):
-prompt_displays.append(gr.Markdown(visible=False))
-user_inputs.append(gr.Textbox(label=f"Your Input", lines=2, visible=False))
-output_boxes.append(gr.Textbox(label=f"AI Output", lines=5, visible=False))
-with gr.Row():
-process_buttons.append(gr.Button(f"Process Step {i + 1}", visible=False))
-regenerate_buttons.append(gr.Button(f"🔄 Regenerate", visible=False))
+with gr.Column():
+workflow_selector = gr.Dropdown(label="Select Workflow", choices=[wf['name'] for wf in workflows])
+api_selector = gr.Dropdown(
+label="Select API Endpoint",
+choices=["OpenAI", "Anthropic", "Cohere", "Groq", "DeepSeek", "Mistral", "OpenRouter",
+"Llama.cpp", "Kobold", "Ooba", "Tabbyapi", "VLLM", "ollama", "HuggingFace"],
+value="OpenAI"
+)
+api_key_input = gr.Textbox(label="API Key (optional)", type="password")
+temperature = gr.Slider(label="Temperature", minimum=0.00, maximum=1.0, step=0.05, value=0.7)
+save_conversation = gr.Checkbox(label="Save Conversation", value=False)
+with gr.Column():
+gr.Markdown("Placeholder")
+with gr.Row():
+with gr.Column():
+conversation_search = gr.Textbox(label="Search Conversations")
+search_conversations_btn = gr.Button("Search Conversations")
+with gr.Column():
+previous_conversations = gr.Dropdown(label="Select Conversation", choices=[], interactive=True)
+load_conversations_btn = gr.Button("Load Selected Conversation")
+with gr.Row():
+with gr.Column():
+context_input = gr.Textbox(label="Initial Context", lines=5)
+chatbot = gr.Chatbot(label="Workflow Chat")
+msg = gr.Textbox(label="Your Input")
+submit_btn = gr.Button("Submit")
+clear_btn = gr.Button("Clear Chat")
+chat_media_name = gr.Textbox(label="Custom Chat Name(optional)")
+save_btn = gr.Button("Save Chat to Database")

 def update_workflow_ui(workflow_name):
-selected_workflow = next(wf for wf in workflows if wf['name'] == workflow_name)
-num_prompts = len(selected_workflow['prompts'])
-
-prompt_updates = []
-input_updates = []
-output_updates = []
-button_updates = []
-regenerate_updates = []
-
-for i in range(max_steps):
-if i < num_prompts:
-prompt_updates.append(
-gr.update(value=f"**Step {i + 1}:** {selected_workflow['prompts'][i]}", visible=True))
-input_updates.append(gr.update(value="", visible=True, interactive=(i == 0)))
-output_updates.append(gr.update(value="", visible=True))
-button_updates.append(gr.update(visible=(i == 0)))
-regenerate_updates.append(gr.update(visible=False))
-else:
-prompt_updates.append(gr.update(visible=False))
-input_updates.append(gr.update(visible=False))
-output_updates.append(gr.update(visible=False))
-button_updates.append(gr.update(visible=False))
-regenerate_updates.append(gr.update(visible=False))
+if not workflow_name:
+return {"current_step": 0, "max_steps": 0, "conversation_id": None}, "", []
+selected_workflow = next((wf for wf in workflows if wf['name'] == workflow_name), None)
+if selected_workflow:
+num_prompts = len(selected_workflow['prompts'])
+context = selected_workflow.get('context', '')
+first_prompt = selected_workflow['prompts'][0]
+initial_chat = [(None, f"{first_prompt}")]
+logging.info(f"Initializing workflow: {workflow_name} with {num_prompts} steps")
+return {"current_step": 0, "max_steps": num_prompts, "conversation_id": None}, context, initial_chat
+else:
+logging.error(f"Selected workflow not found: {workflow_name}")
+return {"current_step": 0, "max_steps": 0, "conversation_id": None}, "", []

-return prompt_updates + input_updates + output_updates + button_updates + regenerate_updates
-
-def process(context, workflow_name, api_endpoint, api_key, step, *user_inputs):
+def process_workflow_step(message, history, context, workflow_name, api_endpoint, api_key, workflow_state,
+save_conv, temp):
+logging.info(f"Process workflow step called with message: {message}")
+logging.info(f"Current workflow state: {workflow_state}")
 try:
-selected_workflow = next(wf for wf in workflows if wf['name'] == workflow_name)
-except StopIteration:
-# Handle the case where no matching workflow is found
-error_message = f"No workflow found with name: {workflow_name}"
-logging.error(error_message)
-return [gr.update(value=error_message)] * (
-len(prompt_displays) + len(user_inputs) + len(output_boxes) + len(process_buttons) + len(
-regenerate_buttons))
-
-# Ensure we don't go out of bounds
-if step >= len(selected_workflow['prompts']):
-error_message = f"Step {step} is out of range for workflow: {workflow_name}"
-logging.error(error_message)
-return [gr.update(value=error_message)] * (
-len(prompt_displays) + len(user_inputs) + len(output_boxes) + len(process_buttons) + len(
-regenerate_buttons))
-
-# Build up the context from previous steps
-full_context = context + "\n\n"
-for i in range(step + 1):
-full_context += f"Question: {selected_workflow['prompts'][i]}\n"
-full_context += f"Answer: {user_inputs[i]}\n"
-if i < step:
-full_context += f"AI Output: {output_boxes[i].value}\n\n"
+selected_workflow = next((wf for wf in workflows if wf['name'] == workflow_name), None)
+if not selected_workflow:
+logging.error(f"Selected workflow not found: {workflow_name}")
+return history, workflow_state, gr.update(interactive=True)
+
+current_step = workflow_state["current_step"]
+max_steps = workflow_state["max_steps"]
+
+logging.info(f"Current step: {current_step}, Max steps: {max_steps}")
+
+if current_step >= max_steps:
+logging.info("Workflow completed, disabling input")
+return history, workflow_state, gr.update(interactive=False)
+
+prompt = selected_workflow['prompts'][current_step]
+full_message = f"{context}\n\nStep {current_step + 1}: {prompt}\nUser: {message}"
+
+logging.info(f"Calling chat_wrapper with full_message: {full_message[:100]}...")
+bot_message, new_history, new_conversation_id = chat_wrapper(
+full_message, history, media_content.value, selected_parts.value,
+api_endpoint, api_key, "", workflow_state["conversation_id"],
+save_conv, temp, "You are a helpful assistant guiding through a workflow."
+)
+
+logging.info(f"Received bot_message: {bot_message[:100]}...")

-try:
-result = process_with_llm(workflow_name, full_context, selected_workflow['prompts'][step], api_endpoint,
-api_key)
-except Exception as e:
-error_message = f"Error processing with LLM: {str(e)}"
-logging.error(error_message)
-result = error_message
-
-updates = []
-for i in range(max_steps):
-if i == step:
-updates.extend([
-gr.update(), # Markdown (prompt_displays)
-gr.update(interactive=False), # Textbox (user_inputs)
-gr.update(value=result), # Textbox (output_boxes)
-gr.update(visible=False), # Button (process_buttons)
-gr.update(visible=True) # Button (regenerate_buttons)
-])
-elif i == step + 1:
-updates.extend([
-gr.update(), # Markdown (prompt_displays)
-gr.update(interactive=True), # Textbox (user_inputs)
-gr.update(), # Textbox (output_boxes)
-gr.update(visible=True), # Button (process_buttons)
-gr.update(visible=False) # Button (regenerate_buttons)
-])
-elif i > step + 1:
-updates.extend([
-gr.update(), # Markdown (prompt_displays)
-gr.update(interactive=False), # Textbox (user_inputs)
-gr.update(), # Textbox (output_boxes)
-gr.update(visible=False), # Button (process_buttons)
-gr.update(visible=False) # Button (regenerate_buttons)
-])
-else:
-updates.extend([
-gr.update(), # Markdown (prompt_displays)
-gr.update(interactive=False), # Textbox (user_inputs)
-gr.update(), # Textbox (output_boxes)
-gr.update(visible=False), # Button (process_buttons)
-gr.update(visible=True) # Button (regenerate_buttons)
-])
-
-return updates
+next_step = current_step + 1
+new_workflow_state = {
+"current_step": next_step,
+"max_steps": max_steps,
+"conversation_id": new_conversation_id
+}
+
+if next_step >= max_steps:
+logging.info("Workflow completed after this step")
+return new_history, new_workflow_state, gr.update(interactive=False)
+else:
+next_prompt = selected_workflow['prompts'][next_step]
+new_history.append((None, f"Step {next_step + 1}: {next_prompt}"))
+logging.info(f"Moving to next step: {next_step}")
+return new_history, new_workflow_state, gr.update(interactive=True)
+except Exception as e:
+logging.error(f"Error in process_workflow_step: {str(e)}")
+return history, workflow_state, gr.update(interactive=True)

-# Set up event handlers
 workflow_selector.change(
 update_workflow_ui,
 inputs=[workflow_selector],
-outputs=prompt_displays + user_inputs + output_boxes + process_buttons + regenerate_buttons
+outputs=[workflow_state, context_input, chatbot]
+)
+
+submit_btn.click(
+process_workflow_step,
+inputs=[msg, chatbot, context_input, workflow_selector, api_selector, api_key_input, workflow_state,
+save_conversation, temperature],
+outputs=[chatbot, workflow_state, msg]
+).then(
+lambda: gr.update(value=""),
+outputs=[msg]
+)
+
+clear_btn.click(
+lambda: ([], {"current_step": 0, "max_steps": 0, "conversation_id": None}, ""),
+outputs=[chatbot, workflow_state, context_input]
+)
+
+save_btn.click(
+save_chat_history_to_db_wrapper,
+inputs=[chatbot, conversation_id, media_content, chat_media_name],
+outputs=[conversation_id, gr.Textbox(label="Save Status")]
 )

-# Set up process button click events
-for i, button in enumerate(process_buttons):
-button.click(
-fn=lambda context, wf_name, api_endpoint, api_key, *inputs, step=i: process(context, wf_name,
-api_endpoint, api_key, step,
-*inputs),
-inputs=[context_input, workflow_selector, api_selector, api_key_input] + user_inputs,
-outputs=prompt_displays + user_inputs + output_boxes + process_buttons + regenerate_buttons
-).then(lambda: gr.update(value=""), outputs=[user_inputs[i]])
-
-# Set up regenerate button click events
-for i, button in enumerate(regenerate_buttons):
-button.click(
-fn=lambda context, wf_name, api_endpoint, api_key, *inputs, step=i: process(context, wf_name,
-api_endpoint, api_key, step,
-*inputs),
-inputs=[context_input, workflow_selector, api_selector, api_key_input] + user_inputs,
-outputs=prompt_displays + user_inputs + output_boxes + process_buttons + regenerate_buttons
-)
-
-return workflow_selector, api_selector, api_key_input, context_input, dynamic_components
+search_conversations_btn.click(
+search_conversations,
+inputs=[conversation_search],
+outputs=[previous_conversations]
+)
+
+load_conversations_btn.click(
+lambda: ([], {"current_step": 0, "max_steps": 0, "conversation_id": None}, ""),
+outputs=[chatbot, workflow_state, context_input]
+).then(
+load_conversation,
+inputs=[previous_conversations],
+outputs=[chatbot, conversation_id]
+)
+
+return workflow_selector, api_selector, api_key_input, context_input, chatbot, msg, submit_btn, clear_btn, save_btn
+# def chat_workflows_tab():
+# with gr.TabItem("Chat Workflows"):
+# gr.Markdown("# Workflows using LLMs")
+# chat_history = gr.State([])
+# media_content = gr.State({})
+# selected_parts = gr.State([])
+# conversation_id = gr.State(None)
+# workflow_state = gr.State({"current_step": 0, "max_steps": 0, "conversation_id": None})
+#
+# with gr.Row():
+# workflow_selector = gr.Dropdown(label="Select Workflow", choices=[wf['name'] for wf in workflows])
+# api_selector = gr.Dropdown(
+# label="Select API Endpoint",
+# choices=["OpenAI", "Anthropic", "Cohere", "Groq", "DeepSeek", "Mistral", "OpenRouter",
+# "Llama.cpp", "Kobold", "Ooba", "Tabbyapi", "VLLM", "ollama", "HuggingFace"],
+# value="OpenAI"
+# )
+# api_key_input = gr.Textbox(label="API Key (optional)", type="password")
+#
+# context_input = gr.Textbox(label="Initial Context (optional)", lines=5)
+#
+# with gr.Row():
+# temperature = gr.Slider(label="Temperature", minimum=0.00, maximum=1.0, step=0.05, value=0.7)
+# save_conversation = gr.Checkbox(label="Save Conversation", value=False)
+#
+# chatbot = gr.Chatbot(label="Workflow Chat")
+# msg = gr.Textbox(label="Your Input")
+# submit_btn = gr.Button("Submit")
+# clear_btn = gr.Button("Clear Chat")
+# save_btn = gr.Button("Save Chat to Database")
+#
+# with gr.Row():
+# conversation_search = gr.Textbox(label="Search Conversations")
+# search_conversations_btn = gr.Button("Search Conversations")
+# previous_conversations = gr.Dropdown(label="Select Conversation", choices=[], interactive=True)
+# load_conversations_btn = gr.Button("Load Selected Conversation")
+#
+# def update_workflow_ui(workflow_name):
+# if not workflow_name:
+# return {"current_step": 0, "max_steps": 0, "conversation_id": None}
+# selected_workflow = next((wf for wf in workflows if wf['name'] == workflow_name), None)
+# if selected_workflow:
+# num_prompts = len(selected_workflow['prompts'])
+# logging.info(f"Initializing workflow: {workflow_name} with {num_prompts} steps")
+# return {"current_step": 0, "max_steps": num_prompts, "conversation_id": None}
+# else:
+# logging.error(f"Selected workflow not found: {workflow_name}")
+# return {"current_step": 0, "max_steps": 0, "conversation_id": None}
+#
+# def process_workflow_step(message, history, context, workflow_name, api_endpoint, api_key, workflow_state,
+# save_conv, temp):
+# logging.info(f"Process workflow step called with message: {message}")
+# logging.info(f"Current workflow state: {workflow_state}")
+# try:
+# selected_workflow = next((wf for wf in workflows if wf['name'] == workflow_name), None)
+# if not selected_workflow:
+# logging.error(f"Selected workflow not found: {workflow_name}")
+# return history, workflow_state, gr.update(interactive=True)
+#
+# current_step = workflow_state["current_step"]
+# max_steps = workflow_state["max_steps"]
+#
+# logging.info(f"Current step: {current_step}, Max steps: {max_steps}")
+#
+# if current_step >= max_steps:
+# logging.info("Workflow completed, disabling input")
+# return history, workflow_state, gr.update(interactive=False)
+#
+# prompt = selected_workflow['prompts'][current_step]
+# full_message = f"{context}\n\nStep {current_step + 1}: {prompt}\nUser: {message}"
+#
+# logging.info(f"Calling chat_wrapper with full_message: {full_message[:100]}...")
+# bot_message, new_history, new_conversation_id = chat_wrapper(
+# full_message, history, media_content.value, selected_parts.value,
+# api_endpoint, api_key, "", workflow_state["conversation_id"],
+# save_conv, temp, "You are a helpful assistant guiding through a workflow."
+# )
+#
+# logging.info(f"Received bot_message: {bot_message[:100]}...")
+#
+# next_step = current_step + 1
+# new_workflow_state = {
+# "current_step": next_step,
+# "max_steps": max_steps,
+# "conversation_id": new_conversation_id
+# }
+#
+# if next_step >= max_steps:
+# logging.info("Workflow completed after this step")
+# return new_history, new_workflow_state, gr.update(interactive=False)
+# else:
+# next_prompt = selected_workflow['prompts'][next_step]
+# new_history.append((None, f"Step {next_step + 1}: {next_prompt}"))
+# logging.info(f"Moving to next step: {next_step}")
+# return new_history, new_workflow_state, gr.update(interactive=True)
+# except Exception as e:
+# logging.error(f"Error in process_workflow_step: {str(e)}")
+# return history, workflow_state, gr.update(interactive=True)
+#
+# workflow_selector.change(
+# update_workflow_ui,
+# inputs=[workflow_selector],
+# outputs=[workflow_state]
+# )
+#
+# submit_btn.click(
+# process_workflow_step,
+# inputs=[msg, chatbot, context_input, workflow_selector, api_selector, api_key_input, workflow_state,
+# save_conversation, temperature],
+# outputs=[chatbot, workflow_state, msg]
+# ).then(
+# lambda: gr.update(value=""),
+# outputs=[msg]
+# )
+#
+# clear_btn.click(
+# lambda: ([], {"current_step": 0, "max_steps": 0, "conversation_id": None}),
+# outputs=[chatbot, workflow_state]
+# )
+#
+# save_btn.click(
+# save_chat_history_to_db_wrapper,
+# inputs=[chatbot, conversation_id, media_content],
+# outputs=[conversation_id, gr.Textbox(label="Save Status")]
+# )
+#
+# search_conversations_btn.click(
+# search_conversations,
+# inputs=[conversation_search],
+# outputs=[previous_conversations]
+# )
+#
+# load_conversations_btn.click(
+# lambda: ([], {"current_step": 0, "max_steps": 0, "conversation_id": None}),
+# outputs=[chatbot, workflow_state]
+# ).then(
+# load_conversation,
+# inputs=[previous_conversations],
+# outputs=[chatbot, conversation_id]
+# )
+#
+# return workflow_selector, api_selector, api_key_input, context_input, chatbot, msg, submit_btn, clear_btn, save_btn

 #
 # End of script
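
The reworked tab is driven entirely by the workflows list loaded from the JSON file (see the with json_path.open('r') as f: hunk context). Judging only from the keys referenced in the diff above (wf['name'], wf['prompts'], and an optional context read with .get), a workflow entry presumably has roughly the shape sketched below; the values are illustrative, not copied from the repository's JSON.

    # Illustrative shape only, inferred from wf['name'], wf['prompts'] and wf.get('context', '') above;
    # the values are invented, not taken from the repository's workflows JSON.
    example_workflow = {
        "name": "Summarize and Critique",
        "context": "You will work through a document in stages.",
        "prompts": [
            "Step 1: Summarize the provided text.",
            "Step 2: List the three strongest claims.",
            "Step 3: Critique the weakest claim.",
        ],
    }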
App_Function_Libraries/Gradio_UI/Chat_ui.py CHANGED
@@ -95,20 +95,21 @@ def chat_wrapper(message, history, media_content, selected_parts, api_endpoint,
 # Generate bot response
 bot_message = chat(full_message, history, media_content, selected_parts, api_endpoint, api_key, custom_prompt,
 temperature, system_prompt)
+
 logging.debug(f"Bot message being returned: {bot_message}")
+
 if save_conversation:
 # Add assistant message to the database
 add_chat_message(conversation_id, "assistant", bot_message)

 # Update history
-history.append((message, bot_message))
+new_history = history + [(message, bot_message)]

-return bot_message, history, conversation_id
+return bot_message, new_history, conversation_id
 except Exception as e:
 logging.error(f"Error in chat wrapper: {str(e)}")
 return "An error occurred.", history, conversation_id

-
 def search_conversations(query):
 try:
 conversations = search_chat_conversations(query)
@@ -226,6 +227,7 @@ def create_chat_interface():
 delete_message_id = gr.Number(label="Message ID to Delete", visible=False)
 delete_message_button = gr.Button("Delete Message", visible=False)

+chat_media_name = gr.Textbox(label="Custom Chat Name(optional)")
 save_chat_history_to_db = gr.Button("Save Chat History to DataBase")
 save_chat_history_as_file = gr.Button("Save Chat History as File")
 download_file = gr.File(label="Download Chat History")
@@ -348,7 +350,7 @@ def create_chat_interface():

 save_chat_history_to_db.click(
 save_chat_history_to_db_wrapper,
-inputs=[chatbot, conversation_id, media_content],
+inputs=[chatbot, conversation_id, media_content, chat_media_name],
 outputs=[conversation_id, gr.Textbox(label="Save Status")]
 )

@@ -416,13 +418,7 @@ def create_chat_interface_stacked():
 with gr.Column():
 submit = gr.Button("Submit")
 clear_chat_button = gr.Button("Clear Chat")
-
-edit_message_id = gr.Number(label="Message ID to Edit", visible=False)
-edit_message_text = gr.Textbox(label="Edit Message", visible=False)
-update_message_button = gr.Button("Update Message", visible=False)
-
-delete_message_id = gr.Number(label="Message ID to Delete", visible=False)
-delete_message_button = gr.Button("Delete Message", visible=False)
+chat_media_name = gr.Textbox(label="Custom Chat Name(optional)", visible=True)
 save_chat_history_to_db = gr.Button("Save Chat History to DataBase")
 save_chat_history_as_file = gr.Button("Save Chat History as File")
 with gr.Column():
@@ -500,18 +496,6 @@ def create_chat_interface_stacked():
 outputs=[chat_history]
 )

-update_message_button.click(
-update_message_in_chat,
-inputs=[edit_message_id, edit_message_text, chat_history],
-outputs=[chatbot]
-)
-
-delete_message_button.click(
-delete_message_from_chat,
-inputs=[delete_message_id, chat_history],
-outputs=[chatbot]
-)
-
 save_chat_history_as_file.click(
 save_chat_history,
 inputs=[chatbot, conversation_id],
@@ -520,13 +504,10 @@ def create_chat_interface_stacked():

 save_chat_history_to_db.click(
 save_chat_history_to_db_wrapper,
-inputs=[chatbot, conversation_id, media_content],
+inputs=[chatbot, conversation_id, media_content, chat_media_name],
 outputs=[conversation_id, gr.Textbox(label="Save Status")]
 )

-chatbot.select(show_edit_message, None, [edit_message_text, edit_message_id, update_message_button])
-chatbot.select(show_delete_message, None, [delete_message_id, delete_message_button])
-

 # FIXME - System prompts
 def create_chat_interface_multi_api():
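
The behavioural change in chat_wrapper above is that the history is no longer mutated with history.append(...); a new list is built and returned instead, presumably to avoid side effects on the list object Gradio passes in and to keep the returned value distinct from the caller's state. A minimal, codebase-independent illustration of the pattern:

    # Minimal illustration of the pattern (not project code): build and return a new history list
    # instead of mutating the one passed in, so the input list is left untouched.
    def respond(message, history):
        bot_message = f"echo: {message}"                    # stand-in for the real chat() call
        new_history = history + [(message, bot_message)]    # new list; `history` is not modified
        return bot_message, new_history

    history = []
    _, history = respond("hello", history)
    print(history)  # [('hello', 'echo: hello')]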