Sergidev committed on
Commit
f6fb494
·
1 Parent(s): 6862403
Files changed (2) hide show
  1. app.py +5 -10
  2. modules/pmbl.py +19 -54
app.py CHANGED
@@ -3,21 +3,17 @@ import gradio as gr
3
  import spaces
4
  import json
5
  from modules.pmbl import PMBL
6
- from huggingface_hub import HfApi
7
 
8
  # Initialize the PMBL instance
9
  pmbl = PMBL("Qwen/QwQ-32B-GGUF")
10
 
11
- # Check if we're running on Hugging Face Spaces
12
- IS_HF_SPACE = os.environ.get("SPACE_ID") is not None
13
- DATASET_REPO = "Sergidev/PMBMemory"
14
- api = HfApi()
15
-
16
- # Define theme
17
  theme = gr.themes.Soft(
18
  primary_hue="red",
19
  secondary_hue="purple",
20
- ).set(
 
 
21
  body_background_fill="linear-gradient(to bottom right, #222222, #333333)",
22
  block_background_fill="#1e1e1e",
23
  button_primary_background_fill="#59788E",
@@ -27,7 +23,6 @@ theme = gr.themes.Soft(
27
  block_border_width="1px",
28
  block_border_color="#ccc",
29
  block_radius="5px",
30
- text_color="#f0f8ff",
31
  )
32
 
33
  # CSS for customizing the chat interface
@@ -81,7 +76,7 @@ def generate_response(message, history, memory_mode):
81
  response += chunk
82
  yield response
83
 
84
- # Save the conversation to history
85
  pmbl.save_chat(message, response)
86
 
87
  # Process and organize chat history
 
3
  import spaces
4
  import json
5
  from modules.pmbl import PMBL
 
6
 
7
  # Initialize the PMBL instance
8
  pmbl = PMBL("Qwen/QwQ-32B-GGUF")
9
 
10
+ # Define theme with compatible settings
 
 
 
 
 
11
  theme = gr.themes.Soft(
12
  primary_hue="red",
13
  secondary_hue="purple",
14
+ )
15
+ # Use update instead of set for theme configuration
16
+ theme = theme.update(
17
  body_background_fill="linear-gradient(to bottom right, #222222, #333333)",
18
  block_background_fill="#1e1e1e",
19
  button_primary_background_fill="#59788E",
 
23
  block_border_width="1px",
24
  block_border_color="#ccc",
25
  block_radius="5px",
 
26
  )
27
 
28
  # CSS for customizing the chat interface
 
76
  response += chunk
77
  yield response
78
 
79
+ # Save the conversation to local history only
80
  pmbl.save_chat(message, response)
81
 
82
  # Process and organize chat history
modules/pmbl.py CHANGED
@@ -1,73 +1,38 @@
1
  import json
2
- import asyncio
3
- from datetime import datetime
4
- from huggingface_hub import HfApi
5
  import os
 
6
  from ctransformers import AutoModelForCausalLM
7
 
8
- # Check if we're running on Hugging Face Spaces
9
- IS_HF_SPACE = os.environ.get("SPACE_ID") is not None
10
- DATASET_REPO = "Sergidev/PMBMemory"
11
- api = HfApi()
12
-
13
  class PMBL:
14
  def __init__(self, model_path):
15
  self.model_path = model_path
16
- self.init_lock = asyncio.Lock()
17
  self.chat_history = []
18
- # Initialize history immediately without asyncio task
19
  self._load_history_sync()
20
 
21
  def _load_history_sync(self):
22
- """Load chat history synchronously from Hugging Face dataset"""
23
- if IS_HF_SPACE:
24
- try:
25
- # Check if the chat history file exists in the dataset
26
- files = api.list_repo_files(DATASET_REPO)
27
- if "chat_history.json" in files:
28
- # Download the file
29
- content = api.hf_hub_download(DATASET_REPO, "chat_history.json")
30
- with open(content, 'r') as f:
31
- self.chat_history = json.load(f)
32
- print(f"Loaded {len(self.chat_history)} chat records")
33
- else:
34
- print("No chat history found, starting with empty history")
35
- self.chat_history = []
36
- except Exception as e:
37
- print(f"Error loading chat history: {e}")
38
- self.chat_history = []
39
- else:
40
- # Local development mode
41
  if os.path.exists("chat_history.json"):
42
  with open("chat_history.json", 'r') as f:
43
  self.chat_history = json.load(f)
 
44
  else:
 
45
  self.chat_history = []
 
 
 
46
 
47
  def save_history_sync(self):
48
- """Save chat history synchronously to Hugging Face dataset"""
49
- if IS_HF_SPACE:
50
- try:
51
- # Create a temporary file
52
- with open("temp_history.json", 'w') as f:
53
- json.dump(self.chat_history, f)
54
-
55
- # Upload the file to the dataset
56
- api.upload_file(
57
- path_or_fileobj="temp_history.json",
58
- path_in_repo="chat_history.json",
59
- repo_id=DATASET_REPO,
60
- repo_type="dataset"
61
- )
62
-
63
- # Clean up
64
- os.remove("temp_history.json")
65
- except Exception as e:
66
- print(f"Error saving chat history: {e}")
67
- else:
68
- # Local development mode
69
  with open("chat_history.json", 'w') as f:
70
  json.dump(self.chat_history, f)
 
 
71
 
72
  def process_history(self, history, mode, user_message):
73
  """Process the chat history based on the mode"""
@@ -171,7 +136,7 @@ class PMBL:
171
  yield chunk
172
 
173
  def save_chat(self, prompt, response):
174
- """Save chat to history synchronously"""
175
  # Add the new chat to history
176
  chat_entry = {
177
  "timestamp": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
@@ -182,11 +147,11 @@ class PMBL:
182
 
183
  self.chat_history.append(chat_entry)
184
 
185
- # Save the updated history
186
  self.save_history_sync()
187
 
188
  def sleep_mode(self):
189
- """Process and organize chat history synchronously"""
190
  # Find chats without specific topics
191
  untitled_chats = [
192
  (i, chat) for i, chat in enumerate(self.chat_history)
@@ -206,7 +171,7 @@ class PMBL:
206
  self.save_history_sync()
207
 
208
  def _generate_topic_sync(self, prompt, response):
209
- """Generate a topic for a chat using the model synchronously"""
210
  try:
211
  # Use a very small context for topic generation to save resources
212
  model = AutoModelForCausalLM.from_pretrained(
 
1
  import json
 
 
 
2
  import os
3
+ from datetime import datetime
4
  from ctransformers import AutoModelForCausalLM
5
 
 
 
 
 
 
6
  class PMBL:
7
  def __init__(self, model_path):
8
  self.model_path = model_path
 
9
  self.chat_history = []
10
+ # Load local history only
11
  self._load_history_sync()
12
 
13
  def _load_history_sync(self):
14
+ """Load chat history from local file"""
15
+ try:
16
+ # Local storage only
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
17
  if os.path.exists("chat_history.json"):
18
  with open("chat_history.json", 'r') as f:
19
  self.chat_history = json.load(f)
20
+ print(f"Loaded {len(self.chat_history)} chat records")
21
  else:
22
+ print("No chat history found, starting with empty history")
23
  self.chat_history = []
24
+ except Exception as e:
25
+ print(f"Error loading chat history: {e}")
26
+ self.chat_history = []
27
 
28
  def save_history_sync(self):
29
+ """Save chat history to local file"""
30
+ try:
31
+ # Local storage only
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
32
  with open("chat_history.json", 'w') as f:
33
  json.dump(self.chat_history, f)
34
+ except Exception as e:
35
+ print(f"Error saving chat history: {e}")
36
 
37
  def process_history(self, history, mode, user_message):
38
  """Process the chat history based on the mode"""
 
136
  yield chunk
137
 
138
  def save_chat(self, prompt, response):
139
+ """Save chat to history"""
140
  # Add the new chat to history
141
  chat_entry = {
142
  "timestamp": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
 
147
 
148
  self.chat_history.append(chat_entry)
149
 
150
+ # Save the updated history locally only
151
  self.save_history_sync()
152
 
153
  def sleep_mode(self):
154
+ """Process and organize chat history"""
155
  # Find chats without specific topics
156
  untitled_chats = [
157
  (i, chat) for i, chat in enumerate(self.chat_history)
 
171
  self.save_history_sync()
172
 
173
  def _generate_topic_sync(self, prompt, response):
174
+ """Generate a topic for a chat using the model"""
175
  try:
176
  # Use a very small context for topic generation to save resources
177
  model = AutoModelForCausalLM.from_pretrained(