oceansweep committed on
Commit
968f616
1 Parent(s): 17c7477

Upload 3 files

Browse files
App_Function_Libraries/DB/DB_Manager.py ADDED
@@ -0,0 +1,535 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import configparser
2
+ import logging
3
+ import os
4
+ from contextlib import contextmanager
5
+ from time import sleep
6
+ from typing import Tuple
7
+ import sqlite3
8
+ # 3rd-Party Libraries
9
+ from elasticsearch import Elasticsearch
10
+
11
+ ############################################################################################################
12
+ #
13
+ # This file contains the DatabaseManager class, which is responsible for managing the database connection, i.e. either SQLite or Elasticsearch.
14
+
15
+ ####
16
+ # The DatabaseManager class provides the following methods:
17
+ # - add_media: Add a new media item to the database
18
+ # - fetch_items_by_keyword: Fetch media items from the database based on a keyword
19
+ # - fetch_item_details: Fetch details of a specific media item from the database
20
+ # - update_media_content: Update the content of a specific media item in the database
21
+ # - search_and_display_items: Search for media items in the database and display the results
22
+ # - close_connection: Close the database connection
23
+ ####
24
+
25
+ # Import your existing SQLite functions
26
+ from SQLite_DB import (
27
+ update_media_content as sqlite_update_media_content,
28
+ list_prompts as sqlite_list_prompts,
29
+ search_and_display as sqlite_search_and_display,
30
+ fetch_prompt_details as sqlite_fetch_prompt_details,
31
+ keywords_browser_interface as sqlite_keywords_browser_interface,
32
+ add_keyword as sqlite_add_keyword,
33
+ delete_keyword as sqlite_delete_keyword,
34
+ export_keywords_to_csv as sqlite_export_keywords_to_csv,
35
+ ingest_article_to_db as sqlite_ingest_article_to_db,
36
+ add_media_to_database as sqlite_add_media_to_database,
37
+ import_obsidian_note_to_db as sqlite_import_obsidian_note_to_db,
38
+ add_prompt as sqlite_add_prompt,
39
+ delete_chat_message as sqlite_delete_chat_message,
40
+ update_chat_message as sqlite_update_chat_message,
41
+ add_chat_message as sqlite_add_chat_message,
42
+ get_chat_messages as sqlite_get_chat_messages,
43
+ search_chat_conversations as sqlite_search_chat_conversations,
44
+ create_chat_conversation as sqlite_create_chat_conversation,
45
+ save_chat_history_to_database as sqlite_save_chat_history_to_database,
46
+ view_database as sqlite_view_database,
47
+ get_transcripts as sqlite_get_transcripts,
48
+ get_trashed_items as sqlite_get_trashed_items,
49
+ user_delete_item as sqlite_user_delete_item,
50
+ empty_trash as sqlite_empty_trash,
51
+ create_automated_backup as sqlite_create_automated_backup,
52
+ add_or_update_prompt as sqlite_add_or_update_prompt,
53
+ load_prompt_details as sqlite_load_prompt_details,
54
+ load_preset_prompts as sqlite_load_preset_prompts,
55
+ insert_prompt_to_db as sqlite_insert_prompt_to_db,
56
+ delete_prompt as sqlite_delete_prompt,
57
+ search_and_display_items as sqlite_search_and_display_items,
58
+ get_conversation_name as sqlite_get_conversation_name,
59
+ add_media_with_keywords as sqlite_add_media_with_keywords,
60
+ check_media_and_whisper_model as sqlite_check_media_and_whisper_model,
61
+ DatabaseError, create_document_version as sqlite_create_document_version,
62
+ get_document_version as sqlite_get_document_version
63
+ )
64
+
65
class Database:
    """A small SQLite connection pool with retry-on-lock semantics.

    Connections are created lazily, handed out via the ``get_connection``
    context manager, and recycled into ``self.pool`` (bounded by
    ``self.pool_size``) when the caller finishes with them.
    """

    def __init__(self, db_path=None):
        # Database file path; falls back to the DB_NAME env var, then a default.
        self.db_path = db_path or os.getenv('DB_NAME', 'media_summary.db')
        self.pool = []       # idle, reusable connections
        self.pool_size = 10  # maximum number of idle connections retained

    @contextmanager
    def get_connection(self):
        """Yield a pooled SQLite connection, retrying while the DB is locked.

        On success the connection is returned to the pool (up to
        ``pool_size`` idle connections; surplus ones are closed). On any
        failure the connection is closed instead of being recycled, so a
        broken or still-locked connection can never be handed out again
        (the original implementation re-pooled failed connections).

        Raises:
            DatabaseError: on SQLite errors, unexpected errors raised by the
                caller's ``with`` body, or when lock retries are exhausted.
        """
        retry_count = 5
        retry_delay = 1
        while retry_count > 0:
            conn = None
            try:
                conn = self.pool.pop() if self.pool else sqlite3.connect(
                    self.db_path, check_same_thread=False)
                yield conn
            except sqlite3.OperationalError as e:
                if conn:
                    conn.close()
                if 'database is locked' in str(e):
                    logging.warning(f"Database is locked, retrying in {retry_delay} seconds...")
                    retry_count -= 1
                    sleep(retry_delay)
                    continue
                raise DatabaseError(f"Database error: {e}")
            except Exception as e:
                if conn:
                    conn.close()
                raise DatabaseError(f"Unexpected error: {e}")
            else:
                # Success path: recycle the connection unless the pool is full.
                if len(self.pool) < self.pool_size:
                    self.pool.append(conn)
                else:
                    conn.close()
                return
        raise DatabaseError("Database is locked and retries have been exhausted")

    def execute_query(self, query: str, params: Tuple = ()) -> None:
        """Execute a single statement and commit.

        Args:
            query: SQL text, optionally with ``?`` placeholders.
            params: Bound parameters for the placeholders.

        Raises:
            DatabaseError: wrapping any sqlite3 error, with the query included.
        """
        with self.get_connection() as conn:
            try:
                cursor = conn.cursor()
                cursor.execute(query, params)
                conn.commit()
            except sqlite3.Error as e:
                raise DatabaseError(f"Database error: {e}, Query: {query}")

    def close_all_connections(self):
        """Close every idle pooled connection and empty the pool."""
        for conn in self.pool:
            conn.close()
        self.pool.clear()
110
+
111
def get_db_config(config_path='config.txt'):
    """Read database settings from an INI-style config file.

    Args:
        config_path: Path to the config file. Defaults to 'config.txt' in
            the current working directory, preserving the original
            hard-coded behavior for existing callers.

    Returns:
        dict with keys 'type', 'sqlite_path', 'elasticsearch_host' and
        'elasticsearch_port'; the last three have sensible fallbacks.

    Raises:
        KeyError: if the [Database] section or its 'type' option is missing
            (the file itself being absent also ends up here, since
            ConfigParser.read silently ignores missing files).
    """
    config = configparser.ConfigParser()
    config.read(config_path)
    return {
        'type': config['Database']['type'],
        'sqlite_path': config.get('Database', 'sqlite_path', fallback='media_summary.db'),
        'elasticsearch_host': config.get('Database', 'elasticsearch_host', fallback='localhost'),
        'elasticsearch_port': config.getint('Database', 'elasticsearch_port', fallback=9200)
    }
120
+
121
# Load configuration once at import time; every dispatcher in this module
# keys off the module-level db_type below.
db_config = get_db_config()
db_type = db_config['type']

if db_type == 'sqlite':
    # Use the config path if provided, otherwise fall back to default
    db = Database(db_config.get('sqlite_path'))
elif db_type == 'elasticsearch':
    # Elasticsearch client shared by the (not yet implemented) ES dispatchers.
    es = Elasticsearch([{
        'host': db_config['elasticsearch_host'],
        'port': db_config['elasticsearch_port']
    }])
else:
    raise ValueError(f"Unsupported database type: {db_type}")

# Module-level convenience alias for the configured SQLite file path.
db_path = db_config['sqlite_path']

# Update this path to the directory where you want to store the database backups
backup_dir = os.environ.get('DB_BACKUP_DIR', 'path/to/backup/directory')




# NOTE(review): this block duplicates the backend selection above and opens a
# raw sqlite3 connection/cursor that nothing in this module visibly uses —
# presumably leftover scaffolding; confirm no importer relies on the
# module-level 'conn'/'cursor' names before removing.
if db_type == 'sqlite':
    conn = sqlite3.connect(db_config['sqlite_path'])
    cursor = conn.cursor()
elif db_type == 'elasticsearch':
    es = Elasticsearch([{
        'host': db_config['elasticsearch_host'],
        'port': db_config['elasticsearch_port']
    }])
else:
    raise ValueError(f"Unsupported database type: {db_type}")
153
+
154
+ ############################################################################################################
155
+ #
156
+ # DB-Searching functions
157
+
158
def view_database(*args, **kwargs):
    """Dispatch view_database to the configured backend."""
    if db_type == 'sqlite':
        return sqlite_view_database(*args, **kwargs)
    elif db_type == 'elasticsearch':
        # Implement Elasticsearch version
        # (message fixed: it previously named add_media_with_keywords)
        raise NotImplementedError("Elasticsearch version of view_database not yet implemented")


def search_and_display_items(*args, **kwargs):
    """Dispatch search_and_display_items to the configured backend."""
    if db_type == 'sqlite':
        return sqlite_search_and_display_items(*args, **kwargs)
    elif db_type == 'elasticsearch':
        # Implement Elasticsearch version
        raise NotImplementedError("Elasticsearch version of search_and_display_items not yet implemented")


def search_and_display(*args, **kwargs):
    """Dispatch search_and_display to the configured backend."""
    if db_type == 'sqlite':
        return sqlite_search_and_display(*args, **kwargs)
    elif db_type == 'elasticsearch':
        # Implement Elasticsearch version
        raise NotImplementedError("Elasticsearch version of search_and_display not yet implemented")
178
+
179
+ #
180
+ # End of DB-Searching functions
181
+ ############################################################################################################
182
+
183
+ ############################################################################################################
184
+ #
185
+ # Transcript-related Functions
186
+
187
def get_transcripts(*args, **kwargs):
    """Dispatch get_transcripts to the configured backend."""
    if db_type == 'sqlite':
        return sqlite_get_transcripts(*args, **kwargs)
    elif db_type == 'elasticsearch':
        # Implement Elasticsearch version
        # (message fixed: it previously named add_media_with_keywords)
        raise NotImplementedError("Elasticsearch version of get_transcripts not yet implemented")
193
+
194
+ #
195
+ # End of Transcript-related Functions
196
+ ############################################################################################################
197
+
198
+ ############################################################################################################
199
+ #
200
+ # DB-Ingestion functions
201
+
202
def add_media_to_database(*args, **kwargs):
    """Add a media item via the configured backend and record an initial
    document version.

    All arguments are forwarded unchanged to the SQLite implementation
    (expected positional order: url, info_dict, segments, summary, keywords,
    custom_prompt_input, whisper_model).

    Returns:
        The backend's status string, expected to look like
        "Media 'Title' added/updated successfully with ID: {media_id}".
    """
    if db_type == 'sqlite':
        import re  # hoisted from mid-function; local to avoid touching module imports

        result = sqlite_add_media_to_database(*args, **kwargs)

        # Locate the transcript segments whether they were passed positionally
        # or by keyword. The original indexed args[2] unconditionally, which
        # raised IndexError for keyword-style calls despite the **kwargs
        # signature advertising support for them.
        segments = kwargs.get('segments', args[2] if len(args) > 2 else None)

        # Flatten the segments into a single content string.
        if isinstance(segments, list):
            content = ' '.join(segment.get('Text', '') for segment in segments if 'Text' in segment)
        elif isinstance(segments, dict):
            content = segments.get('text', '') or segments.get('content', '')
        else:
            content = str(segments)

        # Extract media_id from the backend's result string.
        match = re.search(r"with ID: (\d+)", result)
        if match:
            media_id = int(match.group(1))

            # Create initial document version
            sqlite_create_document_version(media_id, content)

        return result
    elif db_type == 'elasticsearch':
        # Implement Elasticsearch version
        raise NotImplementedError("Elasticsearch version of add_media_to_database not yet implemented")
229
+
230
+
231
def import_obsidian_note_to_db(*args, **kwargs):
    """Dispatch import_obsidian_note_to_db to the configured backend."""
    if db_type == 'sqlite':
        return sqlite_import_obsidian_note_to_db(*args, **kwargs)
    elif db_type == 'elasticsearch':
        # Implement Elasticsearch version
        # (message fixed: it previously named add_media_with_keywords)
        raise NotImplementedError("Elasticsearch version of import_obsidian_note_to_db not yet implemented")
237
+
238
+
239
def update_media_content(*args, **kwargs):
    """Update a media item's content via the configured backend and record
    a new document version.

    All arguments are forwarded unchanged to the SQLite implementation
    (expected positional order: selected_item, item_mapping, content_input,
    ...).

    Returns:
        The backend's result value.
    """
    if db_type == 'sqlite':
        result = sqlite_update_media_content(*args, **kwargs)

        # The original indexed args[0..2] unconditionally, which raised
        # IndexError when fewer than three positionals were supplied even
        # though the signature accepts arbitrary arguments; guard the length.
        selected_item = args[0] if len(args) > 0 else None
        item_mapping = args[1] if len(args) > 1 else None
        content_input = args[2] if len(args) > 2 else None

        if selected_item and item_mapping and selected_item in item_mapping:
            media_id = item_mapping[selected_item]

            # Create new document version
            sqlite_create_document_version(media_id, content_input)

        return result
    elif db_type == 'elasticsearch':
        # Implement Elasticsearch version
        raise NotImplementedError("Elasticsearch version of update_media_content not yet implemented")
258
+
259
+
260
def add_media_with_keywords(*args, **kwargs):
    """Forward add_media_with_keywords to the active backend."""
    if db_type == 'elasticsearch':
        raise NotImplementedError("Elasticsearch version of add_media_with_keywords not yet implemented")
    if db_type == 'sqlite':
        return sqlite_add_media_with_keywords(*args, **kwargs)


def check_media_and_whisper_model(*args, **kwargs):
    """Forward check_media_and_whisper_model to the active backend."""
    if db_type == 'elasticsearch':
        raise NotImplementedError("Elasticsearch version of check_media_and_whisper_model not yet implemented")
    if db_type == 'sqlite':
        return sqlite_check_media_and_whisper_model(*args, **kwargs)
271
+
272
def ingest_article_to_db(url, title, author, content, keywords, summary, ingestion_date, custom_prompt):
    """Persist a scraped article through the active database backend."""
    if db_type == 'elasticsearch':
        # Implement Elasticsearch version
        raise NotImplementedError("Elasticsearch version of ingest_article_to_db not yet implemented")
    if db_type == 'sqlite':
        return sqlite_ingest_article_to_db(
            url, title, author, content, keywords, summary, ingestion_date, custom_prompt
        )
    raise ValueError(f"Unsupported database type: {db_type}")
280
+
281
+ #
282
+ # End of DB-Ingestion functions
283
+ ############################################################################################################
284
+
285
+
286
+ ############################################################################################################
287
+ #
288
+ # Prompt-related functions
289
+
290
# All NotImplementedError messages below previously named
# add_media_with_keywords (copy-paste); each now names its own function.

def list_prompts(*args, **kwargs):
    """Dispatch list_prompts to the configured backend."""
    if db_type == 'sqlite':
        return sqlite_list_prompts(*args, **kwargs)
    elif db_type == 'elasticsearch':
        # Implement Elasticsearch version
        raise NotImplementedError("Elasticsearch version of list_prompts not yet implemented")


def fetch_prompt_details(*args, **kwargs):
    """Dispatch fetch_prompt_details to the configured backend."""
    if db_type == 'sqlite':
        return sqlite_fetch_prompt_details(*args, **kwargs)
    elif db_type == 'elasticsearch':
        # Implement Elasticsearch version
        raise NotImplementedError("Elasticsearch version of fetch_prompt_details not yet implemented")


def add_prompt(*args, **kwargs):
    """Dispatch add_prompt to the configured backend."""
    if db_type == 'sqlite':
        return sqlite_add_prompt(*args, **kwargs)
    elif db_type == 'elasticsearch':
        # Implement Elasticsearch version
        raise NotImplementedError("Elasticsearch version of add_prompt not yet implemented")


def add_or_update_prompt(*args, **kwargs):
    """Dispatch add_or_update_prompt to the configured backend."""
    if db_type == 'sqlite':
        return sqlite_add_or_update_prompt(*args, **kwargs)
    elif db_type == 'elasticsearch':
        # Implement Elasticsearch version
        raise NotImplementedError("Elasticsearch version of add_or_update_prompt not yet implemented")


def load_prompt_details(*args, **kwargs):
    """Dispatch load_prompt_details to the configured backend."""
    if db_type == 'sqlite':
        return sqlite_load_prompt_details(*args, **kwargs)
    elif db_type == 'elasticsearch':
        # Implement Elasticsearch version
        raise NotImplementedError("Elasticsearch version of load_prompt_details not yet implemented")


def load_preset_prompts(*args, **kwargs):
    """Dispatch load_preset_prompts to the configured backend."""
    if db_type == 'sqlite':
        return sqlite_load_preset_prompts(*args, **kwargs)
    elif db_type == 'elasticsearch':
        # Implement Elasticsearch version
        raise NotImplementedError("Elasticsearch version of load_preset_prompts not yet implemented")


def insert_prompt_to_db(*args, **kwargs):
    """Dispatch insert_prompt_to_db to the configured backend."""
    if db_type == 'sqlite':
        return sqlite_insert_prompt_to_db(*args, **kwargs)
    elif db_type == 'elasticsearch':
        # Implement Elasticsearch version
        raise NotImplementedError("Elasticsearch version of insert_prompt_to_db not yet implemented")


def delete_prompt(*args, **kwargs):
    """Dispatch delete_prompt to the configured backend."""
    if db_type == 'sqlite':
        return sqlite_delete_prompt(*args, **kwargs)
    elif db_type == 'elasticsearch':
        # Implement Elasticsearch version
        raise NotImplementedError("Elasticsearch version of delete_prompt not yet implemented")
347
+
348
+ #
349
+ # End of Prompt-related functions
350
+ ############################################################################################################
351
+
352
+ ############################################################################################################
353
+ #
354
+ # Keywords-related Functions
355
+
356
def keywords_browser_interface(*args, **kwargs):
    """Dispatch keywords_browser_interface to the configured backend."""
    if db_type == 'sqlite':
        return sqlite_keywords_browser_interface(*args, **kwargs)
    elif db_type == 'elasticsearch':
        # Implement Elasticsearch version
        # (message fixed: it previously named add_media_with_keywords)
        raise NotImplementedError("Elasticsearch version of keywords_browser_interface not yet implemented")


def add_keyword(*args, **kwargs):
    """Dispatch add_keyword to the configured backend.

    The original acquired a pooled connection and cursor here without ever
    using them — sqlite_add_keyword manages its own connection — so that
    dead (and pool-tying) acquisition has been removed.
    """
    if db_type == 'sqlite':
        return sqlite_add_keyword(*args, **kwargs)
    elif db_type == 'elasticsearch':
        # Implement Elasticsearch version
        raise NotImplementedError("Elasticsearch version of add_keyword not yet implemented")


def delete_keyword(*args, **kwargs):
    """Dispatch delete_keyword to the configured backend."""
    if db_type == 'sqlite':
        return sqlite_delete_keyword(*args, **kwargs)
    elif db_type == 'elasticsearch':
        # Implement Elasticsearch version
        raise NotImplementedError("Elasticsearch version of delete_keyword not yet implemented")


def export_keywords_to_csv(*args, **kwargs):
    """Dispatch export_keywords_to_csv to the configured backend."""
    if db_type == 'sqlite':
        return sqlite_export_keywords_to_csv(*args, **kwargs)
    elif db_type == 'elasticsearch':
        # Implement Elasticsearch version
        raise NotImplementedError("Elasticsearch version of export_keywords_to_csv not yet implemented")
385
+
386
+ #
387
+ # End of Keywords-related Functions
388
+ ############################################################################################################
389
+
390
+ ############################################################################################################
391
+ #
392
+ # Chat-related Functions
393
+
394
# All NotImplementedError messages below previously named
# add_media_with_keywords (copy-paste); each now names its own function.

def delete_chat_message(*args, **kwargs):
    """Dispatch delete_chat_message to the configured backend."""
    if db_type == 'sqlite':
        return sqlite_delete_chat_message(*args, **kwargs)
    elif db_type == 'elasticsearch':
        # Implement Elasticsearch version
        raise NotImplementedError("Elasticsearch version of delete_chat_message not yet implemented")


def update_chat_message(*args, **kwargs):
    """Dispatch update_chat_message to the configured backend."""
    if db_type == 'sqlite':
        return sqlite_update_chat_message(*args, **kwargs)
    elif db_type == 'elasticsearch':
        # Implement Elasticsearch version
        raise NotImplementedError("Elasticsearch version of update_chat_message not yet implemented")


def add_chat_message(*args, **kwargs):
    """Dispatch add_chat_message to the configured backend."""
    if db_type == 'sqlite':
        return sqlite_add_chat_message(*args, **kwargs)
    elif db_type == 'elasticsearch':
        # Implement Elasticsearch version
        raise NotImplementedError("Elasticsearch version of add_chat_message not yet implemented")


def get_chat_messages(*args, **kwargs):
    """Dispatch get_chat_messages to the configured backend."""
    if db_type == 'sqlite':
        return sqlite_get_chat_messages(*args, **kwargs)
    elif db_type == 'elasticsearch':
        # Implement Elasticsearch version
        raise NotImplementedError("Elasticsearch version of get_chat_messages not yet implemented")


def search_chat_conversations(*args, **kwargs):
    """Dispatch search_chat_conversations to the configured backend."""
    if db_type == 'sqlite':
        return sqlite_search_chat_conversations(*args, **kwargs)
    elif db_type == 'elasticsearch':
        # Implement Elasticsearch version
        raise NotImplementedError("Elasticsearch version of search_chat_conversations not yet implemented")


def create_chat_conversation(*args, **kwargs):
    """Dispatch create_chat_conversation to the configured backend."""
    if db_type == 'sqlite':
        return sqlite_create_chat_conversation(*args, **kwargs)
    elif db_type == 'elasticsearch':
        # Implement Elasticsearch version
        raise NotImplementedError("Elasticsearch version of create_chat_conversation not yet implemented")


def save_chat_history_to_database(*args, **kwargs):
    """Dispatch save_chat_history_to_database to the configured backend."""
    if db_type == 'sqlite':
        return sqlite_save_chat_history_to_database(*args, **kwargs)
    elif db_type == 'elasticsearch':
        # Implement Elasticsearch version
        raise NotImplementedError("Elasticsearch version of save_chat_history_to_database not yet implemented")


def get_conversation_name(*args, **kwargs):
    """Dispatch get_conversation_name to the configured backend."""
    if db_type == 'sqlite':
        return sqlite_get_conversation_name(*args, **kwargs)
    elif db_type == 'elasticsearch':
        # Implement Elasticsearch version
        raise NotImplementedError("Elasticsearch version of get_conversation_name not yet implemented")
449
+
450
+ #
451
+ # End of Chat-related Functions
452
+ ############################################################################################################
453
+
454
+ ############################################################################################################
455
+ #
456
+ # Trash-related Functions
457
+
458
def get_trashed_items(*args, **kwargs):
    """Dispatch get_trashed_items to the configured backend."""
    if db_type == 'sqlite':
        return sqlite_get_trashed_items(*args, **kwargs)
    elif db_type == 'elasticsearch':
        # Implement Elasticsearch version
        # (message fixed: it previously named add_media_with_keywords)
        raise NotImplementedError("Elasticsearch version of get_trashed_items not yet implemented")


def user_delete_item(*args, **kwargs):
    """Dispatch user_delete_item to the configured backend."""
    if db_type == 'sqlite':
        return sqlite_user_delete_item(*args, **kwargs)
    elif db_type == 'elasticsearch':
        # Implement Elasticsearch version
        raise NotImplementedError("Elasticsearch version of user_delete_item not yet implemented")


def empty_trash(*args, **kwargs):
    """Dispatch empty_trash to the configured backend."""
    if db_type == 'sqlite':
        return sqlite_empty_trash(*args, **kwargs)
    elif db_type == 'elasticsearch':
        # Implement Elasticsearch version
        raise NotImplementedError("Elasticsearch version of empty_trash not yet implemented")
478
+
479
+ #
480
+ # End of Trash-related Functions
481
+ ############################################################################################################
482
+
483
+
484
+ ############################################################################################################
485
+ #
486
+ # DB-Backup Functions
487
+
488
def create_automated_backup(*args, **kwargs):
    """Dispatch create_automated_backup to the configured backend."""
    if db_type == 'sqlite':
        return sqlite_create_automated_backup(*args, **kwargs)
    elif db_type == 'elasticsearch':
        # Implement Elasticsearch version
        # (message fixed: it previously named add_media_with_keywords)
        raise NotImplementedError("Elasticsearch version of create_automated_backup not yet implemented")
494
+
495
+ #
496
+ # End of DB-Backup Functions
497
+ ############################################################################################################
498
+
499
+
500
+ ############################################################################################################
501
+ #
502
+ # Document Versioning Functions
503
+
504
def create_document_version(*args, **kwargs):
    """Record a new version of a document via the active backend."""
    if db_type == 'elasticsearch':
        # Implement Elasticsearch version
        raise NotImplementedError("Elasticsearch version of create_document_version not yet implemented")
    if db_type == 'sqlite':
        return sqlite_create_document_version(*args, **kwargs)


def get_document_version(*args, **kwargs):
    """Fetch a stored document version via the active backend."""
    if db_type == 'elasticsearch':
        # Implement Elasticsearch version
        raise NotImplementedError("Elasticsearch version of get_document_version not yet implemented")
    if db_type == 'sqlite':
        return sqlite_get_document_version(*args, **kwargs)
517
+
518
+ #
519
+ # End of Document Versioning Functions
520
+ ############################################################################################################
521
+
522
+
523
+
524
+ ############################################################################################################
525
+ #
526
+ # Function to close the database connection for SQLite
527
+
528
def close_connection():
    """Release all pooled SQLite connections.

    Elasticsearch doesn't need explicit closing, so this is a no-op for
    that backend.
    """
    if db_type != 'sqlite':
        return
    db.close_all_connections()
532
+
533
+ #
534
+ # End of file
535
+ ############################################################################################################
App_Function_Libraries/DB/SQLite_DB.py ADDED
@@ -0,0 +1,2066 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # SQLite_DB.py
2
+ #########################################
3
+ # SQLite_DB Library
4
+ # This library is used to perform any/all DB operations related to SQLite.
5
+ #
6
+ ####
7
+
8
+ ####################
9
+ # Function List
10
+ # FIXME - UPDATE Function Arguments
11
+ # 1. get_connection(self)
12
+ # 2. execute_query(self, query: str, params: Tuple = ())
13
+ # 3. create_tables()
14
+ # 4. add_keyword(keyword: str)
15
+ # 5. delete_keyword(keyword: str)
16
+ # 6. add_media_with_keywords(url, title, media_type, content, keywords, prompt, summary, transcription_model, author, ingestion_date)
17
+ # 7. fetch_all_keywords()
18
+ # 8. keywords_browser_interface()
19
+ # 9. display_keywords()
20
+ # 10. export_keywords_to_csv()
21
+ # 11. browse_items(search_query, search_type)
22
+ # 12. fetch_item_details(media_id: int)
23
+ # 13. add_media_version(media_id: int, prompt: str, summary: str)
24
+ # 14. search_db(search_query: str, search_fields: List[str], keywords: str, page: int = 1, results_per_page: int = 10)
25
+ # 15. search_and_display(search_query, search_fields, keywords, page)
26
+ # 16. display_details(index, results)
27
+ # 17. get_details(index, dataframe)
28
+ # 18. format_results(results)
29
+ # 19. export_to_csv(search_query: str, search_fields: List[str], keyword: str, page: int = 1, results_per_file: int = 1000)
30
+ # 20. is_valid_url(url: str) -> bool
31
+ # 21. is_valid_date(date_string: str) -> bool
32
+ # 22. add_media_to_database(url, info_dict, segments, summary, keywords, custom_prompt_input, whisper_model)
33
+ # 23. create_prompts_db()
34
+ # 24. add_prompt(name, details, system, user=None)
35
+ # 25. fetch_prompt_details(name)
36
+ # 26. list_prompts()
37
+ # 27. insert_prompt_to_db(title, description, system_prompt, user_prompt)
38
+ # 28. update_media_content(media_id: int, content: str, prompt: str, summary: str)
39
+ # 29. search_media_database(query: str) -> List[Tuple[int, str, str]]
40
+ # 30. load_media_content(media_id: int)
41
+ # 31.
42
+ # 32.
43
+ #
44
+ #
45
+ #####################
46
+ #
47
+ # Import necessary libraries
48
+ import csv
49
+ import html
50
+ import logging
51
+ import os
52
+ import re
53
+ import shutil
54
+ import sqlite3
55
+ import time
56
+ import traceback
57
+ from contextlib import contextmanager
58
+ from datetime import datetime, timedelta
59
+ from typing import List, Tuple, Dict, Any
60
+ # Local Libraries
61
+ from App_Function_Libraries.Utils.Utils import is_valid_url
62
+ # Third-Party Libraries
63
+ import gradio as gr
64
+ import pandas as pd
65
+ import yaml
66
+
67
+
68
+ # Import Local Libraries
69
+ #
70
+ #######################################################################################################################
71
+ # Function Definitions
72
+ #
73
+
74
+
75
+ # Set up logging
76
+ #logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
77
+ #logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
78
+ logger = logging.getLogger(__name__)
79
+
80
+
81
+ #
82
+ # Backup-related functions
83
+
84
def create_incremental_backup(db_path, backup_dir):
    """Create a compact, consistent snapshot of the database via VACUUM INTO.

    Args:
        db_path: Path of the SQLite database to back up.
        backup_dir: Directory where the timestamped .sqlib file is written.

    Returns:
        The full path of the backup file that was created.
    """
    # Create a new backup file name with a timestamp.
    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    backup_file = os.path.join(backup_dir, f"incremental_backup_{timestamp}.sqlib")

    conn = sqlite3.connect(db_path)
    try:
        # VACUUM INTO does not accept bound parameters, so the path must be
        # embedded in the SQL text; escape single quotes to keep it valid.
        # (The original also ran an unused "PRAGMA page_count" query and
        # leaked the connection if VACUUM raised.)
        escaped_path = backup_file.replace("'", "''")
        conn.execute(f"VACUUM INTO '{escaped_path}'")
    finally:
        conn.close()

    print(f"Incremental backup created: {backup_file}")
    return backup_file
102
+
103
+
104
def create_automated_backup(db_path, backup_dir):
    """Copy the SQLite database file into *backup_dir* with a timestamped
    name and return the backup file's path."""
    # Ensure backup directory exists
    os.makedirs(backup_dir, exist_ok=True)

    # Build a timestamped destination name, then copy with metadata intact.
    stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    destination = os.path.join(backup_dir, f"backup_{stamp}.db")
    shutil.copy2(db_path, destination)

    print(f"Backup created: {destination}")
    return destination
117
+
118
+ # FIXME - boto3 aint getting installed by default....
119
+ # def upload_to_s3(file_path, bucket_name, s3_key):
120
+ # import boto3
121
+ # s3 = boto3.client('s3')
122
+ # try:
123
+ # s3.upload_file(file_path, bucket_name, s3_key)
124
+ # print(f"File uploaded to S3: {s3_key}")
125
+ # except Exception as e:
126
+ # print(f"Error uploading to S3: {str(e)}")
127
+
128
+
129
def rotate_backups(backup_dir, max_backups=10):
    """Keep at most `max_backups` .db files in `backup_dir`, deleting oldest first.

    Only files ending in '.db' are considered; anything else is left untouched.
    """
    # Newest first, so the oldest candidates sit at the end of the list.
    candidates = [name for name in os.listdir(backup_dir) if name.endswith('.db')]
    candidates.sort(
        key=lambda name: os.path.getmtime(os.path.join(backup_dir, name)),
        reverse=True,
    )

    # Trim from the tail (oldest) until we are within the retention limit.
    while len(candidates) > max_backups:
        stale = candidates.pop()
        os.remove(os.path.join(backup_dir, stale))
        print(f"Removed old backup: {stale}")
140
+
141
+
142
# FIXME - Setup properly and test/add documentation for its existence...
# NOTE(review): these module-level paths are placeholder defaults used only by
# the commented-out backup calls below — presumably meant to come from real
# configuration; confirm before enabling any of the calls.
db_path = "path/to/your/database.db"
backup_dir = "path/to/backup/directory"
#create_automated_backup(db_path, backup_dir)

# FIXME - Setup properly and test/add documentation for its existence...
#backup_file = create_automated_backup(db_path, backup_dir)
#upload_to_s3(backup_file, 'your-s3-bucket-name', f"database_backups/{os.path.basename(backup_file)}")

# FIXME - Setup properly and test/add documentation for its existence...
#create_incremental_backup(db_path, backup_dir)

# FIXME - Setup properly and test/add documentation for its existence...
#rotate_backups(backup_dir)
156
+
157
+ #
158
+ #
159
+ #######################################################################################################################
160
+ #
161
+ # DB-Integrity Check Functions
162
+
163
def check_database_integrity(db_path):
    """Run SQLite's PRAGMA integrity_check against the database at `db_path`.

    :param db_path: path of the SQLite database file
    :return: True when the check reports "ok", False otherwise
    """
    conn = sqlite3.connect(db_path)
    try:
        result = conn.execute("PRAGMA integrity_check").fetchone()
    finally:
        # Close even if the PRAGMA raises; the original leaked the handle on error.
        conn.close()

    if result[0] == "ok":
        print("Database integrity check passed.")
        return True
    print("Database integrity check failed:", result[0])
    return False
178
+
179
+ #check_database_integrity(db_path)
180
+
181
+ #
182
+ # End of DB-Integrity Check functions
183
+ #######################################################################################################################
184
+ #
185
+ # Media-related Functions
186
+
187
# Custom exceptions
class DatabaseError(Exception):
    """Raised when a database operation fails; wraps the underlying sqlite3 error."""
    pass
190
+
191
+
192
class InputError(Exception):
    """Raised when caller-supplied input fails validation (bad media type, bad date, ...)."""
    pass
194
+
195
+
196
# Database connection function with connection pooling
class Database:
    """Small SQLite connection pool with retry-on-lock behaviour.

    Connections are created lazily with check_same_thread=False and reused via
    a simple LIFO list capped at `pool_size`.
    """

    def __init__(self, db_name=None):
        # DB_NAME env var overrides; falls back to the default media database file.
        self.db_name = db_name or os.getenv('DB_NAME', 'media_summary.db')
        self.pool = []
        self.pool_size = 10

    @contextmanager
    def get_connection(self):
        """Yield a pooled connection, retrying a few times when the DB is locked.

        Raises DatabaseError when retries are exhausted or on any other failure.
        """
        retry_count = 5
        retry_delay = 1
        while retry_count > 0:
            conn = None
            try:
                conn = self.pool.pop() if self.pool else sqlite3.connect(self.db_name, check_same_thread=False)
                yield conn
                return
            except sqlite3.OperationalError as e:
                if 'database is locked' in str(e):
                    logging.warning(f"Database is locked, retrying in {retry_delay} seconds...")
                    retry_count -= 1
                    time.sleep(retry_delay)
                else:
                    raise DatabaseError(f"Database error: {e}")
            except DatabaseError:
                # Already our error type (e.g. raised by the caller's body) — do not re-wrap.
                raise
            except Exception as e:
                raise DatabaseError(f"Unexpected error: {e}")
            finally:
                # Single point of return-to-pool. The original appended the
                # connection BOTH here and after the yield, duplicating live
                # connections in the pool; it also never enforced pool_size.
                if conn is not None:
                    if len(self.pool) < self.pool_size:
                        self.pool.append(conn)
                    else:
                        conn.close()
        raise DatabaseError("Database is locked and retries have been exhausted")

    def execute_query(self, query: str, params: Tuple = ()) -> None:
        """Execute a single statement with `params` and commit; wraps failures in DatabaseError."""
        with self.get_connection() as conn:
            try:
                cursor = conn.cursor()
                cursor.execute(query, params)
                conn.commit()
            except sqlite3.Error as e:
                raise DatabaseError(f"Database error: {e}, Query: {query}")
237
+
238
# Module-level pool shared by every helper in this file.
db = Database()

def instantiate_SQLite_db():
    """Create a fresh Database pool and expose it through the global `sqlite_db`."""
    global sqlite_db
    sqlite_db = Database()
243
+
244
+
245
# Function to create tables with the new media schema
def create_tables(db) -> None:
    """Create every table, index, and FTS5 virtual table the application needs.

    Idempotent: all statements use IF NOT EXISTS, so calling this on an existing
    database is safe. `db` must expose execute_query(query); a failing statement
    is logged with its SQL and the error is re-raised.
    """
    table_queries = [
        # CREATE TABLE statements
        '''
        CREATE TABLE IF NOT EXISTS Media (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            url TEXT,
            title TEXT NOT NULL,
            type TEXT NOT NULL,
            content TEXT,
            author TEXT,
            ingestion_date TEXT,
            prompt TEXT,
            summary TEXT,
            transcription_model TEXT,
            is_trash BOOLEAN DEFAULT 0,
            trash_date DATETIME,
            vector_embedding BLOB
        )
        ''',
        '''
        CREATE TABLE IF NOT EXISTS Keywords (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            keyword TEXT NOT NULL UNIQUE
        )
        ''',
        '''
        CREATE TABLE IF NOT EXISTS MediaKeywords (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            media_id INTEGER NOT NULL,
            keyword_id INTEGER NOT NULL,
            FOREIGN KEY (media_id) REFERENCES Media(id),
            FOREIGN KEY (keyword_id) REFERENCES Keywords(id)
        )
        ''',
        '''
        CREATE TABLE IF NOT EXISTS MediaVersion (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            media_id INTEGER NOT NULL,
            version INTEGER NOT NULL,
            prompt TEXT,
            summary TEXT,
            created_at TEXT NOT NULL,
            FOREIGN KEY (media_id) REFERENCES Media(id)
        )
        ''',
        '''
        CREATE TABLE IF NOT EXISTS MediaModifications (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            media_id INTEGER NOT NULL,
            prompt TEXT,
            summary TEXT,
            modification_date TEXT,
            FOREIGN KEY (media_id) REFERENCES Media(id)
        )
        ''',
        '''
        CREATE TABLE IF NOT EXISTS ChatConversations (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            media_id INTEGER,
            media_name TEXT,
            conversation_name TEXT,
            created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
            updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
            FOREIGN KEY (media_id) REFERENCES Media(id)
        )
        ''',
        '''
        CREATE TABLE IF NOT EXISTS ChatMessages (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            conversation_id INTEGER,
            sender TEXT,
            message TEXT,
            timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
            FOREIGN KEY (conversation_id) REFERENCES ChatConversations(id)
        )
        ''',
        '''
        CREATE TABLE IF NOT EXISTS Transcripts (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            media_id INTEGER,
            whisper_model TEXT,
            transcription TEXT,
            created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
            FOREIGN KEY (media_id) REFERENCES Media(id)
        )
        ''',
        '''
        CREATE TABLE IF NOT EXISTS MediaChunks (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            media_id INTEGER,
            chunk_text TEXT,
            start_index INTEGER,
            end_index INTEGER,
            vector_embedding BLOB,
            FOREIGN KEY (media_id) REFERENCES Media(id)
        )
        ''',
        '''
        CREATE TABLE IF NOT EXISTS UnvectorizedMediaChunks (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            media_id INTEGER NOT NULL,
            chunk_text TEXT NOT NULL,
            chunk_index INTEGER NOT NULL,
            start_char INTEGER NOT NULL,
            end_char INTEGER NOT NULL,
            chunk_type TEXT,
            creation_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
            last_modified TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
            is_processed BOOLEAN DEFAULT FALSE,
            metadata TEXT,
            FOREIGN KEY (media_id) REFERENCES Media(id)
        )
        ''',
        '''
        CREATE TABLE IF NOT EXISTS DocumentVersions (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            media_id INTEGER NOT NULL,
            version_number INTEGER NOT NULL,
            content TEXT,
            created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
            FOREIGN KEY (media_id) REFERENCES Media(id)
        )
        ''',
    ]

    index_queries = [
        # CREATE INDEX statements
        'CREATE INDEX IF NOT EXISTS idx_media_title ON Media(title)',
        'CREATE INDEX IF NOT EXISTS idx_media_type ON Media(type)',
        'CREATE INDEX IF NOT EXISTS idx_media_author ON Media(author)',
        'CREATE INDEX IF NOT EXISTS idx_media_ingestion_date ON Media(ingestion_date)',
        'CREATE INDEX IF NOT EXISTS idx_keywords_keyword ON Keywords(keyword)',
        'CREATE INDEX IF NOT EXISTS idx_mediakeywords_media_id ON MediaKeywords(media_id)',
        'CREATE INDEX IF NOT EXISTS idx_mediakeywords_keyword_id ON MediaKeywords(keyword_id)',
        'CREATE INDEX IF NOT EXISTS idx_media_version_media_id ON MediaVersion(media_id)',
        'CREATE INDEX IF NOT EXISTS idx_mediamodifications_media_id ON MediaModifications(media_id)',
        'CREATE INDEX IF NOT EXISTS idx_chatconversations_media_id ON ChatConversations(media_id)',
        'CREATE INDEX IF NOT EXISTS idx_chatmessages_conversation_id ON ChatMessages(conversation_id)',
        'CREATE INDEX IF NOT EXISTS idx_media_is_trash ON Media(is_trash)',
        'CREATE INDEX IF NOT EXISTS idx_mediachunks_media_id ON MediaChunks(media_id)',
        'CREATE INDEX IF NOT EXISTS idx_unvectorized_media_chunks_media_id ON UnvectorizedMediaChunks(media_id)',
        'CREATE INDEX IF NOT EXISTS idx_unvectorized_media_chunks_is_processed ON UnvectorizedMediaChunks(is_processed)',
        'CREATE INDEX IF NOT EXISTS idx_unvectorized_media_chunks_chunk_type ON UnvectorizedMediaChunks(chunk_type)',
        # CREATE UNIQUE INDEX statements
        'CREATE UNIQUE INDEX IF NOT EXISTS idx_unique_media_url ON Media(url)',
        'CREATE UNIQUE INDEX IF NOT EXISTS idx_unique_media_keyword ON MediaKeywords(media_id, keyword_id)',
        'CREATE INDEX IF NOT EXISTS idx_document_versions_media_id ON DocumentVersions(media_id)',
        'CREATE INDEX IF NOT EXISTS idx_document_versions_version_number ON DocumentVersions(version_number)',
    ]

    virtual_table_queries = [
        # CREATE VIRTUAL TABLE statements (FTS5 full-text search mirrors)
        'CREATE VIRTUAL TABLE IF NOT EXISTS media_fts USING fts5(title, content)',
        'CREATE VIRTUAL TABLE IF NOT EXISTS keyword_fts USING fts5(keyword)'
    ]

    all_queries = table_queries + index_queries + virtual_table_queries

    # Run each statement individually so a failure pinpoints the offending SQL.
    for query in all_queries:
        try:
            db.execute_query(query)
        except Exception as e:
            logging.error(f"Error executing query: {query}")
            logging.error(f"Error details: {str(e)}")
            raise

    logging.info("All tables, indexes, and virtual tables created successfully.")

# Ensure the schema exists as soon as this module is imported.
create_tables(db)
416
+
417
+
418
def check_media_exists(title, url):
    """Check if media with the given title or URL exists in the database."""
    with db.get_connection() as conn:
        match = conn.execute(
            "SELECT id FROM Media WHERE title = ? OR url = ?", (title, url)
        ).fetchone()
    return match is not None
425
+
426
+
427
def check_media_and_whisper_model(title=None, url=None, current_whisper_model=None):
    """
    Check if media exists in the database and compare the whisper model used.

    :param title: Title of the media (optional)
    :param url: URL of the media (optional)
    :param current_whisper_model: The whisper model currently selected for use
    :return: Tuple (bool, str) - (should_download, reason)
    """
    # With nothing to match on, we must download.
    if not title and not url:
        return True, "No title or URL provided"

    with db.get_connection() as conn:
        cursor = conn.cursor()

        # First, find the media_id; the WHERE clause is assembled from whichever
        # of title/url were supplied (OR-combined when both are present).
        query = "SELECT id FROM Media WHERE "
        params = []

        if title:
            query += "title = ?"
            params.append(title)

        if url:
            if params:
                query += " OR "
            query += "url = ?"
            params.append(url)

        cursor.execute(query, tuple(params))
        result = cursor.fetchone()

        if not result:
            return True, "Media not found in database"

        media_id = result[0]

        # Now, get the latest transcript for this media
        cursor.execute("""
            SELECT transcription
            FROM Transcripts
            WHERE media_id = ?
            ORDER BY created_at DESC
            LIMIT 1
        """, (media_id,))

        transcript_result = cursor.fetchone()

        if not transcript_result:
            return True, f"No transcript found for media (ID: {media_id})"

        transcription = transcript_result[0]

        # Extract the whisper model from the transcription: the ingestion
        # pipeline apparently appends a marker line of this exact form —
        # TODO confirm against the transcription writer.
        match = re.search(r"This text was transcribed using whisper model: (.+)$", transcription, re.MULTILINE)
        if not match:
            return True, f"Whisper model information not found in transcript (Media ID: {media_id})"

        db_whisper_model = match.group(1).strip()

        # No model requested: existing media is good enough, skip download.
        if not current_whisper_model:
            return False, f"Media found in database (ID: {media_id})"

        # A different model means we should re-download and re-transcribe.
        if db_whisper_model != current_whisper_model:
            return True, f"Different whisper model (DB: {db_whisper_model}, Current: {current_whisper_model})"

        return False, f"Media found with same whisper model (ID: {media_id})"
494
+
495
+
496
+ #######################################################################################################################
497
+ # Keyword-related Functions
498
+ #
499
+
500
# Function to add a keyword
def add_keyword(keyword: str) -> int:
    """Insert a keyword (normalized to lowercase) and mirror it into the FTS index.

    Returns the keyword's row id. Raises DatabaseError on any SQL failure.
    """
    normalized = keyword.strip().lower()
    with db.get_connection() as conn:
        cursor = conn.cursor()
        try:
            # INSERT OR IGNORE makes re-adding an existing keyword a no-op.
            cursor.execute('INSERT OR IGNORE INTO Keywords (keyword) VALUES (?)', (normalized,))
            cursor.execute('SELECT id FROM Keywords WHERE keyword = ?', (normalized,))
            keyword_id = cursor.fetchone()[0]
            # Keep the FTS mirror in sync, reusing the same rowid.
            cursor.execute('INSERT OR IGNORE INTO keyword_fts (rowid, keyword) VALUES (?, ?)',
                           (keyword_id, normalized))
            logging.info(f"Keyword '{normalized}' added to keyword_fts with ID: {keyword_id}")
            conn.commit()
            return keyword_id
        except sqlite3.IntegrityError as e:
            logging.error(f"Integrity error adding keyword: {e}")
            raise DatabaseError(f"Integrity error adding keyword: {e}")
        except sqlite3.Error as e:
            logging.error(f"Error adding keyword: {e}")
            raise DatabaseError(f"Error adding keyword: {e}")
519
+
520
+
521
# Function to delete a keyword
def delete_keyword(keyword: str) -> str:
    """Remove a keyword (case-insensitive) from Keywords and its FTS mirror.

    Returns a human-readable status string; raises DatabaseError on SQL failure.
    """
    target = keyword.strip().lower()
    with db.get_connection() as conn:
        cursor = conn.cursor()
        try:
            cursor.execute('SELECT id FROM Keywords WHERE keyword = ?', (target,))
            row = cursor.fetchone()
            if row is None:
                return f"Keyword '{target}' not found."
            # Delete from both the main table and the FTS mirror (same rowid).
            cursor.execute('DELETE FROM Keywords WHERE keyword = ?', (target,))
            cursor.execute('DELETE FROM keyword_fts WHERE rowid = ?', (row[0],))
            conn.commit()
            return f"Keyword '{target}' deleted successfully."
        except sqlite3.Error as e:
            raise DatabaseError(f"Error deleting keyword: {e}")
538
+
539
+
540
+
541
# Function to add media with keywords
def add_media_with_keywords(url, title, media_type, content, keywords, prompt, summary, transcription_model, author,
                            ingestion_date):
    """Insert a new Media row (or update the existing one matched by URL) plus
    its keywords, modification history, FTS entry, and version record.

    `keywords` may be a comma-separated string or a list of strings; every other
    parameter is a string. Missing values are replaced with defaults. Returns a
    human-readable status string. Raises InputError for an invalid media type or
    date, DatabaseError for any database failure.
    """
    # Set default values for missing fields
    url = url or 'Unknown'
    title = title or 'Untitled'
    media_type = media_type or 'Unknown'
    content = content or 'No content available'
    keywords = keywords or 'default'
    prompt = prompt or 'No prompt available'
    summary = summary or 'No summary available'
    transcription_model = transcription_model or 'Unknown'
    author = author or 'Unknown'
    ingestion_date = ingestion_date or datetime.now().strftime('%Y-%m-%d')

    # Ensure URL is valid; fall back to a placeholder so the UNIQUE(url) index still applies
    if not is_valid_url(url):
        url = 'localhost'

    if media_type not in ['article', 'audio', 'document', 'obsidian_note', 'podcast', 'text', 'video', 'unknown']:
        raise InputError("Invalid media type. Allowed types: article, audio file, document, obsidian_note podcast, text, video, unknown.")

    if ingestion_date and not is_valid_date(ingestion_date):
        raise InputError("Invalid ingestion date format. Use YYYY-MM-DD.")

    # Handle keywords as either string or list
    if isinstance(keywords, str):
        keyword_list = [keyword.strip().lower() for keyword in keywords.split(',')]
    elif isinstance(keywords, list):
        keyword_list = [keyword.strip().lower() for keyword in keywords]
    else:
        keyword_list = ['default']

    logging.info(f"Adding/updating media: URL={url}, Title={title}, Type={media_type}")
    logging.debug(f"Content (first 500 chars): {content[:500]}...")
    logging.debug(f"Keywords: {keyword_list}")
    logging.info(f"Prompt: {prompt}")
    logging.info(f"Summary: {summary}")
    logging.info(f"Author: {author}")
    logging.info(f"Ingestion Date: {ingestion_date}")
    logging.info(f"Transcription Model: {transcription_model}")

    try:
        with db.get_connection() as conn:
            try:
                conn.execute("BEGIN TRANSACTION")
                cursor = conn.cursor()

                # Check if media already exists (URL is the natural key)
                cursor.execute('SELECT id FROM Media WHERE url = ?', (url,))
                existing_media = cursor.fetchone()

                if existing_media:
                    media_id = existing_media[0]
                    logging.info(f"Updating existing media with ID: {media_id}")
                    cursor.execute('''
                    UPDATE Media
                    SET content = ?, transcription_model = ?, title = ?, type = ?, author = ?, ingestion_date = ?
                    WHERE id = ?
                    ''', (content, transcription_model, title, media_type, author, ingestion_date, media_id))
                else:
                    logging.info("Creating new media entry")
                    cursor.execute('''
                    INSERT INTO Media (url, title, type, content, author, ingestion_date, transcription_model)
                    VALUES (?, ?, ?, ?, ?, ?, ?)
                    ''', (url, title, media_type, content, author, ingestion_date, transcription_model))
                    media_id = cursor.lastrowid

                logging.info(f"Adding new modification to MediaModifications for media ID: {media_id}")
                cursor.execute('''
                INSERT INTO MediaModifications (media_id, prompt, summary, modification_date)
                VALUES (?, ?, ?, ?)
                ''', (media_id, prompt, summary, ingestion_date))
                logger.info("New modification added to MediaModifications")

                # Insert keywords and associate with media item
                logging.info("Processing keywords")
                for keyword in keyword_list:
                    keyword = keyword.strip().lower()
                    cursor.execute('INSERT OR IGNORE INTO Keywords (keyword) VALUES (?)', (keyword,))
                    cursor.execute('SELECT id FROM Keywords WHERE keyword = ?', (keyword,))
                    keyword_id = cursor.fetchone()[0]
                    cursor.execute('INSERT OR IGNORE INTO MediaKeywords (media_id, keyword_id) VALUES (?, ?)',
                                   (media_id, keyword_id))

                # Update full-text search index
                logging.info("Updating full-text search index")
                cursor.execute('INSERT OR REPLACE INTO media_fts (rowid, title, content) VALUES (?, ?, ?)',
                               (media_id, title, content))

                logging.info("Adding new media version")
                add_media_version(media_id, prompt, summary)

                conn.commit()
            except Exception:
                # Roll back on the SAME connection that opened the transaction.
                # (The original rolled back outside the `with`, where `conn`
                # could be unbound — a NameError masking the real error — and
                # the connection was already back in the pool.)
                conn.rollback()
                raise

        logging.info(f"Media '{title}' successfully added/updated with ID: {media_id}")
        return f"Media '{title}' added/updated successfully with keywords: {', '.join(keyword_list)}"

    except sqlite3.Error as e:
        logging.error(f"SQL Error: {e}")
        raise DatabaseError(f"Error adding media with keywords: {e}")
    except DatabaseError:
        # Already wrapped (e.g. by add_media_version); don't double-wrap.
        raise
    except Exception as e:
        logging.error(f"Unexpected Error: {e}")
        raise DatabaseError(f"Unexpected error: {e}")
648
+
649
+
650
def ingest_article_to_db(url, title, author, content, keywords, summary, ingestion_date, custom_prompt):
    """Validate article fields, apply defaults, and store the article via
    add_media_with_keywords.

    Returns the status string from add_media_with_keywords on success, or the
    error message as a string on failure — this function never raises.
    """
    try:
        # Check if content is not empty or whitespace
        if not content.strip():
            raise ValueError("Content is empty.")

        keyword_list = keywords.split(',') if keywords else ["default"]
        keyword_str = ', '.join(keyword_list)

        # Set default values for missing fields
        url = url or 'Unknown'
        title = title or 'Unknown'
        author = author or 'Unknown'
        keywords = keywords or 'default'
        summary = summary or 'No summary available'
        ingestion_date = ingestion_date or datetime.now().strftime('%Y-%m-%d')

        # Log the values of all fields before calling add_media_with_keywords
        logging.debug(f"URL: {url}")
        logging.debug(f"Title: {title}")
        logging.debug(f"Author: {author}")
        logging.debug(f"Content: {content[:50]}... (length: {len(content)})")  # Log first 50 characters of content
        logging.debug(f"Keywords: {keywords}")
        logging.debug(f"Summary: {summary}")
        logging.debug(f"Ingestion Date: {ingestion_date}")
        logging.debug(f"Custom Prompt: {custom_prompt}")

        # Check if any required field is empty and log the specific missing field
        # NOTE(review): most of these checks are unreachable — url/title/keywords/
        # summary/ingestion_date were just defaulted above, and empty content has
        # already raised. Only the custom_prompt check can still fire.
        if not url:
            logging.error("URL is missing.")
            raise ValueError("URL is missing.")
        if not title:
            logging.error("Title is missing.")
            raise ValueError("Title is missing.")
        if not content:
            logging.error("Content is missing.")
            raise ValueError("Content is missing.")
        if not keywords:
            logging.error("Keywords are missing.")
            raise ValueError("Keywords are missing.")
        if not summary:
            logging.error("Summary is missing.")
            raise ValueError("Summary is missing.")
        if not ingestion_date:
            logging.error("Ingestion date is missing.")
            raise ValueError("Ingestion date is missing.")
        if not custom_prompt:
            logging.error("Custom prompt is missing.")
            raise ValueError("Custom prompt is missing.")

        # Add media with keywords to the database
        result = add_media_with_keywords(
            url=url,
            title=title,
            media_type='article',
            content=content,
            keywords=keyword_str or "article_default",
            prompt=custom_prompt or None,
            summary=summary or "No summary generated",
            transcription_model=None,  # or some default value if applicable
            author=author or 'Unknown',
            ingestion_date=ingestion_date
        )
        return result
    except Exception as e:
        logging.error(f"Failed to ingest article to the database: {e}")
        return str(e)
717
+
718
+
719
def fetch_all_keywords() -> List[str]:
    """Return every keyword stored in the Keywords table."""
    try:
        with db.get_connection() as conn:
            rows = conn.execute('SELECT keyword FROM Keywords').fetchall()
        return [row[0] for row in rows]
    except sqlite3.Error as e:
        raise DatabaseError(f"Error fetching keywords: {e}")
728
+
729
def keywords_browser_interface():
    """Render all stored keywords as a Markdown bullet list for the Gradio UI."""
    bullet_lines = (f"- {keyword}" for keyword in fetch_all_keywords())
    return gr.Markdown("\n".join(bullet_lines))
732
+
733
def display_keywords():
    """Return all keywords one per line, a fallback message when none exist,
    or the error text when the database lookup fails."""
    try:
        all_keywords = fetch_all_keywords()
    except DatabaseError as e:
        return str(e)
    if not all_keywords:
        return "No keywords found."
    return "\n".join(all_keywords)
739
+
740
+
741
def export_keywords_to_csv():
    """Write all keywords to keywords.csv in the working directory.

    :return: (filename, status message) on success, (None, message) otherwise.
    """
    try:
        keywords = fetch_all_keywords()
        if not keywords:
            return None, "No keywords found in the database."

        filename = "keywords.csv"
        with open(filename, 'w', newline='', encoding='utf-8') as file:
            writer = csv.writer(file)
            writer.writerow(["Keyword"])
            for keyword in keywords:
                writer.writerow([keyword])

        # Report the actual file written (the original message had lost its
        # {filename} placeholder and printed a literal "(unknown)").
        return filename, f"Keywords exported to {filename}"
    except Exception as e:
        logger.error(f"Error exporting keywords to CSV: {e}")
        return None, f"Error exporting keywords: {e}"
758
+
759
+
760
# Function to fetch items based on search query and type
def browse_items(search_query, search_type):
    """Look up media rows by Title, URL, Keyword, or Content substring match.

    Returns a list of (id, title, url) tuples (or whatever
    fetch_items_by_keyword yields for keyword searches). Raises ValueError for
    an unknown search_type and DatabaseError on SQL failure.
    """
    # Fixed mapping of search type -> column; interpolating from this dict
    # keeps the f-string SQL safe from user-controlled column names.
    column_for_type = {'Title': 'title', 'URL': 'url', 'Content': 'content'}
    try:
        with db.get_connection() as conn:
            if search_type == 'Keyword':
                return fetch_items_by_keyword(search_query)
            column = column_for_type.get(search_type)
            if column is None:
                raise ValueError(f"Invalid search type: {search_type}")
            cursor = conn.cursor()
            cursor.execute(f"SELECT id, title, url FROM Media WHERE {column} LIKE ?",
                           (f'%{search_query}%',))
            return cursor.fetchall()
    except sqlite3.Error as e:
        logger.error(f"Error fetching items by {search_type}: {e}")
        raise DatabaseError(f"Error fetching items by {search_type}: {e}")
781
+
782
+
783
# Function to fetch item details
def fetch_item_details(media_id: int):
    """Return (content, prompt, summary) for a media item.

    prompt/summary come from the most recent MediaModifications row; each value
    falls back to "" when missing, and ("", "", "") is returned on SQL error.
    """
    try:
        with db.get_connection() as conn:
            cursor = conn.cursor()
            # Latest modification wins.
            cursor.execute("""
                SELECT prompt, summary
                FROM MediaModifications
                WHERE media_id = ?
                ORDER BY modification_date DESC
                LIMIT 1
            """, (media_id,))
            latest_mod = cursor.fetchone()
            cursor.execute("SELECT content FROM Media WHERE id = ?", (media_id,))
            media_row = cursor.fetchone()

        prompt = latest_mod[0] if latest_mod else ""
        summary = latest_mod[1] if latest_mod else ""
        content = media_row[0] if media_row else ""
        return content, prompt, summary
    except sqlite3.Error as e:
        logging.error(f"Error fetching item details: {e}")
        # Return empty strings if there's an error
        return "", "", ""
808
+
809
+ #
810
+ #
811
+ #######################################################################################################################
812
+ #
813
+ # Media-related Functions
814
+
815
+
816
+
817
# Function to add a version of a prompt and summary
def add_media_version(media_id: int, prompt: str, summary: str) -> None:
    """Append the next version row for a media item to MediaVersion.

    The version number is MAX(version)+1 for that media_id (starting at 1).
    Raises DatabaseError on SQL failure.
    """
    try:
        with db.get_connection() as conn:
            cursor = conn.cursor()
            # MAX(version) is NULL for a new item; `or 0` makes the first version 1.
            cursor.execute('SELECT MAX(version) FROM MediaVersion WHERE media_id = ?', (media_id,))
            latest = cursor.fetchone()[0] or 0
            cursor.execute('''
                INSERT INTO MediaVersion (media_id, version, prompt, summary, created_at)
                VALUES (?, ?, ?, ?, ?)
            ''', (media_id, latest + 1, prompt, summary, datetime.now().strftime('%Y-%m-%d %H:%M:%S')))
            conn.commit()
    except sqlite3.Error as e:
        raise DatabaseError(f"Error adding media version: {e}")
835
+
836
+
837
# Function to search the database with advanced options, including keyword search and full-text search
def search_db(search_query: str, search_fields: List[str], keywords: str, page: int = 1, results_per_page: int = 10):
    """LIKE-search Media rows over `search_fields`, filtered by comma-separated
    `keywords`, newest first, paginated.

    Returns a page of tuples: (id, url, title, type, content, author,
    ingestion_date, prompt, summary).

    NOTE(review): entries of `search_fields` are interpolated directly into the
    SQL — callers must pass trusted column names only.
    """
    if page < 1:
        raise ValueError("Page number must be 1 or greater.")

    # Prepare keywords by splitting and trimming
    keywords = [keyword.strip().lower() for keyword in keywords.split(',') if keyword.strip()]

    with db.get_connection() as conn:
        cursor = conn.cursor()
        offset = (page - 1) * results_per_page

        # Prepare the search conditions for general fields
        search_conditions = []
        params = []

        for field in search_fields:
            if search_query:  # Ensure there's a search query before adding this condition
                search_conditions.append(f"Media.{field} LIKE ?")
                params.append(f'%{search_query}%')

        # Prepare the conditions for keywords filtering: one EXISTS subquery per keyword
        keyword_conditions = []
        for keyword in keywords:
            keyword_conditions.append(
                f"EXISTS (SELECT 1 FROM MediaKeywords mk JOIN Keywords k ON mk.keyword_id = k.id WHERE mk.media_id = Media.id AND k.keyword LIKE ?)")
            params.append(f'%{keyword}%')

        # Combine all conditions; "1=1" keeps the WHERE clause valid when empty
        where_clause = " AND ".join(
            search_conditions + keyword_conditions) if search_conditions or keyword_conditions else "1=1"

        # Complete the query
        query = f'''
        SELECT DISTINCT Media.id, Media.url, Media.title, Media.type, Media.content, Media.author, Media.ingestion_date,
               MediaModifications.prompt, MediaModifications.summary
        FROM Media
        LEFT JOIN MediaModifications ON Media.id = MediaModifications.media_id
        WHERE {where_clause}
        ORDER BY Media.ingestion_date DESC
        LIMIT ? OFFSET ?
        '''
        params.extend([results_per_page, offset])

        cursor.execute(query, params)
        results = cursor.fetchall()

        return results
885
+
886
+
887
# Gradio function to handle user input and display results with pagination, with better feedback
def search_and_display(search_query, search_fields, keywords, page):
    """Run a paginated search and normalize the results to a list of row lists.

    Accepts either a DataFrame or a list (of dicts or sequences) from search_db;
    raises TypeError for anything else.
    """
    results = search_db(search_query, search_fields, keywords, page)

    if isinstance(results, pd.DataFrame):
        # DataFrame rows become plain lists.
        return results.values.tolist()
    if isinstance(results, list):
        # Dicts are flattened to their values; sequences pass through unchanged.
        normalized = []
        for item in results:
            normalized.append(list(item.values()) if isinstance(item, dict) else item)
        return normalized
    raise TypeError("Unsupported data type for results")
901
+
902
+
903
def display_details(index, results):
    """Render one search-result row as an HTML detail view.

    `results` may be a DataFrame or a list of dict-like rows; `index` selects
    the row. All field values are HTML-escaped before interpolation so stored
    content cannot inject markup into the page.
    Returns the HTML string, or a plain-text message on bad input.
    """
    if index is None or results is None:
        return "Please select a result to view details."

    try:
        # Ensure the index is an integer and access the row properly
        index = int(index)
        if isinstance(results, pd.DataFrame):
            if index >= len(results):
                return "Index out of range. Please select a valid index."
            selected_row = results.iloc[index]
        else:
            # If results is not a DataFrame, but a list (assuming list of dicts)
            selected_row = results[index]
    except ValueError:
        return "Index must be an integer."
    except IndexError:
        return "Index out of range. Please select a valid index."

    # Rows without named fields (e.g. plain tuples) cannot be rendered by name.
    if not hasattr(selected_row, 'get'):
        return "Selected result has no named fields to display."

    def esc(field, default):
        # html.escape prevents DB content from being interpreted as markup (XSS).
        return html.escape(str(selected_row.get(field, default)))

    # Build HTML output safely
    details_html = f"""
    <h3>{esc('Title', 'No Title')}</h3>
    <p><strong>URL:</strong> {esc('URL', 'No URL')}</p>
    <p><strong>Type:</strong> {esc('Type', 'No Type')}</p>
    <p><strong>Author:</strong> {esc('Author', 'No Author')}</p>
    <p><strong>Ingestion Date:</strong> {esc('Ingestion Date', 'No Date')}</p>
    <p><strong>Prompt:</strong> {esc('Prompt', 'No Prompt')}</p>
    <p><strong>Summary:</strong> {esc('Summary', 'No Summary')}</p>
    <p><strong>Content:</strong> {esc('Content', 'No Content')}</p>
    """
    return details_html
934
+
935
+
936
def get_details(index, dataframe):
    """Format row `index` of a results DataFrame as an HTML detail view.

    Returns a prompt message when the selection is missing or out of range.
    """
    if index is None or dataframe is None or index >= len(dataframe):
        return "Please select a result to view details."
    record = dataframe.iloc[index]
    return f"""
    <h3>{record['Title']}</h3>
    <p><strong>URL:</strong> {record['URL']}</p>
    <p><strong>Type:</strong> {record['Type']}</p>
    <p><strong>Author:</strong> {record['Author']}</p>
    <p><strong>Ingestion Date:</strong> {record['Ingestion Date']}</p>
    <p><strong>Prompt:</strong> {record['Prompt']}</p>
    <p><strong>Summary:</strong> {record['Summary']}</p>
    <p><strong>Content:</strong></p>
    <pre>{record['Content']}</pre>
    """
952
+
953
+
954
def format_results(results):
    """Wrap raw search result rows in a DataFrame with the canonical columns.

    An empty input still yields a DataFrame with the full column set so
    downstream display code never has to special-case "no results".
    """
    columns = ['URL', 'Title', 'Type', 'Content', 'Author', 'Ingestion Date', 'Prompt', 'Summary']
    if not results:
        return pd.DataFrame(columns=columns)

    frame = pd.DataFrame(results, columns=columns)
    logging.debug(f"Formatted DataFrame: {frame}")
    return frame
962
+
963
+
964
# Function to export search results to CSV or markdown with pagination
def export_to_file(search_query: str, search_fields: List[str], keyword: str, page: int = 1, results_per_file: int = 1000, export_format: str = 'csv'):
    """Export one page of search results into ./exports/ as CSV or Markdown.

    Args:
        search_query: Full-text query forwarded to search_db().
        search_fields: Media fields to search, forwarded to search_db().
        keyword: Keyword filter forwarded to search_db().
        page: 1-based page number; also embedded in the output filename.
        results_per_file: Maximum number of rows written per file.
        export_format: Either 'csv' or 'markdown'.

    Returns:
        A status message naming the file written, or an error description.
    """
    try:
        results = search_db(search_query, search_fields, keyword, page, results_per_file)
        if not results:
            return "No results found to export."

        # Create an 'exports' directory if it doesn't exist
        if not os.path.exists('exports'):
            os.makedirs('exports')

        if export_format == 'csv':
            filename = f'exports/search_results_page_{page}.csv'
            with open(filename, 'w', newline='', encoding='utf-8') as file:
                writer = csv.writer(file)
                writer.writerow(['URL', 'Title', 'Type', 'Content', 'Author', 'Ingestion Date', 'Prompt', 'Summary'])
                for row in results:
                    writer.writerow(row)
        elif export_format == 'markdown':
            filename = f'exports/search_results_page_{page}.md'
            with open(filename, 'w', encoding='utf-8') as file:
                for item in results:
                    # NOTE(review): item[8] assumes search_db returns a ninth
                    # (keywords) column beyond the eight written to CSV above
                    # — verify against search_db's SELECT list.
                    markdown_content = convert_to_markdown({
                        'title': item[1],
                        'url': item[0],
                        'type': item[2],
                        'content': item[3],
                        'author': item[4],
                        'ingestion_date': item[5],
                        'summary': item[7],
                        'keywords': item[8].split(',') if item[8] else []
                    })
                    file.write(markdown_content)
                    file.write("\n---\n\n")  # Separator between items
        else:
            return f"Unsupported export format: {export_format}"

        # Bug fix: this previously returned the literal text "(unknown)"
        # instead of interpolating the path of the file actually written.
        return f"Results exported to {filename}"
    except (DatabaseError, InputError) as e:
        return str(e)
1004
+
1005
+
1006
# Helper function to validate date format
def is_valid_date(date_string: str) -> bool:
    """Return True when *date_string* parses as an ISO date (YYYY-MM-DD)."""
    try:
        datetime.strptime(date_string, '%Y-%m-%d')
    except ValueError:
        # Wrong format or an impossible calendar date.
        return False
    return True
1013
+
1014
+
1015
# Add ingested media to DB
def add_media_to_database(url, info_dict, segments, summary, keywords, custom_prompt_input, whisper_model, media_type='video'):
    """Insert or update an ingested media item and all of its satellite records.

    In one transaction this upserts the Media row (keyed on URL), appends a
    MediaModifications entry, links Keywords via MediaKeywords, refreshes the
    media_fts full-text row, records a media version (add_media_version) and
    an initial document version (create_document_version).

    Args:
        url: Canonical URL of the media; used as the de-duplication key.
        info_dict: Metadata dict; 'title' and 'uploader' are read with defaults.
        segments: Transcript as a list of segment dicts, a dict, or any other
            object (stringified as a last resort).
        summary: Summary text stored in MediaModifications.
        keywords: Comma-separated string, list, or tuple of keywords.
        custom_prompt_input: Prompt used for the summary; a placeholder string
            is stored when None.
        whisper_model: Transcription model name recorded on the Media row.
        media_type: Media category; defaults to 'video'.

    Returns:
        Human-readable success message listing the stored keywords.

    Raises:
        DatabaseError: On sqlite errors or any unexpected failure.
    """
    try:
        # Extract content from segments
        if isinstance(segments, list):
            # NOTE(review): this branch reads capitalized 'Text' keys while the
            # dict branch below reads lowercase 'text'/'content' — confirm
            # which shape the upstream transcriber actually emits.
            content = ' '.join([segment.get('Text', '') for segment in segments if 'Text' in segment])
        elif isinstance(segments, dict):
            content = segments.get('text', '') or segments.get('content', '')
        else:
            content = str(segments)

        logging.debug(f"Extracted content (first 500 chars): {content[:500]}")

        # Set default custom prompt if not provided
        if custom_prompt_input is None:
            custom_prompt_input = """No Custom Prompt Provided or Was Used."""

        logging.info(f"Adding media to database: URL={url}, Title={info_dict.get('title', 'Untitled')}, Type={media_type}")

        # Process keywords: normalize everything to stripped, lowercase strings.
        if isinstance(keywords, str):
            keyword_list = [keyword.strip().lower() for keyword in keywords.split(',')]
        elif isinstance(keywords, (list, tuple)):
            keyword_list = [keyword.strip().lower() for keyword in keywords]
        else:
            keyword_list = ['default']

        with db.get_connection() as conn:
            cursor = conn.cursor()

            # Check if media already exists (URL is the uniqueness key)
            cursor.execute('SELECT id FROM Media WHERE url = ?', (url,))
            existing_media = cursor.fetchone()

            if existing_media:
                media_id = existing_media[0]
                logging.info(f"Updating existing media with ID: {media_id}")

                cursor.execute('''
                UPDATE Media
                SET content = ?, transcription_model = ?, title = ?, type = ?, author = ?, ingestion_date = ?
                WHERE id = ?
                ''', (content, whisper_model, info_dict.get('title', 'Untitled'), media_type,
                      info_dict.get('uploader', 'Unknown'), datetime.now().strftime('%Y-%m-%d'), media_id))
            else:
                logging.info("Creating new media entry")

                cursor.execute('''
                INSERT INTO Media (url, title, type, content, author, ingestion_date, transcription_model)
                VALUES (?, ?, ?, ?, ?, ?, ?)
                ''', (url, info_dict.get('title', 'Untitled'), media_type, content,
                      info_dict.get('uploader', 'Unknown'), datetime.now().strftime('%Y-%m-%d'), whisper_model))
                media_id = cursor.lastrowid

            # Every ingestion (even an update) appends a modification record.
            logging.info(f"Adding new modification to MediaModifications for media ID: {media_id}")
            cursor.execute('''
            INSERT INTO MediaModifications (media_id, prompt, summary, modification_date)
            VALUES (?, ?, ?, ?)
            ''', (media_id, custom_prompt_input, summary, datetime.now().strftime('%Y-%m-%d')))

            # Insert keywords and associate with media item
            logging.info("Processing keywords")
            for keyword in keyword_list:
                cursor.execute('INSERT OR IGNORE INTO Keywords (keyword) VALUES (?)', (keyword,))
                cursor.execute('SELECT id FROM Keywords WHERE keyword = ?', (keyword,))
                keyword_id = cursor.fetchone()[0]
                cursor.execute('INSERT OR IGNORE INTO MediaKeywords (media_id, keyword_id) VALUES (?, ?)',
                               (media_id, keyword_id))

            # Update full-text search index (rowid ties the FTS row to Media.id)
            logging.info("Updating full-text search index")
            cursor.execute('INSERT OR REPLACE INTO media_fts (rowid, title, content) VALUES (?, ?, ?)',
                           (media_id, info_dict.get('title', 'Untitled'), content))

            logging.info("Adding new media version")
            add_media_version(media_id, custom_prompt_input, summary)

            # Create initial document version
            create_document_version(media_id, content)

            conn.commit()

        logging.info(f"Media '{info_dict.get('title', 'Untitled')}' successfully added/updated with ID: {media_id}")

        return f"Media '{info_dict.get('title', 'Untitled')}' added/updated successfully with keywords: {', '.join(keyword_list)}"

    except sqlite3.Error as e:
        logging.error(f"SQL Error: {e}")
        raise DatabaseError(f"Error adding media with keywords: {e}")
    except Exception as e:
        logging.error(f"Unexpected Error: {e}")
        raise DatabaseError(f"Unexpected error: {e}")
1107
+
1108
+ #
1109
# End of media ingestion and search/export helper functions
1110
+ #######################################################################################################################
1111
+ #
1112
+ # Functions to manage prompts DB
1113
+
1114
def create_prompts_db():
    """Create the prompts.db schema (Prompts, Keywords, PromptKeywords).

    Idempotent: every statement uses IF NOT EXISTS, so the function is safe
    to call at import time and on every startup.
    """
    schema = '''
        CREATE TABLE IF NOT EXISTS Prompts (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            name TEXT NOT NULL UNIQUE,
            details TEXT,
            system TEXT,
            user TEXT
        );
        CREATE TABLE IF NOT EXISTS Keywords (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            keyword TEXT NOT NULL UNIQUE COLLATE NOCASE
        );
        CREATE TABLE IF NOT EXISTS PromptKeywords (
            prompt_id INTEGER,
            keyword_id INTEGER,
            FOREIGN KEY (prompt_id) REFERENCES Prompts (id),
            FOREIGN KEY (keyword_id) REFERENCES Keywords (id),
            PRIMARY KEY (prompt_id, keyword_id)
        );
        CREATE INDEX IF NOT EXISTS idx_keywords_keyword ON Keywords(keyword);
        CREATE INDEX IF NOT EXISTS idx_promptkeywords_prompt_id ON PromptKeywords(prompt_id);
        CREATE INDEX IF NOT EXISTS idx_promptkeywords_keyword_id ON PromptKeywords(keyword_id);
    '''
    with sqlite3.connect('prompts.db') as conn:
        # Connection.executescript runs the whole batch on an implicit cursor.
        conn.executescript(schema)
1140
+
1141
+
1142
def normalize_keyword(keyword):
    """Canonicalize a keyword: trim, lower-case, collapse internal whitespace."""
    trimmed = keyword.strip().lower()
    return re.sub(r'\s+', ' ', trimmed)
1144
+
1145
+
1146
def add_prompt(name, details, system, user=None, keywords=None):
    """Insert a new prompt (and optional keywords) into prompts.db.

    Returns a human-readable status string instead of raising; a duplicate
    name is reported via sqlite's UNIQUE constraint on Prompts.name.
    """
    # A unique name and a system prompt are both mandatory.
    if not name or not system:
        return "Name and system prompt are required."

    try:
        with sqlite3.connect('prompts.db') as conn:
            cursor = conn.cursor()
            cursor.execute('''
                INSERT INTO Prompts (name, details, system, user)
                VALUES (?, ?, ?, ?)
            ''', (name, details, system, user))
            prompt_id = cursor.lastrowid

            if keywords:
                # Normalize, then de-duplicate, before linking to the prompt.
                unique_keywords = {normalize_keyword(k) for k in keywords if k.strip()}
                for kw in unique_keywords:
                    cursor.execute('''
                        INSERT OR IGNORE INTO Keywords (keyword) VALUES (?)
                    ''', (kw,))
                    cursor.execute('SELECT id FROM Keywords WHERE keyword = ?', (kw,))
                    keyword_id = cursor.fetchone()[0]
                    cursor.execute('''
                        INSERT OR IGNORE INTO PromptKeywords (prompt_id, keyword_id) VALUES (?, ?)
                    ''', (prompt_id, keyword_id))
        # The connection context manager committed on exit.
        return "Prompt added successfully."
    except sqlite3.IntegrityError:
        return "Prompt with this name already exists."
    except sqlite3.Error as e:
        return f"Database error: {e}"
1175
+
1176
+
1177
def fetch_prompt_details(name):
    """Return (name, details, system, user, keyword-CSV) for a prompt.

    The keyword column is a comma-joined GROUP_CONCAT (None when the prompt
    has no keywords). Returns None when no prompt with *name* exists.
    """
    query = '''
        SELECT p.name, p.details, p.system, p.user, GROUP_CONCAT(k.keyword, ', ') as keywords
        FROM Prompts p
        LEFT JOIN PromptKeywords pk ON p.id = pk.prompt_id
        LEFT JOIN Keywords k ON pk.keyword_id = k.id
        WHERE p.name = ?
        GROUP BY p.id
    '''
    with sqlite3.connect('prompts.db') as conn:
        # Connection.execute creates an implicit cursor for the single query.
        return conn.execute(query, (name,)).fetchone()
1189
+
1190
+
1191
def list_prompts(page=1, per_page=10):
    """Return (prompt_names, total_pages, page) for one page of prompts.

    Page numbers are 1-based; rows come back in the database's natural order
    (no explicit ORDER BY).
    """
    offset = (page - 1) * per_page
    with sqlite3.connect('prompts.db') as conn:
        cursor = conn.cursor()
        cursor.execute('SELECT name FROM Prompts LIMIT ? OFFSET ?', (per_page, offset))
        names = [name for (name,) in cursor.fetchall()]

        # Total count drives the page-count calculation below.
        cursor.execute('SELECT COUNT(*) FROM Prompts')
        (total_count,) = cursor.fetchone()

    total_pages = (total_count + per_page - 1) // per_page  # ceiling division
    return names, total_pages, page
1204
+
1205
# This will not scale. For a large number of prompts, use a more efficient method.
# FIXME - see above statement.
def load_preset_prompts():
    """Return every prompt name in alphabetical order; [] on database error."""
    try:
        with sqlite3.connect('prompts.db') as conn:
            rows = conn.execute('SELECT name FROM Prompts ORDER BY name ASC').fetchall()
        return [name for (name,) in rows]
    except sqlite3.Error as e:
        # Errors are reported to stdout and swallowed so the UI still loads.
        print(f"Database error: {e}")
        return []
1217
+
1218
+
1219
def insert_prompt_to_db(title, description, system_prompt, user_prompt, keywords=None):
    # Thin alias over add_prompt(); kept so import-side callers can use
    # title/description naming. Returns add_prompt's status string.
    return add_prompt(title, description, system_prompt, user_prompt, keywords)
1221
+
1222
+
1223
def search_prompts_by_keyword(keyword, page=1, per_page=10):
    """Page through prompt names whose keywords contain *keyword*.

    The keyword is normalized and matched as a LIKE substring.
    Returns (prompt_names, total_pages, page).
    """
    pattern = '%' + normalize_keyword(keyword) + '%'
    offset = (page - 1) * per_page

    with sqlite3.connect('prompts.db') as conn:
        cursor = conn.cursor()

        cursor.execute('''
            SELECT DISTINCT p.name
            FROM Prompts p
            JOIN PromptKeywords pk ON p.id = pk.prompt_id
            JOIN Keywords k ON pk.keyword_id = k.id
            WHERE k.keyword LIKE ?
            LIMIT ? OFFSET ?
        ''', (pattern, per_page, offset))
        names = [row[0] for row in cursor.fetchall()]

        # Get total count of matching prompts for the pagination math.
        cursor.execute('''
            SELECT COUNT(DISTINCT p.id)
            FROM Prompts p
            JOIN PromptKeywords pk ON p.id = pk.prompt_id
            JOIN Keywords k ON pk.keyword_id = k.id
            WHERE k.keyword LIKE ?
        ''', (pattern,))
        (total_count,) = cursor.fetchone()

    total_pages = (total_count + per_page - 1) // per_page  # ceiling division
    return names, total_pages, page
1250
+
1251
+
1252
def update_prompt_keywords(prompt_name, new_keywords):
    """Replace a prompt's keyword set and prune orphaned keywords.

    Returns a human-readable status string; database errors are reported in
    the return value rather than raised.
    """
    try:
        with sqlite3.connect('prompts.db') as conn:
            cursor = conn.cursor()

            cursor.execute('SELECT id FROM Prompts WHERE name = ?', (prompt_name,))
            row = cursor.fetchone()
            if not row:
                return "Prompt not found."
            prompt_id = row[0]

            # Drop every existing association, then rebuild from new_keywords.
            cursor.execute('DELETE FROM PromptKeywords WHERE prompt_id = ?', (prompt_id,))

            for kw in {normalize_keyword(k) for k in new_keywords if k.strip()}:
                cursor.execute('INSERT OR IGNORE INTO Keywords (keyword) VALUES (?)', (kw,))
                cursor.execute('SELECT id FROM Keywords WHERE keyword = ?', (kw,))
                keyword_id = cursor.fetchone()[0]
                cursor.execute('INSERT INTO PromptKeywords (prompt_id, keyword_id) VALUES (?, ?)',
                               (prompt_id, keyword_id))

            # Garbage-collect keywords no longer referenced by any prompt.
            cursor.execute('''
                DELETE FROM Keywords
                WHERE id NOT IN (SELECT DISTINCT keyword_id FROM PromptKeywords)
            ''')
        return "Keywords updated successfully."
    except sqlite3.Error as e:
        return f"Database error: {e}"
1281
+
1282
+
1283
def add_or_update_prompt(title, description, system_prompt, user_prompt, keywords=None):
    """Create the prompt if it is new, otherwise update it and sync keywords.

    Returns a status string; keyword-sync status is appended after a
    successful update.
    """
    if not title:
        return "Error: Title is required."

    if fetch_prompt_details(title):
        # Existing prompt: update fields first, then keywords on success.
        result = update_prompt_in_db(title, description, system_prompt, user_prompt)
        if "successfully" in result:
            result += f" {update_prompt_keywords(title, keywords or [])}"
        return result

    # Brand-new prompt: a single insert handles fields and keywords together.
    return insert_prompt_to_db(title, description, system_prompt, user_prompt, keywords)
1300
+
1301
+
1302
def load_prompt_details(selected_prompt):
    """Return (name, details, system, user, keywords) for the UI selection.

    Falls back to five empty strings when nothing is selected or the prompt
    does not exist.
    """
    if selected_prompt:
        record = fetch_prompt_details(selected_prompt)
        if record:
            # First five columns, including the GROUP_CONCAT keyword CSV.
            return record[0], record[1], record[2], record[3], record[4]
    return "", "", "", "", ""
1308
+
1309
+
1310
def update_prompt_in_db(title, description, system_prompt, user_prompt):
    """Update an existing prompt's fields by name; returns a status string."""
    try:
        with sqlite3.connect('prompts.db') as conn:
            affected = conn.execute(
                "UPDATE Prompts SET details = ?, system = ?, user = ? WHERE name = ?",
                (description, system_prompt, user_prompt, title),
            ).rowcount
        # rowcount of 0 means no row matched the given title.
        if affected == 0:
            return "No prompt found with the given title."
        return "Prompt updated successfully!"
    except sqlite3.Error as e:
        return f"Error updating prompt: {e}"
1323
+
1324
+
1325
+ create_prompts_db()
1326
+
1327
def delete_prompt(prompt_id):
    """Delete a prompt and its keyword links by id; returns a status string."""
    try:
        with sqlite3.connect('prompts.db') as conn:
            cursor = conn.cursor()

            # Remove keyword associations first, then the prompt itself.
            cursor.execute("DELETE FROM PromptKeywords WHERE prompt_id = ?", (prompt_id,))
            cursor.execute("DELETE FROM Prompts WHERE id = ?", (prompt_id,))

            # rowcount reflects the Prompts delete only.
            if cursor.rowcount == 0:
                return f"No prompt found with ID {prompt_id}"
            conn.commit()
            return f"Prompt with ID {prompt_id} has been successfully deleted"
    except sqlite3.Error as e:
        return f"An error occurred: {e}"
1345
+
1346
+ #
1347
+ #
1348
+ #######################################################################################################################
1349
+ #
1350
+ # Function to fetch/update media content
1351
+
1352
def update_media_content(selected_item, item_mapping, content_input, prompt_input, summary_input):
    """Persist edited content/prompt/summary for the media item picked in the UI.

    Updates Media.content, upserts the matching MediaModifications row, and
    records a new document version via create_document_version.

    Args:
        selected_item: Display label chosen in the UI.
        item_mapping: Dict mapping display label -> media_id.
        content_input: Replacement full-text content for the Media row.
        prompt_input: Prompt text to store in MediaModifications.
        summary_input: Summary text to store in MediaModifications.

    Returns:
        Status string: success (including the new version number) or an
        error description; never raises.
    """
    try:
        if selected_item and item_mapping and selected_item in item_mapping:
            media_id = item_mapping[selected_item]

            with db.get_connection() as conn:
                cursor = conn.cursor()

                # Update the main content in the Media table
                cursor.execute("UPDATE Media SET content = ? WHERE id = ?", (content_input, media_id))

                # Check if a row already exists in MediaModifications for this media_id
                cursor.execute("SELECT COUNT(*) FROM MediaModifications WHERE media_id = ?", (media_id,))
                exists = cursor.fetchone()[0] > 0

                if exists:
                    # Update existing row
                    cursor.execute("""
                        UPDATE MediaModifications
                        SET prompt = ?, summary = ?, modification_date = CURRENT_TIMESTAMP
                        WHERE media_id = ?
                    """, (prompt_input, summary_input, media_id))
                else:
                    # Insert new row
                    cursor.execute("""
                        INSERT INTO MediaModifications (media_id, prompt, summary, modification_date)
                        VALUES (?, ?, ?, CURRENT_TIMESTAMP)
                    """, (media_id, prompt_input, summary_input))

                # Create new document version
                # NOTE(review): called before commit — presumably it uses the
                # same connection/transaction; verify create_document_version's
                # connection handling.
                new_version = create_document_version(media_id, content_input)

                conn.commit()

            return f"Content updated successfully for media ID: {media_id}. New version: {new_version}"
        else:
            return "No item selected or invalid selection"
    except Exception as e:
        logging.error(f"Error updating media content: {e}")
        return f"Error updating content: {str(e)}"
1392
+
1393
+
1394
def search_media_database(query: str) -> List[Tuple[int, str, str]]:
    """Return (id, title, url) for Media rows whose title contains *query*.

    Raises:
        Exception: Wrapping any underlying sqlite3 error.
    """
    try:
        with db.get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute("SELECT id, title, url FROM Media WHERE title LIKE ?", (f'%{query}%',))
            return cursor.fetchall()
    except sqlite3.Error as e:
        raise Exception(f"Error searching media database: {e}")
1403
+
1404
def load_media_content(media_id: int) -> dict:
    """Load content/prompt/summary for one Media row as a dict.

    Returns a dict of empty strings when the row does not exist.

    NOTE(review): this SELECT expects `prompt` and `summary` columns on the
    Media table, but the ingestion path in this file only writes those fields
    to MediaModifications — verify the Media schema actually defines them.

    Raises:
        Exception: Wrapping any underlying sqlite3 error.
    """
    try:
        with db.get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute("SELECT content, prompt, summary FROM Media WHERE id = ?", (media_id,))
            result = cursor.fetchone()
            if result:
                return {
                    "content": result[0],
                    "prompt": result[1],
                    "summary": result[2]
                }
            return {"content": "", "prompt": "", "summary": ""}
    except sqlite3.Error as e:
        raise Exception(f"Error loading media content: {e}")
1419
+
1420
+
1421
def fetch_items_by_title_or_url(search_query: str, search_type: str):
    """Search Media rows by title or URL substring.

    Args:
        search_query: Substring to match (wrapped in SQL LIKE wildcards).
        search_type: Either 'Title' or 'URL'.

    Returns:
        List of (id, title, url) tuples.

    Raises:
        ValueError: If search_type is not 'Title' or 'URL'. (Bug fix: the
            original fell through both branches and crashed with
            UnboundLocalError on `results` instead of reporting the bad input.)
        DatabaseError: On any underlying sqlite3 error.
    """
    # Validate before touching the database so bad input fails fast and clearly.
    if search_type not in ('Title', 'URL'):
        raise ValueError(f"Invalid search_type: {search_type!r}; expected 'Title' or 'URL'")
    try:
        with db.get_connection() as conn:
            cursor = conn.cursor()
            if search_type == 'Title':
                cursor.execute("SELECT id, title, url FROM Media WHERE title LIKE ?", (f'%{search_query}%',))
            else:
                cursor.execute("SELECT id, title, url FROM Media WHERE url LIKE ?", (f'%{search_query}%',))
            return cursor.fetchall()
    except sqlite3.Error as e:
        raise DatabaseError(f"Error fetching items by {search_type}: {e}")
1433
+
1434
+
1435
def fetch_items_by_keyword(search_query: str):
    """Return (id, title, url) of media whose keywords contain *search_query*.

    Raises:
        DatabaseError: On any underlying sqlite3 error.
    """
    try:
        with db.get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute("""
                SELECT m.id, m.title, m.url
                FROM Media m
                JOIN MediaKeywords mk ON m.id = mk.media_id
                JOIN Keywords k ON mk.keyword_id = k.id
                WHERE k.keyword LIKE ?
            """, (f'%{search_query}%',))
            return cursor.fetchall()
    except sqlite3.Error as e:
        raise DatabaseError(f"Error fetching items by keyword: {e}")
1450
+
1451
+
1452
def fetch_items_by_content(search_query: str):
    """Return (id, title, url) of media whose content contains *search_query*.

    Raises:
        DatabaseError: On any underlying sqlite3 error.
    """
    try:
        with db.get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute("SELECT id, title, url FROM Media WHERE content LIKE ?", (f'%{search_query}%',))
            return cursor.fetchall()
    except sqlite3.Error as e:
        raise DatabaseError(f"Error fetching items by content: {e}")
1461
+
1462
+
1463
def fetch_item_details_single(media_id: int):
    """Return (prompt, summary, content) for one media item.

    prompt/summary come from the most recent MediaModifications row; any
    missing piece defaults to an empty string.

    Raises:
        Exception: Wrapping any underlying sqlite3 error.
    """
    try:
        with db.get_connection() as conn:
            cursor = conn.cursor()
            # The newest modification row wins for prompt/summary.
            cursor.execute("""
                SELECT prompt, summary
                FROM MediaModifications
                WHERE media_id = ?
                ORDER BY modification_date DESC
                LIMIT 1
            """, (media_id,))
            mod_row = cursor.fetchone()

            cursor.execute("SELECT content FROM Media WHERE id = ?", (media_id,))
            content_row = cursor.fetchone()

        prompt = mod_row[0] if mod_row else ""
        summary = mod_row[1] if mod_row else ""
        content = content_row[0] if content_row else ""
        return prompt, summary, content
    except sqlite3.Error as e:
        raise Exception(f"Error fetching item details: {e}")
1485
+
1486
+
1487
+
1488
def convert_to_markdown(item):
    """Render a media item dict as a Markdown document.

    Expects keys: title, url, author, ingestion_date, type, keywords
    (iterable of strings), summary, content.
    """
    sections = [
        f"# {item['title']}",
        f"**URL:** {item['url']}",
        f"**Author:** {item['author']}",
        f"**Ingestion Date:** {item['ingestion_date']}",
        f"**Type:** {item['type']}",
        f"**Keywords:** {', '.join(item['keywords'])}",
        "## Summary",
        f"{item['summary']}",
        "## Content",
        f"{item['content']}",
    ]
    # Each section is followed by a blank line, including the last one.
    return "\n\n".join(sections) + "\n\n"
1500
+
1501
# Gradio function to handle user input and display results with pagination for displaying entries in the DB
def fetch_paginated_data(page: int, results_per_page: int) -> Tuple[List[Tuple], int]:
    """Return (rows, total_entries) for one page of Media (id, title, url).

    Raises:
        Exception: Wrapping any underlying sqlite3 error.
    """
    offset = (page - 1) * results_per_page
    try:
        with db.get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute("SELECT COUNT(*) FROM Media")
            (total_entries,) = cursor.fetchone()

            cursor.execute("SELECT id, title, url FROM Media LIMIT ? OFFSET ?", (results_per_page, offset))
            rows = cursor.fetchall()

        return rows, total_entries
    except sqlite3.Error as e:
        raise Exception(f"Error fetching paginated data: {e}")
1516
+
1517
def format_results_as_html(results: List[Tuple]) -> str:
    """Render (id, title, url) rows as a Bootstrap-striped HTML table."""
    header = "<tr><th>ID</th><th>Title</th><th>URL</th></tr>"
    body = "".join(
        f"<tr><td>{row[0]}</td><td>{row[1]}</td><td>{row[2]}</td></tr>"
        for row in results
    )
    return "<table class='table table-striped'>" + header + body + "</table>"
1524
+
1525
def view_database(page: int, results_per_page: int) -> Tuple[str, str, int]:
    """Return (results_html, page label, total_pages) for the DB browser tab."""
    rows, total_entries = fetch_paginated_data(page, results_per_page)
    total_pages = (total_entries + results_per_page - 1) // results_per_page  # ceiling division
    return format_results_as_html(rows), f"Page {page} of {total_pages}", total_pages
1531
+
1532
+
1533
def search_and_display_items(query, search_type, page, entries_per_page,char_count):
    """Search media_summary.db and render matching items as HTML cards.

    Args:
        query: Substring to match; LIKE wildcards are added here and the
            value is always bound as a parameter (no injection risk).
        search_type: One of 'Title', 'URL', 'Keyword', 'Content'; anything
            else raises ValueError.
        page: 1-based page number.
        entries_per_page: Page size.
        char_count: Number of content characters shown in the preview.

    Returns:
        (results_html, pagination_label, total_pages); on sqlite error the
        first element is an error paragraph and total_pages is 0.
    """
    offset = (page - 1) * entries_per_page
    try:
        with sqlite3.connect('media_summary.db') as conn:
            cursor = conn.cursor()

            # Adjust the SQL query based on the search type.
            # where_clause only ever comes from this fixed whitelist, so the
            # f-string interpolation into the SQL below is safe.
            if search_type == "Title":
                where_clause = "WHERE m.title LIKE ?"
            elif search_type == "URL":
                where_clause = "WHERE m.url LIKE ?"
            elif search_type == "Keyword":
                where_clause = "WHERE k.keyword LIKE ?"
            elif search_type == "Content":
                where_clause = "WHERE m.content LIKE ?"
            else:
                raise ValueError("Invalid search type")

            cursor.execute(f'''
                SELECT m.id, m.title, m.url, m.content, mm.summary, GROUP_CONCAT(k.keyword, ', ') as keywords
                FROM Media m
                LEFT JOIN MediaModifications mm ON m.id = mm.media_id
                LEFT JOIN MediaKeywords mk ON m.id = mk.media_id
                LEFT JOIN Keywords k ON mk.keyword_id = k.id
                {where_clause}
                GROUP BY m.id
                ORDER BY m.ingestion_date DESC
                LIMIT ? OFFSET ?
            ''', (f'%{query}%', entries_per_page, offset))
            items = cursor.fetchall()

            # Total match count for pagination. The MediaModifications join is
            # omitted here because the count never reads mm columns.
            cursor.execute(f'''
                SELECT COUNT(DISTINCT m.id)
                FROM Media m
                LEFT JOIN MediaKeywords mk ON m.id = mk.media_id
                LEFT JOIN Keywords k ON mk.keyword_id = k.id
                {where_clause}
            ''', (f'%{query}%',))
            total_items = cursor.fetchone()[0]

            results = ""
            for item in items:
                # Escape all DB-sourced text before embedding it in HTML.
                title = html.escape(item[1]).replace('\n', '<br>')
                url = html.escape(item[2]).replace('\n', '<br>')
                # First X amount of characters of the content
                content = html.escape(item[3] or '')[:char_count] + '...'
                summary = html.escape(item[4] or '').replace('\n', '<br>')
                keywords = html.escape(item[5] or '').replace('\n', '<br>')

                results += f"""
                <div style="border: 1px solid #ddd; padding: 10px; margin-bottom: 20px;">
                    <div style="display: grid; grid-template-columns: 1fr 1fr; gap: 10px;">
                        <div><strong>Title:</strong> {title}</div>
                        <div><strong>URL:</strong> {url}</div>
                    </div>
                    <div style="margin-top: 10px;">
                        <strong>Content (first {char_count} characters):</strong>
                        <pre style="white-space: pre-wrap; word-wrap: break-word;">{content}</pre>
                    </div>
                    <div style="margin-top: 10px;">
                        <strong>Summary:</strong>
                        <pre style="white-space: pre-wrap; word-wrap: break-word;">{summary}</pre>
                    </div>
                    <div style="margin-top: 10px;">
                        <strong>Keywords:</strong> {keywords}
                    </div>
                </div>
                """

            total_pages = (total_items + entries_per_page - 1) // entries_per_page
            pagination = f"Page {page} of {total_pages} (Total items: {total_items})"

            return results, pagination, total_pages
    except sqlite3.Error as e:
        return f"<p>Error searching items: {e}</p>", "Error", 0
1608
+
1609
+
1610
+ #
1611
+ # End of Functions to manage prompts DB / Fetch and update media content
1612
+ #######################################################################################################################
1613
+ #
1614
+ # Obsidian-related Functions
1615
+
1616
def import_obsidian_note_to_db(note_data):
    """Import (or update) one parsed Obsidian note as an 'obsidian_note' Media row.

    Expects note_data with keys: 'title', 'content', 'file_path', 'tags'
    (iterable of keyword strings) and 'frontmatter' (dict). Notes are keyed
    on (title, type='obsidian_note'); re-imports replace content and keywords.

    Returns:
        (True, None) on success, (False, error_message) on failure.
    """
    # Bug fix: initialize before the try block. Both except handlers read this
    # name, and it was previously unbound whenever the failure happened before
    # the existence SELECT completed, masking the real error with a NameError.
    existing_note = None
    try:
        with db.get_connection() as conn:
            cursor = conn.cursor()

            cursor.execute("SELECT id FROM Media WHERE title = ? AND type = 'obsidian_note'", (note_data['title'],))
            existing_note = cursor.fetchone()

            # Generate a relative path or meaningful identifier instead of using the temporary file path
            relative_path = os.path.relpath(note_data['file_path'], start=os.path.dirname(note_data['file_path']))

            if existing_note:
                media_id = existing_note[0]
                cursor.execute("""
                    UPDATE Media
                    SET content = ?, author = ?, ingestion_date = CURRENT_TIMESTAMP, url = ?
                    WHERE id = ?
                """, (note_data['content'], note_data['frontmatter'].get('author', 'Unknown'), relative_path, media_id))

                # Re-imported notes get a rebuilt keyword set below.
                cursor.execute("DELETE FROM MediaKeywords WHERE media_id = ?", (media_id,))
            else:
                cursor.execute("""
                    INSERT INTO Media (title, content, type, author, ingestion_date, url)
                    VALUES (?, ?, 'obsidian_note', ?, CURRENT_TIMESTAMP, ?)
                """, (note_data['title'], note_data['content'], note_data['frontmatter'].get('author', 'Unknown'),
                      relative_path))

                media_id = cursor.lastrowid

            for tag in note_data['tags']:
                cursor.execute("INSERT OR IGNORE INTO Keywords (keyword) VALUES (?)", (tag,))
                cursor.execute("SELECT id FROM Keywords WHERE keyword = ?", (tag,))
                keyword_id = cursor.fetchone()[0]
                cursor.execute("INSERT OR IGNORE INTO MediaKeywords (media_id, keyword_id) VALUES (?, ?)",
                               (media_id, keyword_id))

            # Frontmatter is preserved as a YAML blob in MediaModifications.
            frontmatter_str = yaml.dump(note_data['frontmatter'])
            cursor.execute("""
                INSERT INTO MediaModifications (media_id, prompt, summary, modification_date)
                VALUES (?, 'Obsidian Frontmatter', ?, CURRENT_TIMESTAMP)
            """, (media_id, frontmatter_str))

            # Update full-text search index
            cursor.execute('INSERT OR REPLACE INTO media_fts (rowid, title, content) VALUES (?, ?, ?)',
                           (media_id, note_data['title'], note_data['content']))

            # NOTE(review): no explicit conn.commit() here — confirm that
            # db.get_connection() commits on context exit, otherwise these
            # writes are silently lost.
            action = "Updated" if existing_note else "Imported"
            logger.info(f"{action} Obsidian note: {note_data['title']}")
            return True, None
    except sqlite3.Error as e:
        error_msg = f"Database error {'updating' if existing_note else 'importing'} note {note_data['title']}: {str(e)}"
        logger.error(error_msg)
        return False, error_msg
    except Exception as e:
        error_msg = f"Unexpected error {'updating' if existing_note else 'importing'} note {note_data['title']}: {str(e)}\n{traceback.format_exc()}"
        logger.error(error_msg)
        return False, error_msg
1673
+
1674
+
1675
+ #
1676
+ # End of Obsidian-related Functions
1677
+ #######################################################################################################################
1678
+ #
1679
+ # Chat-related Functions
1680
+
1681
+
1682
+
1683
def create_chat_conversation(media_id, conversation_name):
    """Insert a ChatConversations row and return its new id.

    Raises:
        DatabaseError: On any underlying sqlite3 error (also logged).
    """
    insert_sql = '''
        INSERT INTO ChatConversations (media_id, conversation_name, created_at, updated_at)
        VALUES (?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)
    '''
    try:
        with db.get_connection() as conn:
            cur = conn.cursor()
            cur.execute(insert_sql, (media_id, conversation_name))
            conn.commit()
            return cur.lastrowid
    except sqlite3.Error as e:
        logging.error(f"Error creating chat conversation: {e}")
        raise DatabaseError(f"Error creating chat conversation: {e}")
1696
+
1697
+
1698
def add_chat_message(conversation_id: int, sender: str, message: str) -> int:
    """Append one message to a conversation; returns the new message row id.

    Raises:
        DatabaseError: On any underlying sqlite3 error (also logged).
    """
    insert_sql = '''
        INSERT INTO ChatMessages (conversation_id, sender, message)
        VALUES (?, ?, ?)
    '''
    try:
        with db.get_connection() as conn:
            cur = conn.cursor()
            cur.execute(insert_sql, (conversation_id, sender, message))
            conn.commit()
            return cur.lastrowid
    except sqlite3.Error as e:
        logging.error(f"Error adding chat message: {e}")
        raise DatabaseError(f"Error adding chat message: {e}")
1711
+
1712
+
1713
def get_chat_messages(conversation_id: int) -> List[Dict[str, Any]]:
    """Return all messages of a conversation, oldest first, as dicts.

    Each dict carries 'id', 'sender', 'message', and 'timestamp'.

    Raises:
        DatabaseError: On any underlying sqlite3 error (also logged).
    """
    try:
        with db.get_connection() as conn:
            cur = conn.cursor()
            cur.execute('''
                SELECT id, sender, message, timestamp
                FROM ChatMessages
                WHERE conversation_id = ?
                ORDER BY timestamp ASC
            ''', (conversation_id,))
            rows = cur.fetchall()
        return [
            {'id': message_id, 'sender': sender, 'message': text, 'timestamp': ts}
            for (message_id, sender, text, ts) in rows
        ]
    except sqlite3.Error as e:
        logging.error(f"Error retrieving chat messages: {e}")
        raise DatabaseError(f"Error retrieving chat messages: {e}")
1736
+
1737
+
1738
def search_chat_conversations(search_query: str) -> List[Dict[str, Any]]:
    """Find conversations whose name or media title matches *search_query*.

    Returns dicts ordered by most recent activity; on database error the
    failure is logged and an empty list is returned (no raise).
    """
    pattern = f'%{search_query}%'
    try:
        with db.get_connection() as conn:
            cur = conn.cursor()
            cur.execute('''
                SELECT cc.id, cc.media_id, cc.conversation_name, cc.created_at, m.title as media_title
                FROM ChatConversations cc
                LEFT JOIN Media m ON cc.media_id = m.id
                WHERE cc.conversation_name LIKE ? OR m.title LIKE ?
                ORDER BY cc.updated_at DESC
            ''', (pattern, pattern))
            rows = cur.fetchall()
        return [
            {
                'id': conversation_id,
                'media_id': media_id,
                'conversation_name': conversation_name,
                'created_at': created_at,
                # LEFT JOIN can leave the media title NULL.
                'media_title': media_title or "Unknown Media",
            }
            for (conversation_id, media_id, conversation_name, created_at, media_title) in rows
        ]
    except sqlite3.Error as e:
        logging.error(f"Error searching chat conversations: {e}")
        return []
1763
+
1764
+
1765
def update_chat_message(message_id: int, new_message: str) -> None:
    """Replace a chat message's text and refresh its timestamp.

    Updating an id that does not exist is a silent no-op.  Raises
    DatabaseError when the SQLite update fails.
    """
    sql = '''
        UPDATE ChatMessages
        SET message = ?, timestamp = CURRENT_TIMESTAMP
        WHERE id = ?
    '''
    try:
        with db.get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute(sql, (new_message, message_id))
            conn.commit()
    except sqlite3.Error as e:
        logging.error(f"Error updating chat message: {e}")
        raise DatabaseError(f"Error updating chat message: {e}")
1778
+
1779
+
1780
def delete_chat_message(message_id: int) -> None:
    """Delete a single chat message by id.

    Deleting an id that does not exist is a silent no-op.  Raises
    DatabaseError when the SQLite delete fails.
    """
    try:
        with db.get_connection() as conn:
            conn.cursor().execute(
                'DELETE FROM ChatMessages WHERE id = ?', (message_id,)
            )
            conn.commit()
    except sqlite3.Error as e:
        logging.error(f"Error deleting chat message: {e}")
        raise DatabaseError(f"Error deleting chat message: {e}")
1789
+
1790
+
1791
def save_chat_history_to_database(chatbot, conversation_id, media_id, media_name, conversation_name):
    """Persist an in-memory chatbot history as a conversation plus messages.

    chatbot is an iterable of (user_message, ai_message) pairs.  When
    conversation_id is None a new ChatConversations row is created;
    otherwise the existing row's media_name is refreshed.  Every exchange
    is stored as a 'user' message followed by an 'ai' message, and the
    conversation's updated_at timestamp is bumped.  Returns the
    conversation id; logs and re-raises on any error.
    """
    try:
        with db.get_connection() as conn:
            cursor = conn.cursor()

            if conversation_id is None:
                # No existing conversation: create one and adopt its new id.
                cursor.execute('''
                    INSERT INTO ChatConversations (media_id, media_name, conversation_name, created_at, updated_at)
                    VALUES (?, ?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)
                ''', (media_id, media_name, conversation_name))
                conversation_id = cursor.lastrowid
            else:
                # Conversation already exists: keep its media_name current.
                cursor.execute('''
                    UPDATE ChatConversations
                    SET media_name = ?, updated_at = CURRENT_TIMESTAMP
                    WHERE id = ?
                ''', (media_name, conversation_id))

            insert_message = '''
                INSERT INTO ChatMessages (conversation_id, sender, message, timestamp)
                VALUES (?, ?, ?, CURRENT_TIMESTAMP)
            '''
            for user_msg, ai_msg in chatbot:
                cursor.execute(insert_message, (conversation_id, 'user', user_msg))
                cursor.execute(insert_message, (conversation_id, 'ai', ai_msg))

            # Record that the conversation was just modified.
            cursor.execute('''
                UPDATE ChatConversations
                SET updated_at = CURRENT_TIMESTAMP
                WHERE id = ?
            ''', (conversation_id,))

            conn.commit()
            return conversation_id
    except Exception as e:
        logging.error(f"Error saving chat history to database: {str(e)}")
        raise
1836
+
1837
+
1838
def get_conversation_name(conversation_id):
    """Return a display name for a conversation, or None.

    Prefers the stored conversation_name; falls back to
    "<media_name>-chat" when only a media name exists.  Returns None when
    conversation_id is None, the row is missing, or any error occurs
    (errors are logged, never raised).
    """
    if conversation_id is None:
        return None

    try:
        # Fix: use the module's shared connection manager instead of a
        # hard-coded sqlite3.connect('media_summary.db') path, which was
        # inconsistent with every other function here and depended on the
        # process's working directory.
        with db.get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute(
                """
                SELECT conversation_name, media_name
                FROM ChatConversations
                WHERE id = ?
                """,
                (conversation_id,),
            )
            result = cursor.fetchone()

        if result:
            conversation_name, media_name = result
            if conversation_name:
                return conversation_name
            elif media_name:
                return f"{media_name}-chat"

        return None  # Return None if no result found
    except sqlite3.Error as e:
        logging.error(f"Database error in get_conversation_name: {e}")
        return None
    except Exception as e:
        logging.error(f"Unexpected error in get_conversation_name: {e}")
        return None
1869
+
1870
+ #
1871
+ # End of Chat-related Functions
1872
+ #######################################################################################################################
1873
+
1874
+
1875
+ #######################################################################################################################
1876
+ #
1877
+ # Functions to Compare Transcripts
1878
+
1879
+ # Fetch Transcripts
1880
def get_transcripts(media_id):
    """Fetch all transcripts for a media item, newest first.

    Returns a list of (id, whisper_model, transcription, created_at)
    rows; on any error, logs it and returns an empty list.
    """
    query = '''
        SELECT id, whisper_model, transcription, created_at
        FROM Transcripts
        WHERE media_id = ?
        ORDER BY created_at DESC
    '''
    try:
        with db.get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute(query, (media_id,))
            return cursor.fetchall()
    except Exception as e:
        logging.error(f"Error in get_transcripts: {str(e)}")
        return []
1894
+
1895
+
1896
+ #
1897
+ # End of Functions to Compare Transcripts
1898
+ #######################################################################################################################
1899
+
1900
+
1901
+ #######################################################################################################################
1902
+ #
1903
+ # Functions to handle deletion of media items
1904
+
1905
+
1906
def mark_as_trash(media_id: int) -> None:
    """Soft-delete a media item: flag it as trash and stamp the trash time."""
    with db.get_connection() as conn:
        conn.cursor().execute(
            """
            UPDATE Media
            SET is_trash = 1, trash_date = ?
            WHERE id = ?
            """,
            (datetime.now(), media_id),
        )
        conn.commit()
1915
+
1916
+
1917
def restore_from_trash(media_id: int) -> None:
    """Undo a soft-delete: clear the trash flag and its timestamp."""
    with db.get_connection() as conn:
        conn.cursor().execute(
            """
            UPDATE Media
            SET is_trash = 0, trash_date = NULL
            WHERE id = ?
            """,
            (media_id,),
        )
        conn.commit()
1926
+
1927
+
1928
def get_trashed_items() -> List[Dict]:
    """List soft-deleted media items, most recently trashed first.

    Each element is a dict with keys 'id', 'title' and 'trash_date'.
    """
    with db.get_connection() as conn:
        cursor = conn.cursor()
        cursor.execute(
            """
            SELECT id, title, trash_date
            FROM Media
            WHERE is_trash = 1
            ORDER BY trash_date DESC
            """
        )
        keys = ('id', 'title', 'trash_date')
        return [dict(zip(keys, row)) for row in cursor.fetchall()]
1938
+
1939
+
1940
def permanently_delete_item(media_id: int) -> None:
    """Hard-delete a media item and all of its dependent rows.

    Removes the Media row plus its keywords, versions, modifications and
    full-text-search entry in a single commit.
    """
    statements = (
        "DELETE FROM Media WHERE id = ?",
        "DELETE FROM MediaKeywords WHERE media_id = ?",
        "DELETE FROM MediaVersion WHERE media_id = ?",
        "DELETE FROM MediaModifications WHERE media_id = ?",
        "DELETE FROM media_fts WHERE rowid = ?",
    )
    with db.get_connection() as conn:
        cursor = conn.cursor()
        for stmt in statements:
            cursor.execute(stmt, (media_id,))
        conn.commit()
1949
+
1950
+
1951
def empty_trash(days_threshold: int) -> Tuple[int, int]:
    """Permanently delete trashed items older than days_threshold days.

    Returns (deleted_count, items_still_in_trash).

    NOTE(review): permanently_delete_item opens its own connection while
    this one is still held; SQLite tolerates this under its default
    settings, but confirm if the connection strategy ever changes.
    """
    cutoff = datetime.now() - timedelta(days=days_threshold)
    with db.get_connection() as conn:
        cursor = conn.cursor()
        cursor.execute(
            """
            SELECT id FROM Media
            WHERE is_trash = 1 AND trash_date <= ?
            """,
            (cutoff,),
        )
        expired = cursor.fetchall()

        for (item_id,) in expired:
            permanently_delete_item(item_id)

        cursor.execute(
            """
            SELECT COUNT(*) FROM Media
            WHERE is_trash = 1 AND trash_date > ?
            """,
            (cutoff,),
        )
        remaining = cursor.fetchone()[0]

        return len(expired), remaining
1971
+
1972
+
1973
def user_delete_item(media_id: int, force: bool = False) -> str:
    """Two-stage delete: first call trashes the item, later calls purge it.

    An untrashed item is soft-deleted.  A trashed item is permanently
    deleted when force=True or it has been in the trash for 30+ days;
    otherwise it stays in the trash.  Returns a human-readable status
    string describing what happened.
    """
    with db.get_connection() as conn:
        cursor = conn.cursor()
        cursor.execute("SELECT is_trash, trash_date FROM Media WHERE id = ?", (media_id,))
        result = cursor.fetchone()

    if not result:
        return "Item not found."

    is_trash, trash_date = result

    if not is_trash:
        mark_as_trash(media_id)
        return "Item moved to trash."

    # Fix: SQLite hands trash_date back as TEXT (it was stored via the
    # default datetime adapter in mark_as_trash), so subtracting it from
    # datetime.now() raised TypeError.  Parse it back into a datetime;
    # on an unparseable value fall back to the "still in trash" branch.
    if isinstance(trash_date, str):
        try:
            trash_date = datetime.fromisoformat(trash_date)
        except ValueError:
            trash_date = None

    if force or (trash_date and (datetime.now() - trash_date).days >= 30):
        permanently_delete_item(media_id)
        return "Item permanently deleted."
    else:
        return "Item is already in trash. Use force=True to delete permanently before 30 days."
1993
+
1994
+ #
1995
+ # End of Functions to handle deletion of media items
1996
+ #######################################################################################################################
1997
+ #
1998
+ # Functions to manage document versions
1999
+
2000
def create_document_version(media_id: int, content: str) -> int:
    """Store a new version of a document's content and return its number.

    Version numbers are per-media and start at 1.  Raises DatabaseError
    when the SQLite operation fails.
    """
    try:
        with db.get_connection() as conn:
            cursor = conn.cursor()

            # Next version is one past the highest existing (0 when none).
            cursor.execute('''
                SELECT MAX(version_number)
                FROM DocumentVersions
                WHERE media_id = ?
            ''', (media_id,))
            highest = cursor.fetchone()[0] or 0
            next_version = highest + 1

            cursor.execute('''
                INSERT INTO DocumentVersions (media_id, version_number, content)
                VALUES (?, ?, ?)
            ''', (media_id, next_version, content))

            conn.commit()
            return next_version
    except sqlite3.Error as e:
        logging.error(f"Error creating document version: {e}")
        raise DatabaseError(f"Error creating document version: {e}")
2026
+
2027
+
2028
def get_document_version(media_id: int, version_number: int = None) -> Dict[str, Any]:
    """Fetch one document version, defaulting to the latest.

    Returns a dict with keys 'id', 'version_number', 'content' and
    'created_at', or None when no matching version exists.  Raises
    DatabaseError when the SQLite query fails.
    """
    base_query = '''
        SELECT id, version_number, content, created_at
        FROM DocumentVersions
        WHERE media_id = ?
    '''
    if version_number is None:
        # Latest version: highest version_number wins.
        query = base_query + ' ORDER BY version_number DESC LIMIT 1'
        params = (media_id,)
    else:
        query = base_query + ' AND version_number = ?'
        params = (media_id, version_number)

    try:
        with db.get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute(query, params)
            row = cursor.fetchone()

        if row is None:
            return None
        return {
            'id': row[0],
            'version_number': row[1],
            'content': row[2],
            'created_at': row[3],
        }
    except sqlite3.Error as e:
        logging.error(f"Error retrieving document version: {e}")
        raise DatabaseError(f"Error retrieving document version: {e}")
2063
+
2064
+ #
2065
+ # End of Functions to manage document versions
2066
+ #######################################################################################################################
App_Function_Libraries/DB/__init__.py ADDED
File without changes