oceansweep committed on
Commit
49ba24f
·
verified ·
1 Parent(s): 6d96d95

Delete SQLite_DB.py

Browse files
Files changed (1) hide show
  1. SQLite_DB.py +0 -1826
SQLite_DB.py DELETED
@@ -1,1826 +0,0 @@
1
- # SQLite_DB.py
2
- #########################################
3
- # SQLite_DB Library
4
- # This library is used to perform any/all DB operations related to SQLite.
5
- #
6
- ####
7
-
8
- ####################
9
- # Function List
10
- # FIXME - UPDATE Function Arguments
11
- # 1. get_connection(self)
12
- # 2. execute_query(self, query: str, params: Tuple = ())
13
- # 3. create_tables()
14
- # 4. add_keyword(keyword: str)
15
- # 5. delete_keyword(keyword: str)
16
- # 6. add_media_with_keywords(url, title, media_type, content, keywords, prompt, summary, transcription_model, author, ingestion_date)
17
- # 7. fetch_all_keywords()
18
- # 8. keywords_browser_interface()
19
- # 9. display_keywords()
20
- # 10. export_keywords_to_csv()
21
- # 11. browse_items(search_query, search_type)
22
- # 12. fetch_item_details(media_id: int)
23
- # 13. add_media_version(media_id: int, prompt: str, summary: str)
24
- # 14. search_db(search_query: str, search_fields: List[str], keywords: str, page: int = 1, results_per_page: int = 10)
25
- # 15. search_and_display(search_query, search_fields, keywords, page)
26
- # 16. display_details(index, results)
27
- # 17. get_details(index, dataframe)
28
- # 18. format_results(results)
29
- # 19. export_to_csv(search_query: str, search_fields: List[str], keyword: str, page: int = 1, results_per_file: int = 1000)
30
- # 20. is_valid_url(url: str) -> bool
31
- # 21. is_valid_date(date_string: str) -> bool
32
- # 22. add_media_to_database(url, info_dict, segments, summary, keywords, custom_prompt_input, whisper_model)
33
- # 23. create_prompts_db()
34
- # 24. add_prompt(name, details, system, user=None)
35
- # 25. fetch_prompt_details(name)
36
- # 26. list_prompts()
37
- # 27. insert_prompt_to_db(title, description, system_prompt, user_prompt)
38
- # 28. update_media_content(media_id: int, content: str, prompt: str, summary: str)
39
- # 29. search_media_database(query: str) -> List[Tuple[int, str, str]]
40
- # 30. load_media_content(media_id: int)
41
- # 31.
42
- # 32.
43
- #
44
- #
45
- #####################
46
- #
47
- # Import necessary libraries
48
- import csv
49
- import html
50
- import logging
51
- import os
52
- import re
53
- import shutil
54
- import sqlite3
55
- import time
56
- import traceback
57
- from contextlib import contextmanager
58
- from datetime import datetime, timedelta
59
- from typing import List, Tuple, Dict, Any
60
-
61
- # Third-Party Libraries
62
- import gradio as gr
63
- import pandas as pd
64
- import yaml
65
-
66
-
67
- # Import Local Libraries
68
- #
69
- #######################################################################################################################
70
- # Function Definitions
71
- #
72
-
73
- # Set up logging
74
- #logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
75
- #logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
76
- logger = logging.getLogger(__name__)
77
-
78
-
79
- #
80
- # Backup-related functions
81
-
82
def create_incremental_backup(db_path, backup_dir):
    """Create a timestamped backup of the SQLite DB at *db_path* using VACUUM INTO.

    Returns the path of the backup file written into *backup_dir*.
    """
    conn = sqlite3.connect(db_path)
    try:
        # Timestamped name so successive backups never collide.
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        backup_file = os.path.join(backup_dir, f"incremental_backup_{timestamp}.sqlib")

        # VACUUM INTO writes a compacted copy of the whole database. Bind the
        # filename as a parameter instead of interpolating it into the SQL,
        # which broke on paths containing quotes.
        conn.execute("VACUUM INTO ?", (backup_file,))
    finally:
        # Close the handle even if the backup statement raises.
        conn.close()

    print(f"Incremental backup created: {backup_file}")
    return backup_file
100
-
101
-
102
def create_automated_backup(db_path, backup_dir):
    """Copy the database file into *backup_dir* under a timestamped name."""
    # Make sure the destination directory is present before copying.
    os.makedirs(backup_dir, exist_ok=True)

    # A timestamp in the name keeps every backup distinct.
    stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    destination = os.path.join(backup_dir, f"backup_{stamp}.db")

    # copy2 preserves file metadata along with the contents.
    shutil.copy2(db_path, destination)

    print(f"Backup created: {destination}")
    return destination
115
-
116
- # FIXME - boto3 aint getting installed by default....
117
- # def upload_to_s3(file_path, bucket_name, s3_key):
118
- # import boto3
119
- # s3 = boto3.client('s3')
120
- # try:
121
- # s3.upload_file(file_path, bucket_name, s3_key)
122
- # print(f"File uploaded to S3: {s3_key}")
123
- # except Exception as e:
124
- # print(f"Error uploading to S3: {str(e)}")
125
-
126
-
127
def rotate_backups(backup_dir, max_backups=10):
    """Delete the oldest .db backups in *backup_dir*, keeping at most *max_backups*."""
    # Sort oldest-first so the files to discard sit at the front of the list.
    candidates = [name for name in os.listdir(backup_dir) if name.endswith('.db')]
    candidates.sort(key=lambda name: os.path.getmtime(os.path.join(backup_dir, name)))

    excess = len(candidates) - max_backups
    for stale in candidates[:max(excess, 0)]:
        os.remove(os.path.join(backup_dir, stale))
        print(f"Removed old backup: {stale}")
138
-
139
-
140
# FIXME - Setup properly and test/add documentation for its existence...
# Placeholder paths for the backup helpers above; they are only used if the
# commented-out backup/rotation calls below are re-enabled.
db_path = "path/to/your/database.db"
backup_dir = "path/to/backup/directory"
143
- #create_automated_backup(db_path, backup_dir)
144
-
145
- # FIXME - Setup properly and test/add documentation for its existence...
146
- #backup_file = create_automated_backup(db_path, backup_dir)
147
- #upload_to_s3(backup_file, 'your-s3-bucket-name', f"database_backups/{os.path.basename(backup_file)}")
148
-
149
- # FIXME - Setup properly and test/add documentation for its existence...
150
- #create_incremental_backup(db_path, backup_dir)
151
-
152
- # FIXME - Setup properly and test/add documentation for its existence...
153
- #rotate_backups(backup_dir)
154
-
155
- #
156
- #
157
- #######################################################################################################################
158
- #
159
- # DB-Integrity Check Functions
160
-
161
def check_database_integrity(db_path):
    """Run PRAGMA integrity_check on the DB at *db_path*; return True if it passes."""
    conn = sqlite3.connect(db_path)
    try:
        cursor = conn.cursor()
        cursor.execute("PRAGMA integrity_check")
        result = cursor.fetchone()
    finally:
        # Close even when the PRAGMA raises, so the handle never leaks.
        conn.close()

    if result[0] == "ok":
        print("Database integrity check passed.")
        return True
    else:
        print("Database integrity check failed:", result[0])
        return False
176
-
177
- #check_database_integrity(db_path)
178
-
179
- #
180
- # End of DB-Integrity Check functions
181
- #######################################################################################################################
182
- #
183
- # Media-related Functions
184
-
185
- # Custom exceptions
186
class DatabaseError(Exception):
    """Raised when an SQLite operation performed by this module fails."""
    pass
188
-
189
-
190
class InputError(Exception):
    """Raised when caller-supplied input fails validation."""
    pass
192
-
193
-
194
- # Database connection function with connection pooling
195
class Database:
    """Thin SQLite wrapper with a small idle-connection pool and retry-on-lock."""

    def __init__(self, db_name=None):
        # DB file comes from the DB_NAME env var unless given explicitly.
        self.db_name = db_name or os.getenv('DB_NAME', 'media_summary.db')
        self.pool = []        # idle connections available for reuse
        self.pool_size = 10   # cap on idle connections retained

    @contextmanager
    def get_connection(self):
        """Yield a connection, retrying acquisition while SQLite reports a lock.

        A @contextmanager generator may only yield once, so the retry loop
        covers acquiring the connection; the original looped around the yield,
        which raises RuntimeError when an exception is thrown into it. The
        original also returned the connection to the pool twice on success
        (explicit append plus the finally append), so the same connection
        could be handed to two users; here it is handed back exactly once.

        Raises:
            DatabaseError: lock retries exhausted, or any other failure.
        """
        retries_left = 5
        retry_delay = 1
        conn = None
        while conn is None:
            try:
                conn = self.pool.pop() if self.pool else sqlite3.connect(self.db_name, check_same_thread=False)
            except sqlite3.OperationalError as e:
                if 'database is locked' not in str(e):
                    raise DatabaseError(f"Database error: {e}")
                if retries_left == 0:
                    raise DatabaseError("Database is locked and retries have been exhausted")
                logging.warning(f"Database is locked, retrying in {retry_delay} seconds...")
                retries_left -= 1
                time.sleep(retry_delay)
        try:
            yield conn
        except DatabaseError:
            # Already wrapped by a caller (e.g. execute_query); don't re-wrap.
            raise
        except sqlite3.Error as e:
            raise DatabaseError(f"Database error: {e}")
        except Exception as e:
            raise DatabaseError(f"Unexpected error: {e}")
        finally:
            # Single hand-back point: pool the connection, or close it when
            # the pool is already at capacity.
            if len(self.pool) < self.pool_size:
                self.pool.append(conn)
            else:
                conn.close()

    def execute_query(self, query: str, params: Tuple = ()) -> None:
        """Execute *query* with *params* and commit; wrap failures in DatabaseError."""
        with self.get_connection() as conn:
            try:
                cursor = conn.cursor()
                cursor.execute(query, params)
                conn.commit()
            except sqlite3.Error as e:
                # Undo any partial work before surfacing the error.
                conn.rollback()
                raise DatabaseError(f"Database error: {e}, Query: {query}")
235
-
236
# Shared module-level Database instance used by every helper below.
db = Database()
237
-
238
-
239
- # Function to create tables with the new media schema
240
def create_tables() -> None:
    """Create every table, index and FTS virtual table the app needs.

    All statements use IF NOT EXISTS, so this is safe to run on every import.
    The trash columns are retro-fitted via ALTER TABLE for databases created
    before they were added to the schema.
    """
    table_queries = [
        # CREATE TABLE statements
        '''
        CREATE TABLE IF NOT EXISTS Media (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            url TEXT,
            title TEXT NOT NULL,
            type TEXT NOT NULL,
            content TEXT,
            author TEXT,
            ingestion_date TEXT,
            prompt TEXT,
            summary TEXT,
            transcription_model TEXT,
            is_trash BOOLEAN DEFAULT 0,
            trash_date DATETIME
        )
        ''',
        '''
        CREATE TABLE IF NOT EXISTS Keywords (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            keyword TEXT NOT NULL UNIQUE
        )
        ''',
        '''
        CREATE TABLE IF NOT EXISTS MediaKeywords (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            media_id INTEGER NOT NULL,
            keyword_id INTEGER NOT NULL,
            FOREIGN KEY (media_id) REFERENCES Media(id),
            FOREIGN KEY (keyword_id) REFERENCES Keywords(id)
        )
        ''',
        '''
        CREATE TABLE IF NOT EXISTS MediaVersion (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            media_id INTEGER NOT NULL,
            version INTEGER NOT NULL,
            prompt TEXT,
            summary TEXT,
            created_at TEXT NOT NULL,
            FOREIGN KEY (media_id) REFERENCES Media(id)
        )
        ''',
        '''
        CREATE TABLE IF NOT EXISTS MediaModifications (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            media_id INTEGER NOT NULL,
            prompt TEXT,
            summary TEXT,
            modification_date TEXT,
            FOREIGN KEY (media_id) REFERENCES Media(id)
        )
        ''',
        '''
        CREATE TABLE IF NOT EXISTS ChatConversations (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            media_id INTEGER,
            conversation_name TEXT,
            created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
            updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
            FOREIGN KEY (media_id) REFERENCES Media(id)
        )
        ''',
        '''
        CREATE TABLE IF NOT EXISTS ChatMessages (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            conversation_id INTEGER,
            sender TEXT,
            message TEXT,
            timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
            FOREIGN KEY (conversation_id) REFERENCES ChatConversations(id)
        )
        ''',
        '''
        CREATE TABLE IF NOT EXISTS Transcripts (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            media_id INTEGER,
            whisper_model TEXT,
            transcription TEXT,
            created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
            FOREIGN KEY (media_id) REFERENCES Media(id)
        )
        ''',

        # CREATE INDEX statements
        'CREATE INDEX IF NOT EXISTS idx_media_title ON Media(title);',
        'CREATE INDEX IF NOT EXISTS idx_media_type ON Media(type);',
        'CREATE INDEX IF NOT EXISTS idx_media_author ON Media(author);',
        'CREATE INDEX IF NOT EXISTS idx_media_ingestion_date ON Media(ingestion_date);',
        'CREATE INDEX IF NOT EXISTS idx_keywords_keyword ON Keywords(keyword);',
        'CREATE INDEX IF NOT EXISTS idx_mediakeywords_media_id ON MediaKeywords(media_id);',
        'CREATE INDEX IF NOT EXISTS idx_mediakeywords_keyword_id ON MediaKeywords(keyword_id);',
        'CREATE INDEX IF NOT EXISTS idx_media_version_media_id ON MediaVersion(media_id);',
        'CREATE INDEX IF NOT EXISTS idx_mediamodifications_media_id ON MediaModifications(media_id);',
        'CREATE INDEX IF NOT EXISTS idx_chatconversations_media_id ON ChatConversations(media_id);',
        'CREATE INDEX IF NOT EXISTS idx_chatmessages_conversation_id ON ChatMessages(conversation_id);',
        'CREATE INDEX IF NOT EXISTS idx_media_is_trash ON Media(is_trash);',

        # CREATE UNIQUE INDEX statements
        'CREATE UNIQUE INDEX IF NOT EXISTS idx_unique_media_url ON Media(url);',
        'CREATE UNIQUE INDEX IF NOT EXISTS idx_unique_media_keyword ON MediaKeywords(media_id, keyword_id);',

        # CREATE VIRTUAL TABLE statements (full-text search)
        'CREATE VIRTUAL TABLE IF NOT EXISTS media_fts USING fts5(title, content);',
        'CREATE VIRTUAL TABLE IF NOT EXISTS keyword_fts USING fts5(keyword);',
    ]

    # Run each DDL statement once (the original executed this loop twice).
    for query in table_queries:
        db.execute_query(query)

    # Add the trash columns to Media if they don't exist yet. SQLite raises an
    # error when a column is already present, which we can safely ignore.
    alter_queries = [
        "ALTER TABLE Media ADD COLUMN is_trash BOOLEAN DEFAULT 0;",
        "ALTER TABLE Media ADD COLUMN trash_date DATETIME;"
    ]
    for query in alter_queries:
        try:
            db.execute_query(query)
        except Exception as e:
            logging.debug(f"Note: {str(e)}")

    logging.info("All tables and indexes created successfully.")
401
-
402
- create_tables()
403
-
404
-
405
def check_media_exists(title, url):
    """Check if media with the given title or URL exists in the database."""
    with db.get_connection() as conn:
        row = conn.cursor().execute(
            "SELECT id FROM Media WHERE title = ? OR url = ?", (title, url)
        ).fetchone()
    return row is not None
412
-
413
-
414
def check_media_and_whisper_model(title=None, url=None, current_whisper_model=None):
    """
    Check if media exists in the database and compare the whisper model used.

    :param title: Title of the media (optional)
    :param url: URL of the media (optional)
    :param current_whisper_model: The whisper model currently selected for use
    :return: Tuple (bool, str) - (should_download, reason)
    """
    if not title and not url:
        return True, "No title or URL provided"

    with db.get_connection() as conn:
        cursor = conn.cursor()

        # First, find the media_id.
        # The lookup matches on title, url, or either (OR) when both are given.
        query = "SELECT id FROM Media WHERE "
        params = []

        if title:
            query += "title = ?"
            params.append(title)

        if url:
            if params:
                query += " OR "
            query += "url = ?"
            params.append(url)

        cursor.execute(query, tuple(params))
        result = cursor.fetchone()

        if not result:
            return True, "Media not found in database"

        media_id = result[0]

        # Now, get the latest transcript for this media.
        cursor.execute("""
        SELECT transcription
        FROM Transcripts
        WHERE media_id = ?
        ORDER BY created_at DESC
        LIMIT 1
        """, (media_id,))

        transcript_result = cursor.fetchone()

        if not transcript_result:
            return True, f"No transcript found for media (ID: {media_id})"

        transcription = transcript_result[0]

        # Extract the whisper model from the transcription.
        # Assumes the ingestion pipeline embeds this marker line in the
        # transcript text — TODO confirm against the transcript writer.
        match = re.search(r"This text was transcribed using whisper model: (.+)$", transcription, re.MULTILINE)
        if not match:
            return True, f"Whisper model information not found in transcript (Media ID: {media_id})"

        db_whisper_model = match.group(1).strip()

        # With no model requested, existing media is enough — skip the download.
        if not current_whisper_model:
            return False, f"Media found in database (ID: {media_id})"

        # Re-download only when the stored transcript used a different model.
        if db_whisper_model != current_whisper_model:
            return True, f"Different whisper model (DB: {db_whisper_model}, Current: {current_whisper_model})"

        return False, f"Media found with same whisper model (ID: {media_id})"
481
-
482
-
483
- #######################################################################################################################
484
- # Keyword-related Functions
485
- #
486
-
487
- # Function to add a keyword
488
def add_keyword(keyword: str) -> int:
    """Insert *keyword* (normalized to lowercase) and return its Keywords.id.

    The keyword is also mirrored into the keyword_fts full-text index.
    Raises DatabaseError on any SQLite failure.
    """
    keyword = keyword.strip().lower()
    with db.get_connection() as conn:
        cursor = conn.cursor()
        try:
            # INSERT OR IGNORE keeps this idempotent for already-known keywords.
            cursor.execute('INSERT OR IGNORE INTO Keywords (keyword) VALUES (?)', (keyword,))
            cursor.execute('SELECT id FROM Keywords WHERE keyword = ?', (keyword,))
            keyword_id = cursor.fetchone()[0]
            # Mirror into the FTS table under the same rowid.
            cursor.execute('INSERT OR IGNORE INTO keyword_fts (rowid, keyword) VALUES (?, ?)', (keyword_id, keyword))
            logging.info(f"Keyword '{keyword}' added to keyword_fts with ID: {keyword_id}")
            conn.commit()
        except sqlite3.IntegrityError as e:
            logging.error(f"Integrity error adding keyword: {e}")
            raise DatabaseError(f"Integrity error adding keyword: {e}")
        except sqlite3.Error as e:
            logging.error(f"Error adding keyword: {e}")
            raise DatabaseError(f"Error adding keyword: {e}")
        return keyword_id
506
-
507
-
508
- # Function to delete a keyword
509
def delete_keyword(keyword: str) -> str:
    """Remove *keyword* from Keywords and its FTS mirror; return a status message."""
    keyword = keyword.strip().lower()
    with db.get_connection() as conn:
        cursor = conn.cursor()
        try:
            row = cursor.execute('SELECT id FROM Keywords WHERE keyword = ?', (keyword,)).fetchone()
            if row is None:
                return f"Keyword '{keyword}' not found."
            # Delete from the main table and the FTS mirror in one transaction.
            cursor.execute('DELETE FROM Keywords WHERE keyword = ?', (keyword,))
            cursor.execute('DELETE FROM keyword_fts WHERE rowid = ?', (row[0],))
            conn.commit()
            return f"Keyword '{keyword}' deleted successfully."
        except sqlite3.Error as e:
            raise DatabaseError(f"Error deleting keyword: {e}")
525
-
526
-
527
-
528
- # Function to add media with keywords
529
def add_media_with_keywords(url, title, media_type, content, keywords, prompt, summary, transcription_model, author,
                            ingestion_date):
    """Insert a new Media row or update an existing one (matched by URL), then
    record keywords, the FTS index entry, a MediaModifications row and a new
    MediaVersion, all in one transaction.

    Returns a human-readable status string.

    Raises:
        InputError: invalid media type or ingestion date.
        DatabaseError: any SQL failure (the transaction is rolled back).
    """
    # Substitute defaults for any missing fields.
    url = url or 'Unknown'
    title = title or 'Untitled'
    media_type = media_type or 'Unknown'
    content = content or 'No content available'
    keywords = keywords or 'default'
    prompt = prompt or 'No prompt available'
    summary = summary or 'No summary available'
    transcription_model = transcription_model or 'Unknown'
    author = author or 'Unknown'
    ingestion_date = ingestion_date or datetime.now().strftime('%Y-%m-%d')

    # Ensure URL is valid
    if not is_valid_url(url):
        url = 'localhost'

    if media_type not in ['article', 'audio', 'document', 'obsidian_note', 'podcast', 'text', 'video', 'unknown']:
        # Message now matches the list actually checked above.
        raise InputError("Invalid media type. Allowed types: article, audio, document, obsidian_note, podcast, text, video, unknown.")

    if ingestion_date and not is_valid_date(ingestion_date):
        raise InputError("Invalid ingestion date format. Use YYYY-MM-DD.")

    # Accept keywords as a comma-separated string or a list.
    if isinstance(keywords, str):
        keyword_list = [keyword.strip().lower() for keyword in keywords.split(',')]
    elif isinstance(keywords, list):
        keyword_list = [keyword.strip().lower() for keyword in keywords]
    else:
        keyword_list = ['default']

    logging.info(f"Adding/updating media: URL={url}, Title={title}, Type={media_type}")
    logging.debug(f"Content (first 500 chars): {content[:500]}...")
    logging.debug(f"Keywords: {keyword_list}")
    logging.info(f"Prompt: {prompt}")
    logging.info(f"Summary: {summary}")
    logging.info(f"Author: {author}")
    logging.info(f"Ingestion Date: {ingestion_date}")
    logging.info(f"Transcription Model: {transcription_model}")

    # The error handlers live INSIDE the with-block so `conn` is always bound
    # when rollback is called; the original rolled back in a scope where conn
    # could be unbound if connection acquisition itself had failed.
    with db.get_connection() as conn:
        cursor = conn.cursor()
        try:
            conn.execute("BEGIN TRANSACTION")

            # Check if media already exists (matched by URL).
            cursor.execute('SELECT id FROM Media WHERE url = ?', (url,))
            existing_media = cursor.fetchone()

            if existing_media:
                media_id = existing_media[0]
                logging.info(f"Updating existing media with ID: {media_id}")
                cursor.execute('''
                UPDATE Media
                SET content = ?, transcription_model = ?, title = ?, type = ?, author = ?, ingestion_date = ?
                WHERE id = ?
                ''', (content, transcription_model, title, media_type, author, ingestion_date, media_id))
            else:
                logging.info("Creating new media entry")
                cursor.execute('''
                INSERT INTO Media (url, title, type, content, author, ingestion_date, transcription_model)
                VALUES (?, ?, ?, ?, ?, ?, ?)
                ''', (url, title, media_type, content, author, ingestion_date, transcription_model))
                media_id = cursor.lastrowid

            logging.info(f"Adding new modification to MediaModifications for media ID: {media_id}")
            cursor.execute('''
            INSERT INTO MediaModifications (media_id, prompt, summary, modification_date)
            VALUES (?, ?, ?, ?)
            ''', (media_id, prompt, summary, ingestion_date))
            logger.info("New modification added to MediaModifications")

            # Insert keywords and associate them with the media item.
            logging.info("Processing keywords")
            for keyword in keyword_list:
                keyword = keyword.strip().lower()
                cursor.execute('INSERT OR IGNORE INTO Keywords (keyword) VALUES (?)', (keyword,))
                cursor.execute('SELECT id FROM Keywords WHERE keyword = ?', (keyword,))
                keyword_id = cursor.fetchone()[0]
                cursor.execute('INSERT OR IGNORE INTO MediaKeywords (media_id, keyword_id) VALUES (?, ?)',
                               (media_id, keyword_id))

            # Update full-text search index.
            logging.info("Updating full-text search index")
            cursor.execute('INSERT OR REPLACE INTO media_fts (rowid, title, content) VALUES (?, ?, ?)',
                           (media_id, title, content))

            logging.info("Adding new media version")
            add_media_version(media_id, prompt, summary)

            conn.commit()
            logging.info(f"Media '{title}' successfully added/updated with ID: {media_id}")

            return f"Media '{title}' added/updated successfully with keywords: {', '.join(keyword_list)}"

        except sqlite3.Error as e:
            conn.rollback()
            logging.error(f"SQL Error: {e}")
            raise DatabaseError(f"Error adding media with keywords: {e}")
        except Exception as e:
            conn.rollback()
            logging.error(f"Unexpected Error: {e}")
            raise DatabaseError(f"Unexpected error: {e}")
635
-
636
-
637
def fetch_all_keywords() -> List[str]:
    """Return every keyword stored in the Keywords table."""
    try:
        with db.get_connection() as conn:
            rows = conn.cursor().execute('SELECT keyword FROM Keywords').fetchall()
        return [row[0] for row in rows]
    except sqlite3.Error as e:
        raise DatabaseError(f"Error fetching keywords: {e}")
646
-
647
def keywords_browser_interface():
    """Render all stored keywords as a Gradio Markdown bullet list."""
    bullet_lines = (f"- {kw}" for kw in fetch_all_keywords())
    return gr.Markdown("\n".join(bullet_lines))
650
-
651
def display_keywords():
    """Return all keywords one-per-line, or a friendly message if none / on error."""
    try:
        keywords = fetch_all_keywords()
    except DatabaseError as e:
        # Surface the DB error text directly to the UI.
        return str(e)
    if not keywords:
        return "No keywords found."
    return "\n".join(keywords)
657
-
658
-
659
def export_keywords_to_csv():
    """Write all keywords to keywords.csv in the working directory.

    Returns (filename, status_message), or (None, message) when there are no
    keywords or an error occurs.
    """
    try:
        keywords = fetch_all_keywords()
        if not keywords:
            return None, "No keywords found in the database."

        filename = "keywords.csv"
        with open(filename, 'w', newline='', encoding='utf-8') as file:
            writer = csv.writer(file)
            writer.writerow(["Keyword"])
            for keyword in keywords:
                writer.writerow([keyword])

        # Include the actual file name; the original f-string had no placeholder.
        return filename, f"Keywords exported to {filename}"
    except Exception as e:
        logger.error(f"Error exporting keywords to CSV: {e}")
        return None, f"Error exporting keywords: {e}"
676
-
677
-
678
- # Function to fetch items based on search query and type
679
def browse_items(search_query, search_type):
    """Search Media rows by Title, URL, Keyword or Content substring.

    Returns a list of (id, title, url) tuples; Keyword searches delegate to
    fetch_items_by_keyword. Raises ValueError for an unknown search type and
    DatabaseError on SQLite failure.
    """
    # Fixed mapping from search type to Media column — never user-controlled.
    column_by_type = {'Title': 'title', 'URL': 'url', 'Content': 'content'}
    try:
        if search_type == 'Keyword':
            return fetch_items_by_keyword(search_query)

        column = column_by_type.get(search_type)
        if column is None:
            raise ValueError(f"Invalid search type: {search_type}")

        with db.get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute(f"SELECT id, title, url FROM Media WHERE {column} LIKE ?", (f'%{search_query}%',))
            return cursor.fetchall()
    except sqlite3.Error as e:
        logger.error(f"Error fetching items by {search_type}: {e}")
        raise DatabaseError(f"Error fetching items by {search_type}: {e}")
699
-
700
-
701
- # Function to fetch item details
702
def fetch_item_details(media_id: int):
    """Return (content, prompt, summary) for *media_id*.

    Prompt and summary come from the most recent MediaModifications row;
    empty strings stand in for anything missing, and ("", "", "") is
    returned on any SQLite error.
    """
    try:
        with db.get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute("""
            SELECT prompt, summary
            FROM MediaModifications
            WHERE media_id = ?
            ORDER BY modification_date DESC
            LIMIT 1
            """, (media_id,))
            latest_mod = cursor.fetchone()
            cursor.execute("SELECT content FROM Media WHERE id = ?", (media_id,))
            content_row = cursor.fetchone()

        prompt = latest_mod[0] if latest_mod else ""
        summary = latest_mod[1] if latest_mod else ""
        content = content_row[0] if content_row else ""
        return content, prompt, summary
    except sqlite3.Error as e:
        logging.error(f"Error fetching item details: {e}")
        # Degrade gracefully: the UI just shows empty fields.
        return "", "", ""
726
-
727
- #
728
- #
729
- #######################################################################################################################
730
- #
731
- # Media-related Functions
732
-
733
-
734
-
735
- # Function to add a version of a prompt and summary
736
def add_media_version(media_id: int, prompt: str, summary: str) -> None:
    """Append a new MediaVersion row for *media_id* with an auto-incremented version."""
    try:
        with db.get_connection() as conn:
            cursor = conn.cursor()
            # Next version = highest existing version + 1 (1 for the first).
            cursor.execute('SELECT MAX(version) FROM MediaVersion WHERE media_id = ?', (media_id,))
            latest = cursor.fetchone()[0] or 0
            cursor.execute('''
            INSERT INTO MediaVersion (media_id, version, prompt, summary, created_at)
            VALUES (?, ?, ?, ?, ?)
            ''', (media_id, latest + 1, prompt, summary, datetime.now().strftime('%Y-%m-%d %H:%M:%S')))
            conn.commit()
    except sqlite3.Error as e:
        raise DatabaseError(f"Error adding media version: {e}")
753
-
754
-
755
- # Function to search the database with advanced options, including keyword search and full-text search
756
# Columns of Media that search_db may match against. Field names are
# interpolated into the SQL below, so they must come from this fixed set,
# never straight from user input.
_SEARCHABLE_MEDIA_FIELDS = {
    'url', 'title', 'type', 'content', 'author',
    'ingestion_date', 'prompt', 'summary', 'transcription_model',
}


def search_db(search_query: str, search_fields: List[str], keywords: str, page: int = 1, results_per_page: int = 10):
    """Search Media (LEFT JOINed with MediaModifications) with optional keyword
    filtering and pagination.

    :param search_query: substring matched against each field in search_fields
    :param search_fields: Media column names to search (validated against a whitelist)
    :param keywords: comma-separated keyword filters
    :param page: 1-based page number
    :param results_per_page: page size
    :raises ValueError: page < 1 or a field name outside the known columns
    """
    if page < 1:
        raise ValueError("Page number must be 1 or greater.")

    # Prepare keywords by splitting and trimming.
    keywords = [keyword.strip().lower() for keyword in keywords.split(',') if keyword.strip()]

    with db.get_connection() as conn:
        cursor = conn.cursor()
        offset = (page - 1) * results_per_page

        # Conditions for the free-text fields. Validate names because they are
        # interpolated into the SQL rather than bound as parameters.
        search_conditions = []
        params = []
        for field in search_fields:
            if field not in _SEARCHABLE_MEDIA_FIELDS:
                raise ValueError(f"Invalid search field: {field}")
            if search_query:  # Only add the condition when there is a query.
                search_conditions.append(f"Media.{field} LIKE ?")
                params.append(f'%{search_query}%')

        # One EXISTS sub-query per keyword filter.
        keyword_conditions = []
        for keyword in keywords:
            keyword_conditions.append(
                f"EXISTS (SELECT 1 FROM MediaKeywords mk JOIN Keywords k ON mk.keyword_id = k.id WHERE mk.media_id = Media.id AND k.keyword LIKE ?)")
            params.append(f'%{keyword}%')

        # Combine all conditions; no filters at all means match everything.
        where_clause = " AND ".join(
            search_conditions + keyword_conditions) if search_conditions or keyword_conditions else "1=1"

        # Complete the query.
        query = f'''
        SELECT DISTINCT Media.id, Media.url, Media.title, Media.type, Media.content, Media.author, Media.ingestion_date,
        MediaModifications.prompt, MediaModifications.summary
        FROM Media
        LEFT JOIN MediaModifications ON Media.id = MediaModifications.media_id
        WHERE {where_clause}
        ORDER BY Media.ingestion_date DESC
        LIMIT ? OFFSET ?
        '''
        params.extend([results_per_page, offset])

        cursor.execute(query, params)
        return cursor.fetchall()
803
-
804
-
805
- # Gradio function to handle user input and display results with pagination, with better feedback
806
def search_and_display(search_query, search_fields, keywords, page):
    """Run search_db and normalize the results into a list of row lists for Gradio."""
    results = search_db(search_query, search_fields, keywords, page)

    if isinstance(results, pd.DataFrame):
        # DataFrame rows -> plain lists.
        return results.values.tolist()
    if isinstance(results, list):
        # Dicts become value lists; tuples/lists pass through untouched.
        return [list(item.values()) if isinstance(item, dict) else item for item in results]
    raise TypeError("Unsupported data type for results")
819
-
820
-
821
def display_details(index, results):
    """Render one search result (selected by *index*) as an HTML detail panel.

    *results* may be a DataFrame or a list of dict-like rows. Returns an
    explanatory message string for missing/invalid selections. All values are
    HTML-escaped; the original claimed to build HTML "safely" but interpolated
    stored content unescaped.
    """
    if index is None or results is None:
        return "Please select a result to view details."

    try:
        # Ensure the index is an integer and access the row properly.
        index = int(index)
        if isinstance(results, pd.DataFrame):
            if index >= len(results):
                return "Index out of range. Please select a valid index."
            selected_row = results.iloc[index]
        else:
            # If results is not a DataFrame, assume a list of dict-like rows.
            selected_row = results[index]
    except ValueError:
        return "Index must be an integer."
    except IndexError:
        return "Index out of range. Please select a valid index."

    def esc(field, default):
        # Escape user-sourced values so stored markup can't inject HTML.
        return html.escape(str(selected_row.get(field, default)))

    details_html = f"""
    <h3>{esc('Title', 'No Title')}</h3>
    <p><strong>URL:</strong> {esc('URL', 'No URL')}</p>
    <p><strong>Type:</strong> {esc('Type', 'No Type')}</p>
    <p><strong>Author:</strong> {esc('Author', 'No Author')}</p>
    <p><strong>Ingestion Date:</strong> {esc('Ingestion Date', 'No Date')}</p>
    <p><strong>Prompt:</strong> {esc('Prompt', 'No Prompt')}</p>
    <p><strong>Summary:</strong> {esc('Summary', 'No Summary')}</p>
    <p><strong>Content:</strong> {esc('Content', 'No Content')}</p>
    """
    return details_html
852
-
853
-
854
def get_details(index, dataframe):
    """Format one DataFrame row (selected by position) as an HTML detail view."""
    # Guard against missing selection or an index past the end of the frame.
    if index is None or dataframe is None or index >= len(dataframe):
        return "Please select a result to view details."
    row = dataframe.iloc[index]
    details = f"""
    <h3>{row['Title']}</h3>
    <p><strong>URL:</strong> {row['URL']}</p>
    <p><strong>Type:</strong> {row['Type']}</p>
    <p><strong>Author:</strong> {row['Author']}</p>
    <p><strong>Ingestion Date:</strong> {row['Ingestion Date']}</p>
    <p><strong>Prompt:</strong> {row['Prompt']}</p>
    <p><strong>Summary:</strong> {row['Summary']}</p>
    <p><strong>Content:</strong></p>
    <pre>{row['Content']}</pre>
    """
    return details
870
-
871
-
872
def format_results(results):
    """Wrap raw search rows in a DataFrame with the standard display columns.

    Empty input yields an empty DataFrame with the same columns, so callers
    can rely on a stable shape.
    """
    display_columns = ['URL', 'Title', 'Type', 'Content', 'Author', 'Ingestion Date', 'Prompt', 'Summary']
    if not results:
        return pd.DataFrame(columns=display_columns)

    df = pd.DataFrame(results, columns=display_columns)
    logging.debug(f"Formatted DataFrame: {df}")
    return df
880
-
881
-
882
- # Function to export search results to CSV or markdown with pagination
883
def export_to_file(search_query: str, search_fields: List[str], keyword: str, page: int = 1, results_per_file: int = 1000, export_format: str = 'csv'):
    """Export one page of DB search results to the ./exports directory.

    Args:
        search_query: Text to search for.
        search_fields: Media columns to search in.
        keyword: Keyword filter passed through to search_db.
        page: 1-based page number; each page becomes its own file.
        results_per_file: Maximum rows written per file.
        export_format: 'csv' or 'markdown'.

    Returns:
        A status message naming the file written, or an error description.
    """
    try:
        results = search_db(search_query, search_fields, keyword, page, results_per_file)
        if not results:
            return "No results found to export."

        # Idempotent directory creation instead of exists()+makedirs().
        os.makedirs('exports', exist_ok=True)

        if export_format == 'csv':
            filename = f'exports/search_results_page_{page}.csv'
            with open(filename, 'w', newline='', encoding='utf-8') as file:
                writer = csv.writer(file)
                writer.writerow(['URL', 'Title', 'Type', 'Content', 'Author', 'Ingestion Date', 'Prompt', 'Summary'])
                writer.writerows(results)
        elif export_format == 'markdown':
            filename = f'exports/search_results_page_{page}.md'
            with open(filename, 'w', encoding='utf-8') as file:
                for item in results:
                    # NOTE(review): indices 7/8 assume search_db rows carry at
                    # least 9 columns (summary, keywords) — confirm against search_db.
                    markdown_content = convert_to_markdown({
                        'title': item[1],
                        'url': item[0],
                        'type': item[2],
                        'content': item[3],
                        'author': item[4],
                        'ingestion_date': item[5],
                        'summary': item[7],
                        'keywords': item[8].split(',') if item[8] else []
                    })
                    file.write(markdown_content)
                    file.write("\n---\n\n")  # Separator between items
        else:
            return f"Unsupported export format: {export_format}"

        # BUG FIX: previously returned a placeholder instead of the actual path.
        return f"Results exported to {filename}"
    except (DatabaseError, InputError) as e:
        return str(e)
922
-
923
-
924
- # Helper function to validate URL format
925
def is_valid_url(url: str) -> bool:
    """Return True when *url* looks like a well-formed http/https/ftp/ftps URL."""
    pattern = re.compile(
        r'^(?:http|ftp)s?://'                       # scheme
        r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|'  # domain
        r'localhost|'                               # localhost
        r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}|'      # IPv4
        r'\[?[A-F0-9]*:[A-F0-9:]+\]?)'              # IPv6
        r'(?::\d+)?'                                # optional port
        r'(?:/?|[/?]\S+)$',
        re.IGNORECASE,
    )
    return pattern.match(url) is not None
935
-
936
-
937
- # Helper function to validate date format
938
def is_valid_date(date_string: str) -> bool:
    """Return True when *date_string* parses as an ISO-style YYYY-MM-DD date."""
    try:
        datetime.strptime(date_string, '%Y-%m-%d')
    except ValueError:
        return False
    return True
944
-
945
-
946
- # Add ingested media to DB
947
def add_media_to_database(url, info_dict, segments, summary, keywords, custom_prompt_input, whisper_model, media_type='video'):
    """Flatten transcription segments and store the item via add_media_with_keywords.

    Args:
        url: Source URL of the media.
        info_dict: Metadata dict; 'title' and 'uploader' are read if present.
        segments: List of {'Text': ...} dicts, a single dict, or any object.
        summary: Summary text (falls back to a placeholder when falsy).
        keywords: List of keywords or a pre-joined comma string.
        custom_prompt_input: Prompt used during summarisation, or None.
        whisper_model: Name of the transcription model used.
        media_type: Media category stored alongside the row.

    Returns:
        Whatever add_media_with_keywords returns for the insert.

    Raises:
        Exception: re-raised after logging when the underlying insert fails.
    """
    try:
        # Normalise the three accepted segment shapes into one text blob.
        if isinstance(segments, list):
            content = ' '.join(seg.get('Text', '') for seg in segments if 'Text' in seg)
        elif isinstance(segments, dict):
            content = segments.get('text', '') or segments.get('content', '')
        else:
            content = str(segments)

        logging.debug(f"Extracted content (first 500 chars): {content[:500]}")

        if custom_prompt_input is None:
            custom_prompt_input = """No Custom Prompt Provided or Was Used."""

        title = info_dict.get('title', 'Untitled')
        logging.info(f"Adding media to database: URL={url}, Title={title}, Type={media_type}")

        result = add_media_with_keywords(
            url=url,
            title=title,
            media_type=media_type,
            content=content,
            keywords=','.join(keywords) if isinstance(keywords, list) else keywords,
            prompt=custom_prompt_input or 'No prompt provided',
            summary=summary or 'No summary provided',
            transcription_model=whisper_model,
            author=info_dict.get('uploader', 'Unknown'),
            ingestion_date=datetime.now().strftime('%Y-%m-%d')
        )

        logging.info(f"Media added successfully: {result}")
        return result
    except Exception as e:
        logging.error(f"Error in add_media_to_database: {str(e)}")
        raise
984
-
985
-
986
- #
987
- # End of media ingestion, export, and validation functions
988
- #######################################################################################################################
989
- #
990
- # Functions to manage prompts DB
991
-
992
def create_prompts_db():
    """Create the prompts.db schema (Prompts, Keywords, PromptKeywords) if absent."""
    schema = '''
        CREATE TABLE IF NOT EXISTS Prompts (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            name TEXT NOT NULL UNIQUE,
            details TEXT,
            system TEXT,
            user TEXT
        );
        CREATE TABLE IF NOT EXISTS Keywords (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            keyword TEXT NOT NULL UNIQUE COLLATE NOCASE
        );
        CREATE TABLE IF NOT EXISTS PromptKeywords (
            prompt_id INTEGER,
            keyword_id INTEGER,
            FOREIGN KEY (prompt_id) REFERENCES Prompts (id),
            FOREIGN KEY (keyword_id) REFERENCES Keywords (id),
            PRIMARY KEY (prompt_id, keyword_id)
        );
        CREATE INDEX IF NOT EXISTS idx_keywords_keyword ON Keywords(keyword);
        CREATE INDEX IF NOT EXISTS idx_promptkeywords_prompt_id ON PromptKeywords(prompt_id);
        CREATE INDEX IF NOT EXISTS idx_promptkeywords_keyword_id ON PromptKeywords(keyword_id);
    '''
    with sqlite3.connect('prompts.db') as conn:
        conn.cursor().executescript(schema)
1018
-
1019
-
1020
def normalize_keyword(keyword):
    """Lower-case *keyword*, trim it, and collapse internal whitespace runs."""
    collapsed = re.sub(r'\s+', ' ', keyword.strip().lower())
    return collapsed
1022
-
1023
-
1024
def add_prompt(name, details, system, user=None, keywords=None):
    """Insert a prompt row plus its normalised, de-duplicated keywords.

    Returns a human-readable status string instead of raising.
    """
    if not name or not system:
        return "Name and system prompt are required."

    try:
        with sqlite3.connect('prompts.db') as conn:
            cur = conn.cursor()
            cur.execute(
                'INSERT INTO Prompts (name, details, system, user) VALUES (?, ?, ?, ?)',
                (name, details, system, user),
            )
            prompt_id = cur.lastrowid

            # Set comprehension removes duplicates after normalisation.
            for kw in {normalize_keyword(k) for k in (keywords or []) if k.strip()}:
                cur.execute('INSERT OR IGNORE INTO Keywords (keyword) VALUES (?)', (kw,))
                cur.execute('SELECT id FROM Keywords WHERE keyword = ?', (kw,))
                keyword_id = cur.fetchone()[0]
                cur.execute(
                    'INSERT OR IGNORE INTO PromptKeywords (prompt_id, keyword_id) VALUES (?, ?)',
                    (prompt_id, keyword_id),
                )
        return "Prompt added successfully."
    except sqlite3.IntegrityError:
        return "Prompt with this name already exists."
    except sqlite3.Error as e:
        return f"Database error: {e}"
1053
-
1054
-
1055
def fetch_prompt_details(name):
    """Return (name, details, system, user, keywords_csv) for *name*, or None."""
    query = '''
        SELECT p.name, p.details, p.system, p.user, GROUP_CONCAT(k.keyword, ', ') as keywords
        FROM Prompts p
        LEFT JOIN PromptKeywords pk ON p.id = pk.prompt_id
        LEFT JOIN Keywords k ON pk.keyword_id = k.id
        WHERE p.name = ?
        GROUP BY p.id
    '''
    with sqlite3.connect('prompts.db') as conn:
        return conn.cursor().execute(query, (name,)).fetchone()
1067
-
1068
-
1069
def list_prompts(page=1, per_page=10):
    """Return (prompt_names, total_pages, page) for one page of prompts."""
    offset = (page - 1) * per_page
    with sqlite3.connect('prompts.db') as conn:
        cur = conn.cursor()
        rows = cur.execute('SELECT name FROM Prompts LIMIT ? OFFSET ?', (per_page, offset)).fetchall()
        prompts = [name for (name,) in rows]
        (total_count,) = cur.execute('SELECT COUNT(*) FROM Prompts').fetchone()

    total_pages = (total_count + per_page - 1) // per_page  # ceiling division
    return prompts, total_pages, page
1082
-
1083
- # This will not scale. For a large number of prompts, use a more efficient method.
1084
- # FIXME - see above statement.
1085
# This will not scale. For a large number of prompts, use a more efficient method.
# FIXME - see above statement.
def load_preset_prompts():
    """Return every prompt name in alphabetical order; [] on database errors."""
    try:
        with sqlite3.connect('prompts.db') as conn:
            rows = conn.execute('SELECT name FROM Prompts ORDER BY name ASC').fetchall()
        return [name for (name,) in rows]
    except sqlite3.Error as e:
        print(f"Database error: {e}")
        return []
1095
-
1096
-
1097
def insert_prompt_to_db(title, description, system_prompt, user_prompt, keywords=None):
    """Thin alias for add_prompt, kept for backward-compatible naming."""
    return add_prompt(title, description, system_prompt, user_prompt, keywords)
1099
-
1100
-
1101
def search_prompts_by_keyword(keyword, page=1, per_page=10):
    """Return (prompt_names, total_pages, page) for prompts tagged with *keyword*.

    The keyword is normalised and matched with a substring LIKE.
    """
    like_arg = '%' + normalize_keyword(keyword) + '%'
    offset = (page - 1) * per_page
    with sqlite3.connect('prompts.db') as conn:
        cur = conn.cursor()
        cur.execute('''
            SELECT DISTINCT p.name
            FROM Prompts p
            JOIN PromptKeywords pk ON p.id = pk.prompt_id
            JOIN Keywords k ON pk.keyword_id = k.id
            WHERE k.keyword LIKE ?
            LIMIT ? OFFSET ?
        ''', (like_arg, per_page, offset))
        prompts = [name for (name,) in cur.fetchall()]

        # Count of all matches (not just this page) for pagination.
        cur.execute('''
            SELECT COUNT(DISTINCT p.id)
            FROM Prompts p
            JOIN PromptKeywords pk ON p.id = pk.prompt_id
            JOIN Keywords k ON pk.keyword_id = k.id
            WHERE k.keyword LIKE ?
        ''', (like_arg,))
        (total_count,) = cur.fetchone()

    total_pages = (total_count + per_page - 1) // per_page
    return prompts, total_pages, page
1128
-
1129
-
1130
def update_prompt_keywords(prompt_name, new_keywords):
    """Replace the keyword set of *prompt_name* with *new_keywords*.

    Also garbage-collects Keywords rows no longer referenced by any prompt.
    Returns a status string instead of raising.
    """
    try:
        with sqlite3.connect('prompts.db') as conn:
            cur = conn.cursor()

            row = cur.execute('SELECT id FROM Prompts WHERE name = ?', (prompt_name,)).fetchone()
            if not row:
                return "Prompt not found."
            prompt_id = row[0]

            # Wipe the existing associations, then rebuild from the new set.
            cur.execute('DELETE FROM PromptKeywords WHERE prompt_id = ?', (prompt_id,))

            for kw in {normalize_keyword(k) for k in new_keywords if k.strip()}:
                cur.execute('INSERT OR IGNORE INTO Keywords (keyword) VALUES (?)', (kw,))
                cur.execute('SELECT id FROM Keywords WHERE keyword = ?', (kw,))
                keyword_id = cur.fetchone()[0]
                cur.execute('INSERT INTO PromptKeywords (prompt_id, keyword_id) VALUES (?, ?)',
                            (prompt_id, keyword_id))

            # Drop keywords that are no longer linked to any prompt.
            cur.execute('''
                DELETE FROM Keywords
                WHERE id NOT IN (SELECT DISTINCT keyword_id FROM PromptKeywords)
            ''')
        return "Keywords updated successfully."
    except sqlite3.Error as e:
        return f"Database error: {e}"
1159
-
1160
-
1161
def add_or_update_prompt(title, description, system_prompt, user_prompt, keywords=None):
    """Upsert a prompt by title, syncing its keywords on the update path.

    Returns a status string combining the prompt and keyword results.
    """
    if not title:
        return "Error: Title is required."

    if fetch_prompt_details(title):
        # Existing prompt: update it, then refresh keywords on success.
        # NOTE: success detection is string-based; keep messages in sync.
        result = update_prompt_in_db(title, description, system_prompt, user_prompt)
        if "successfully" in result:
            keyword_result = update_prompt_keywords(title, keywords or [])
            result += f" {keyword_result}"
        return result

    return insert_prompt_to_db(title, description, system_prompt, user_prompt, keywords)
1178
-
1179
-
1180
def load_prompt_details(selected_prompt):
    """Return (name, details, system, user, keywords) for the selection.

    Yields five empty strings when nothing is selected or the prompt is unknown.
    """
    if selected_prompt:
        record = fetch_prompt_details(selected_prompt)
        if record:
            return record[0], record[1], record[2], record[3], record[4]
    return "", "", "", "", ""
1186
-
1187
-
1188
def update_prompt_in_db(title, description, system_prompt, user_prompt):
    """Update the details/system/user fields of the prompt named *title*.

    Returns a status string; never raises on sqlite errors.
    """
    try:
        with sqlite3.connect('prompts.db') as conn:
            cur = conn.cursor()
            cur.execute(
                "UPDATE Prompts SET details = ?, system = ?, user = ? WHERE name = ?",
                (description, system_prompt, user_prompt, title),
            )
            # rowcount of zero means the title matched nothing.
            if cur.rowcount == 0:
                return "No prompt found with the given title."
        return "Prompt updated successfully!"
    except sqlite3.Error as e:
        return f"Error updating prompt: {e}"
1201
-
1202
-
1203
- create_prompts_db()
1204
-
1205
def delete_prompt(prompt_id):
    """Delete a prompt and its keyword links; returns a status string."""
    try:
        with sqlite3.connect('prompts.db') as conn:
            cur = conn.cursor()

            # Remove link rows first so no orphan associations remain.
            cur.execute("DELETE FROM PromptKeywords WHERE prompt_id = ?", (prompt_id,))
            cur.execute("DELETE FROM Prompts WHERE id = ?", (prompt_id,))

            if cur.rowcount == 0:
                return f"No prompt found with ID {prompt_id}"
            conn.commit()
            return f"Prompt with ID {prompt_id} has been successfully deleted"
    except sqlite3.Error as e:
        return f"An error occurred: {e}"
1223
-
1224
- #
1225
- #
1226
- #######################################################################################################################
1227
- #
1228
- # Function to fetch/update media content
1229
-
1230
def update_media_content(selected_item, item_mapping, content_input, prompt_input, summary_input):
    """Persist edited content/prompt/summary for the selected media item.

    Args:
        selected_item: Display key chosen in the UI.
        item_mapping: Dict mapping display keys to media ids.
        content_input: New content for the Media row.
        prompt_input: Prompt text stored in MediaModifications.
        summary_input: Summary text stored in MediaModifications.

    Returns:
        A status message; errors are logged and reported, never raised.
    """
    try:
        if not (selected_item and item_mapping and selected_item in item_mapping):
            return "No item selected or invalid selection"

        media_id = item_mapping[selected_item]
        with db.get_connection() as conn:
            cur = conn.cursor()

            cur.execute("UPDATE Media SET content = ? WHERE id = ?", (content_input, media_id))

            # Upsert the latest prompt/summary into MediaModifications.
            cur.execute("SELECT COUNT(*) FROM MediaModifications WHERE media_id = ?", (media_id,))
            if cur.fetchone()[0] > 0:
                cur.execute("""
                    UPDATE MediaModifications
                    SET prompt = ?, summary = ?, modification_date = CURRENT_TIMESTAMP
                    WHERE media_id = ?
                """, (prompt_input, summary_input, media_id))
            else:
                cur.execute("""
                    INSERT INTO MediaModifications (media_id, prompt, summary, modification_date)
                    VALUES (?, ?, ?, CURRENT_TIMESTAMP)
                """, (media_id, prompt_input, summary_input))

            conn.commit()

        return f"Content updated successfully for media ID: {media_id}"
    except Exception as e:
        logging.error(f"Error updating media content: {e}")
        return f"Error updating content: {str(e)}"
1267
-
1268
def search_media_database(query: str) -> List[Tuple[int, str, str]]:
    """Return (id, title, url) for Media rows whose title contains *query*."""
    try:
        with db.get_connection() as conn:
            rows = conn.cursor().execute(
                "SELECT id, title, url FROM Media WHERE title LIKE ?",
                (f'%{query}%',),
            ).fetchall()
        return rows
    except sqlite3.Error as e:
        raise Exception(f"Error searching media database: {e}")
1277
-
1278
def load_media_content(media_id: int) -> dict:
    """Fetch content/prompt/summary for one Media row; empty strings if absent."""
    try:
        with db.get_connection() as conn:
            cur = conn.cursor()
            cur.execute("SELECT content, prompt, summary FROM Media WHERE id = ?", (media_id,))
            row = cur.fetchone()
        if row is None:
            return {"content": "", "prompt": "", "summary": ""}
        content, prompt, summary = row
        return {"content": content, "prompt": prompt, "summary": summary}
    except sqlite3.Error as e:
        raise Exception(f"Error loading media content: {e}")
1293
-
1294
-
1295
def fetch_items_by_title_or_url(search_query: str, search_type: str):
    """Search Media rows by a Title or URL substring.

    Args:
        search_query: Substring to match.
        search_type: 'Title' or 'URL'.

    Returns:
        List of (id, title, url) tuples.

    Raises:
        ValueError: for an unrecognised search_type (previously this fell
            through and crashed with UnboundLocalError on `results`).
        DatabaseError: on underlying sqlite errors.
    """
    # Column is chosen from a fixed whitelist, so the f-string SQL is safe.
    if search_type == 'Title':
        column = 'title'
    elif search_type == 'URL':
        column = 'url'
    else:
        # BUG FIX: fail loudly instead of referencing an unbound variable.
        raise ValueError(f"Invalid search type: {search_type}")

    try:
        with db.get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute(f"SELECT id, title, url FROM Media WHERE {column} LIKE ?",
                           (f'%{search_query}%',))
            return cursor.fetchall()
    except sqlite3.Error as e:
        raise DatabaseError(f"Error fetching items by {search_type}: {e}")
1307
-
1308
-
1309
def fetch_items_by_keyword(search_query: str):
    """Return (id, title, url) of media linked to keywords matching *search_query*."""
    sql = """
        SELECT m.id, m.title, m.url
        FROM Media m
        JOIN MediaKeywords mk ON m.id = mk.media_id
        JOIN Keywords k ON mk.keyword_id = k.id
        WHERE k.keyword LIKE ?
    """
    try:
        with db.get_connection() as conn:
            return conn.cursor().execute(sql, (f'%{search_query}%',)).fetchall()
    except sqlite3.Error as e:
        raise DatabaseError(f"Error fetching items by keyword: {e}")
1324
-
1325
-
1326
def fetch_items_by_content(search_query: str):
    """Return (id, title, url) for Media rows whose content contains *search_query*."""
    try:
        with db.get_connection() as conn:
            return conn.cursor().execute(
                "SELECT id, title, url FROM Media WHERE content LIKE ?",
                (f'%{search_query}%',),
            ).fetchall()
    except sqlite3.Error as e:
        raise DatabaseError(f"Error fetching items by content: {e}")
1335
-
1336
-
1337
def fetch_item_details_single(media_id: int):
    """Return (prompt, summary, content) for a media item; '' for anything absent.

    The prompt/summary come from the most recent MediaModifications row.
    """
    try:
        with db.get_connection() as conn:
            cur = conn.cursor()
            cur.execute("""
                SELECT prompt, summary
                FROM MediaModifications
                WHERE media_id = ?
                ORDER BY modification_date DESC
                LIMIT 1
            """, (media_id,))
            mod_row = cur.fetchone()
            cur.execute("SELECT content FROM Media WHERE id = ?", (media_id,))
            content_row = cur.fetchone()

        prompt, summary = mod_row if mod_row else ("", "")
        content = content_row[0] if content_row else ""
        return prompt, summary, content
    except sqlite3.Error as e:
        raise Exception(f"Error fetching item details: {e}")
1359
-
1360
-
1361
-
1362
def convert_to_markdown(item):
    """Render a media item dict as a Markdown document.

    Expects keys: title, url, author, ingestion_date, type, keywords (list),
    summary, content.
    """
    parts = [
        f"# {item['title']}\n\n",
        f"**URL:** {item['url']}\n\n",
        f"**Author:** {item['author']}\n\n",
        f"**Ingestion Date:** {item['ingestion_date']}\n\n",
        f"**Type:** {item['type']}\n\n",
        f"**Keywords:** {', '.join(item['keywords'])}\n\n",
        "## Summary\n\n",
        f"{item['summary']}\n\n",
        "## Content\n\n",
        f"{item['content']}\n\n",
    ]
    return "".join(parts)
1374
-
1375
- # Gradio function to handle user input and display results with pagination for displaying entries in the DB
1376
# Gradio function to handle user input and display results with pagination for displaying entries in the DB
def fetch_paginated_data(page: int, results_per_page: int) -> Tuple[List[Tuple], int]:
    """Return (rows, total_entry_count) for one page of Media rows."""
    offset = (page - 1) * results_per_page
    try:
        with db.get_connection() as conn:
            cur = conn.cursor()
            (total_entries,) = cur.execute("SELECT COUNT(*) FROM Media").fetchone()
            rows = cur.execute(
                "SELECT id, title, url FROM Media LIMIT ? OFFSET ?",
                (results_per_page, offset),
            ).fetchall()
        return rows, total_entries
    except sqlite3.Error as e:
        raise Exception(f"Error fetching paginated data: {e}")
1390
-
1391
def format_results_as_html(results: List[Tuple]) -> str:
    """Render (id, title, url) rows as a striped HTML table.

    Cell values are HTML-escaped (matching search_and_display_items) so
    titles or URLs containing markup cannot inject into the rendered page.
    """
    rows_html = "".join(
        f"<tr><td>{html.escape(str(row[0]))}</td>"
        f"<td>{html.escape(str(row[1]))}</td>"
        f"<td>{html.escape(str(row[2]))}</td></tr>"
        for row in results
    )
    return (
        "<table class='table table-striped'>"
        "<tr><th>ID</th><th>Title</th><th>URL</th></tr>"
        + rows_html
        + "</table>"
    )
1398
-
1399
def view_database(page: int, results_per_page: int) -> Tuple[str, str, int]:
    """Return (table_html, 'Page X of Y' label, total_pages) for one DB page."""
    rows, total_entries = fetch_paginated_data(page, results_per_page)
    total_pages = (total_entries + results_per_page - 1) // results_per_page  # ceiling division
    return format_results_as_html(rows), f"Page {page} of {total_pages}", total_pages
1405
-
1406
-
1407
def search_and_display_items(query, search_type, page, entries_per_page, char_count):
    """Search media_summary.db and render matching items as HTML cards.

    Args:
        query: Substring to search for.
        search_type: One of 'Title', 'URL', 'Keyword', 'Content'.
        page: 1-based page number.
        entries_per_page: Page size.
        char_count: Number of content characters to preview.

    Returns:
        (results_html, pagination_label, total_pages); on sqlite errors the
        first element is an error paragraph and total_pages is 0.

    Raises:
        ValueError: for an unrecognised search_type.
    """
    offset = (page - 1) * entries_per_page
    try:
        with sqlite3.connect('media_summary.db') as conn:
            cursor = conn.cursor()

            # Map the UI search type to its WHERE clause (fixed whitelist,
            # so interpolating it into the SQL below is safe).
            where_clauses = {
                "Title": "WHERE m.title LIKE ?",
                "URL": "WHERE m.url LIKE ?",
                "Keyword": "WHERE k.keyword LIKE ?",
                "Content": "WHERE m.content LIKE ?",
            }
            if search_type not in where_clauses:
                raise ValueError("Invalid search type")
            where_clause = where_clauses[search_type]

            cursor.execute(f'''
                SELECT m.id, m.title, m.url, m.content, mm.summary, GROUP_CONCAT(k.keyword, ', ') as keywords
                FROM Media m
                LEFT JOIN MediaModifications mm ON m.id = mm.media_id
                LEFT JOIN MediaKeywords mk ON m.id = mk.media_id
                LEFT JOIN Keywords k ON mk.keyword_id = k.id
                {where_clause}
                GROUP BY m.id
                ORDER BY m.ingestion_date DESC
                LIMIT ? OFFSET ?
            ''', (f'%{query}%', entries_per_page, offset))
            items = cursor.fetchall()

            cursor.execute(f'''
                SELECT COUNT(DISTINCT m.id)
                FROM Media m
                LEFT JOIN MediaKeywords mk ON m.id = mk.media_id
                LEFT JOIN Keywords k ON mk.keyword_id = k.id
                {where_clause}
            ''', (f'%{query}%',))
            total_items = cursor.fetchone()[0]

            results = ""
            for item in items:
                title = html.escape(item[1]).replace('\n', '<br>')
                url = html.escape(item[2]).replace('\n', '<br>')
                # BUG FIX: truncate BEFORE escaping so an HTML entity is never
                # cut in half, and only append an ellipsis when the content
                # was actually truncated.
                raw_content = item[3] or ''
                content = html.escape(raw_content[:char_count])
                if len(raw_content) > char_count:
                    content += '...'
                summary = html.escape(item[4] or '').replace('\n', '<br>')
                keywords = html.escape(item[5] or '').replace('\n', '<br>')

                results += f"""
                <div style="border: 1px solid #ddd; padding: 10px; margin-bottom: 20px;">
                    <div style="display: grid; grid-template-columns: 1fr 1fr; gap: 10px;">
                        <div><strong>Title:</strong> {title}</div>
                        <div><strong>URL:</strong> {url}</div>
                    </div>
                    <div style="margin-top: 10px;">
                        <strong>Content (first {char_count} characters):</strong>
                        <pre style="white-space: pre-wrap; word-wrap: break-word;">{content}</pre>
                    </div>
                    <div style="margin-top: 10px;">
                        <strong>Summary:</strong>
                        <pre style="white-space: pre-wrap; word-wrap: break-word;">{summary}</pre>
                    </div>
                    <div style="margin-top: 10px;">
                        <strong>Keywords:</strong> {keywords}
                    </div>
                </div>
                """

            total_pages = (total_items + entries_per_page - 1) // entries_per_page
            pagination = f"Page {page} of {total_pages} (Total items: {total_items})"

            return results, pagination, total_pages
    except sqlite3.Error as e:
        return f"<p>Error searching items: {e}</p>", "Error", 0
1482
-
1483
-
1484
- #
1485
- # End of Functions to manage prompts DB / Fetch and update media content
1486
- #######################################################################################################################
1487
- #
1488
- # Obsidian-related Functions
1489
-
1490
def import_obsidian_note_to_db(note_data):
    """Insert or update an Obsidian note (plus tags and frontmatter) in the Media DB.

    Args:
        note_data: Dict with 'title', 'content', 'tags', 'frontmatter',
            and 'file_path'.

    Returns:
        (success: bool, error_message: str | None).
    """
    # BUG FIX: initialise before the try so the except blocks can safely
    # reference it even when the failure happens before the SELECT binds it.
    existing_note = None
    try:
        with db.get_connection() as conn:
            cursor = conn.cursor()

            cursor.execute("SELECT id FROM Media WHERE title = ? AND type = 'obsidian_note'",
                           (note_data['title'],))
            existing_note = cursor.fetchone()

            # Store a relative path instead of the temporary absolute path.
            relative_path = os.path.relpath(note_data['file_path'], start=os.path.dirname(note_data['file_path']))

            if existing_note:
                media_id = existing_note[0]
                cursor.execute("""
                    UPDATE Media
                    SET content = ?, author = ?, ingestion_date = CURRENT_TIMESTAMP, url = ?
                    WHERE id = ?
                """, (note_data['content'], note_data['frontmatter'].get('author', 'Unknown'),
                      relative_path, media_id))
                # Keyword links are rebuilt from scratch below.
                cursor.execute("DELETE FROM MediaKeywords WHERE media_id = ?", (media_id,))
            else:
                cursor.execute("""
                    INSERT INTO Media (title, content, type, author, ingestion_date, url)
                    VALUES (?, ?, 'obsidian_note', ?, CURRENT_TIMESTAMP, ?)
                """, (note_data['title'], note_data['content'],
                      note_data['frontmatter'].get('author', 'Unknown'), relative_path))
                media_id = cursor.lastrowid

            for tag in note_data['tags']:
                cursor.execute("INSERT OR IGNORE INTO Keywords (keyword) VALUES (?)", (tag,))
                cursor.execute("SELECT id FROM Keywords WHERE keyword = ?", (tag,))
                keyword_id = cursor.fetchone()[0]
                cursor.execute("INSERT OR IGNORE INTO MediaKeywords (media_id, keyword_id) VALUES (?, ?)",
                               (media_id, keyword_id))

            # Frontmatter is preserved as a YAML blob in MediaModifications.
            frontmatter_str = yaml.dump(note_data['frontmatter'])
            cursor.execute("""
                INSERT INTO MediaModifications (media_id, prompt, summary, modification_date)
                VALUES (?, 'Obsidian Frontmatter', ?, CURRENT_TIMESTAMP)
            """, (media_id, frontmatter_str))

            # Keep the full-text search index in sync with title/content.
            cursor.execute('INSERT OR REPLACE INTO media_fts (rowid, title, content) VALUES (?, ?, ?)',
                           (media_id, note_data['title'], note_data['content']))

        action = "Updated" if existing_note else "Imported"
        logger.info(f"{action} Obsidian note: {note_data['title']}")
        return True, None
    except sqlite3.Error as e:
        error_msg = f"Database error {'updating' if existing_note else 'importing'} note {note_data['title']}: {str(e)}"
        logger.error(error_msg)
        return False, error_msg
    except Exception as e:
        error_msg = f"Unexpected error {'updating' if existing_note else 'importing'} note {note_data['title']}: {str(e)}\n{traceback.format_exc()}"
        logger.error(error_msg)
        return False, error_msg
1547
-
1548
-
1549
- #
1550
- # End of Obsidian-related Functions
1551
- #######################################################################################################################
1552
- #
1553
- # Chat-related Functions
1554
-
1555
-
1556
-
1557
def create_chat_conversation(media_id, conversation_name):
    """Create a ChatConversations row and return its new id.

    Raises:
        DatabaseError: on any underlying sqlite failure.
    """
    sql = '''
        INSERT INTO ChatConversations (media_id, conversation_name, created_at, updated_at)
        VALUES (?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)
    '''
    try:
        with db.get_connection() as conn:
            cur = conn.cursor()
            cur.execute(sql, (media_id, conversation_name))
            conn.commit()
            return cur.lastrowid
    except sqlite3.Error as e:
        logging.error(f"Error creating chat conversation: {e}")
        raise DatabaseError(f"Error creating chat conversation: {e}")
1570
-
1571
-
1572
def add_chat_message(conversation_id: int, sender: str, message: str) -> int:
    """Append one message to a conversation and return the new message id.

    Raises:
        DatabaseError: on any underlying sqlite failure.
    """
    sql = '''
        INSERT INTO ChatMessages (conversation_id, sender, message)
        VALUES (?, ?, ?)
    '''
    try:
        with db.get_connection() as conn:
            cur = conn.cursor()
            cur.execute(sql, (conversation_id, sender, message))
            conn.commit()
            return cur.lastrowid
    except sqlite3.Error as e:
        logging.error(f"Error adding chat message: {e}")
        raise DatabaseError(f"Error adding chat message: {e}")
1585
-
1586
-
1587
def get_chat_messages(conversation_id: int) -> List[Dict[str, Any]]:
    """Return a conversation's messages, oldest first, as a list of dicts.

    Each dict carries: id, sender, message, timestamp.

    Raises:
        DatabaseError: on any underlying sqlite failure.
    """
    sql = '''
        SELECT id, sender, message, timestamp
        FROM ChatMessages
        WHERE conversation_id = ?
        ORDER BY timestamp ASC
    '''
    try:
        with db.get_connection() as conn:
            rows = conn.cursor().execute(sql, (conversation_id,)).fetchall()
        return [
            {'id': row_id, 'sender': sender, 'message': message, 'timestamp': ts}
            for row_id, sender, message, ts in rows
        ]
    except sqlite3.Error as e:
        logging.error(f"Error retrieving chat messages: {e}")
        raise DatabaseError(f"Error retrieving chat messages: {e}")
1610
-
1611
-
1612
def search_chat_conversations(search_query: str) -> List[Dict[str, Any]]:
    """Find conversations whose name or media title matches *search_query*.

    Unlike the sibling chat helpers, this swallows sqlite errors and returns
    an empty list so the UI degrades gracefully.
    """
    sql = '''
        SELECT cc.id, cc.media_id, cc.conversation_name, cc.created_at, m.title as media_title
        FROM ChatConversations cc
        LEFT JOIN Media m ON cc.media_id = m.id
        WHERE cc.conversation_name LIKE ? OR m.title LIKE ?
        ORDER BY cc.updated_at DESC
    '''
    like_arg = f'%{search_query}%'
    try:
        with db.get_connection() as conn:
            rows = conn.cursor().execute(sql, (like_arg, like_arg)).fetchall()
        return [
            {
                'id': conv_id,
                'media_id': media_id,
                'conversation_name': name,
                'created_at': created_at,
                'media_title': media_title or "Unknown Media",
            }
            for conv_id, media_id, name, created_at, media_title in rows
        ]
    except sqlite3.Error as e:
        logging.error(f"Error searching chat conversations: {e}")
        return []
1637
-
1638
-
1639
def update_chat_message(message_id: int, new_message: str) -> None:
    """Overwrite a message's text and refresh its timestamp.

    Raises:
        DatabaseError: on any underlying sqlite failure.
    """
    sql = '''
        UPDATE ChatMessages
        SET message = ?, timestamp = CURRENT_TIMESTAMP
        WHERE id = ?
    '''
    try:
        with db.get_connection() as conn:
            conn.cursor().execute(sql, (new_message, message_id))
            conn.commit()
    except sqlite3.Error as e:
        logging.error(f"Error updating chat message: {e}")
        raise DatabaseError(f"Error updating chat message: {e}")
1652
-
1653
-
1654
def delete_chat_message(message_id: int) -> None:
    """Delete one chat message by id.

    Raises:
        DatabaseError: on any underlying sqlite failure.
    """
    try:
        with db.get_connection() as conn:
            conn.cursor().execute('DELETE FROM ChatMessages WHERE id = ?', (message_id,))
            conn.commit()
    except sqlite3.Error as e:
        logging.error(f"Error deleting chat message: {e}")
        raise DatabaseError(f"Error deleting chat message: {e}")
1663
-
1664
-
1665
def save_chat_history_to_database(chatbot, conversation_id, media_id, conversation_name):
    """Persist a chatbot transcript, creating the conversation row if needed.

    Args:
        chatbot: Iterable of (user_message, ai_message) pairs.
        conversation_id: Existing conversation id, or None to create one.
        media_id: Media item the conversation belongs to.
        conversation_name: Display name used when a conversation is created.

    Returns:
        The conversation id the messages were saved under.

    Raises:
        Exception: re-raised after logging on any failure.
    """
    try:
        with db.get_connection() as conn:
            cursor = conn.cursor()

            if conversation_id is None:
                cursor.execute('''
                    INSERT INTO ChatConversations (media_id, conversation_name, created_at, updated_at)
                    VALUES (?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)
                ''', (media_id, conversation_name))
                conversation_id = cursor.lastrowid

            # Each history pair becomes one 'user' row followed by one 'ai' row.
            # (Idiom fix: the index from enumerate() was never used.)
            for user_msg, ai_msg in chatbot:
                cursor.execute('''
                    INSERT INTO ChatMessages (conversation_id, sender, message, timestamp)
                    VALUES (?, ?, ?, CURRENT_TIMESTAMP)
                ''', (conversation_id, 'user', user_msg))

                cursor.execute('''
                    INSERT INTO ChatMessages (conversation_id, sender, message, timestamp)
                    VALUES (?, ?, ?, CURRENT_TIMESTAMP)
                ''', (conversation_id, 'ai', ai_msg))

            # Touch the conversation so recency ordering stays correct.
            cursor.execute('''
                UPDATE ChatConversations
                SET updated_at = CURRENT_TIMESTAMP
                WHERE id = ?
            ''', (conversation_id,))

            conn.commit()

        return conversation_id
    except Exception as e:
        logging.error(f"Error saving chat history to database: {str(e)}")
        raise
1703
-
1704
-
1705
- #
1706
- # End of Chat-related Functions
1707
- #######################################################################################################################
1708
- #
1709
- # Functions to Compare Transcripts
1710
-
1711
- # Fetch Transcripts
1712
def get_transcripts(media_id):
    """Fetch all stored transcripts for a media item, newest first.

    Args:
        media_id: primary key of the Media row whose transcripts to fetch.

    Returns:
        A list of (id, whisper_model, transcription, created_at) tuples,
        or an empty list when the database query fails.
    """
    try:
        with db.get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute('''
            SELECT id, whisper_model, transcription, created_at
            FROM Transcripts
            WHERE media_id = ?
            ORDER BY created_at DESC
            ''', (media_id,))
            return cursor.fetchall()
    except sqlite3.Error as e:
        # Narrowed from `except Exception`: only database failures are an
        # expected, recoverable case here. Programming errors should
        # propagate instead of being silently turned into an empty result.
        logging.error(f"Error in get_transcripts: {str(e)}")
        return []
1726
-
1727
-
1728
- #
1729
- # End of Functions to Compare Transcripts
1730
- #######################################################################################################################
1731
- #
1732
- # Functions to handle deletion of media items
1733
-
1734
-
1735
def mark_as_trash(media_id: int) -> None:
    """Soft-delete a media item: set its trash flag and stamp the trash time."""
    query = """
        UPDATE Media
        SET is_trash = 1, trash_date = ?
        WHERE id = ?
    """
    with db.get_connection() as conn:
        conn.cursor().execute(query, (datetime.now(), media_id))
        conn.commit()
1744
-
1745
-
1746
def restore_from_trash(media_id: int) -> None:
    """Undo a soft delete: clear the trash flag and the trash timestamp."""
    query = """
        UPDATE Media
        SET is_trash = 0, trash_date = NULL
        WHERE id = ?
    """
    with db.get_connection() as conn:
        conn.cursor().execute(query, (media_id,))
        conn.commit()
1755
-
1756
-
1757
def get_trashed_items() -> List[Dict]:
    """Return all soft-deleted media items, most recently trashed first.

    Returns:
        A list of dicts with keys 'id', 'title' and 'trash_date'.
    """
    with db.get_connection() as conn:
        cursor = conn.cursor()
        cursor.execute("""
            SELECT id, title, trash_date
            FROM Media
            WHERE is_trash = 1
            ORDER BY trash_date DESC
        """)
        rows = cursor.fetchall()
    keys = ('id', 'title', 'trash_date')
    return [dict(zip(keys, row)) for row in rows]
1767
-
1768
-
1769
def permanently_delete_item(media_id: int) -> None:
    """Hard-delete a media item together with every row that references it.

    Removes the Media row plus its dependents in MediaKeywords,
    MediaVersion, MediaModifications, Transcripts, the chat history
    (ChatMessages via their ChatConversations), and the FTS index entry.

    Fix: the original version left orphaned rows in Transcripts and in
    ChatConversations/ChatMessages, both of which are keyed by media_id
    elsewhere in this module (see get_transcripts and
    save_chat_history_to_database).
    """
    with db.get_connection() as conn:
        cursor = conn.cursor()
        # Delete chat messages first: they reference ChatConversations,
        # which in turn references media_id.
        cursor.execute("""
            DELETE FROM ChatMessages
            WHERE conversation_id IN (
                SELECT id FROM ChatConversations WHERE media_id = ?
            )
        """, (media_id,))
        cursor.execute("DELETE FROM ChatConversations WHERE media_id = ?", (media_id,))
        cursor.execute("DELETE FROM Transcripts WHERE media_id = ?", (media_id,))
        cursor.execute("DELETE FROM MediaKeywords WHERE media_id = ?", (media_id,))
        cursor.execute("DELETE FROM MediaVersion WHERE media_id = ?", (media_id,))
        cursor.execute("DELETE FROM MediaModifications WHERE media_id = ?", (media_id,))
        # NOTE(review): assumes media_fts rowid mirrors Media.id — confirm
        # against the FTS table definition elsewhere in this file.
        cursor.execute("DELETE FROM media_fts WHERE rowid = ?", (media_id,))
        cursor.execute("DELETE FROM Media WHERE id = ?", (media_id,))
        conn.commit()
1778
-
1779
-
1780
def empty_trash(days_threshold: int) -> Tuple[int, int]:
    """Permanently delete trashed media older than `days_threshold` days.

    Args:
        days_threshold: minimum age (in days) a trashed item must have
            before it is purged.

    Returns:
        A (deleted_count, remaining_count) tuple: how many items were
        purged and how many still sit in the trash.
    """
    cutoff = datetime.now() - timedelta(days=days_threshold)
    with db.get_connection() as conn:
        cursor = conn.cursor()

        cursor.execute(
            "SELECT id FROM Media WHERE is_trash = 1 AND trash_date <= ?",
            (cutoff,),
        )
        expired_ids = [row[0] for row in cursor.fetchall()]

        for expired_id in expired_ids:
            permanently_delete_item(expired_id)

        cursor.execute(
            "SELECT COUNT(*) FROM Media WHERE is_trash = 1 AND trash_date > ?",
            (cutoff,),
        )
        remaining = cursor.fetchone()[0]

    return len(expired_ids), remaining
1800
-
1801
-
1802
def user_delete_item(media_id: int, force: bool = False) -> str:
    """Trash an item, or permanently delete it once it is already trashed.

    First call moves the item to the trash; a later call permanently
    deletes it if it has been trashed for 30+ days, or immediately when
    `force` is True.

    Args:
        media_id: primary key of the Media row.
        force: permanently delete even before the 30-day grace period.

    Returns:
        A human-readable status message describing what happened.
    """
    with db.get_connection() as conn:
        cursor = conn.cursor()
        cursor.execute("SELECT is_trash, trash_date FROM Media WHERE id = ?", (media_id,))
        result = cursor.fetchone()

    if not result:
        return "Item not found."

    is_trash, trash_date = result

    if not is_trash:
        mark_as_trash(media_id)
        return "Item moved to trash."

    # BUG FIX: sqlite3 returns TIMESTAMP columns as plain strings unless
    # detect_types/converters are configured, and mark_as_trash stores a
    # datetime via the default str adapter — so the original
    # `datetime.now() - trash_date` raised TypeError. Parse the stored
    # value back into a datetime before doing arithmetic on it.
    if isinstance(trash_date, str):
        try:
            trash_date = datetime.fromisoformat(trash_date)
        except ValueError:
            trash_date = None

    if force or (trash_date and (datetime.now() - trash_date).days >= 30):
        permanently_delete_item(media_id)
        return "Item permanently deleted."
    else:
        return "Item is already in trash. Use force=True to delete permanently before 30 days."
1822
-
1823
-
1824
- #
1825
- # End of Functions to handle deletion of media items
1826
- #######################################################################################################################