import logging
from typing import TYPE_CHECKING, List

if TYPE_CHECKING:
    # Prompt is only referenced in a type annotation; guarding the import
    # keeps this module importable even when llama_index is absent.
    from llama_index import Prompt

# Module-level logger per stdlib convention (instead of the root logger).
logger = logging.getLogger(__name__)


def compact_text_chunks(self, prompt: "Prompt", text_chunks: List[str]) -> List[str]:
    """Merge text chunks into one numbered document, then re-split it.

    Empty or whitespace-only chunks are dropped; the survivors are
    prefixed with a 1-based citation marker ("[1] ...", "[2] ...") and
    joined with blank lines, so downstream consumers can refer to chunks
    by index. The combined text is then re-split with a splitter sized
    for *prompt*.

    Args:
        prompt: Prompt whose size constraints drive the re-split.
        text_chunks: Raw chunk strings; blanks are discarded.

    Returns:
        List of chunk strings produced by the prompt-aware splitter.
    """
    logger.debug("Compacting text chunks...🚀🚀🚀")
    # Drop empty chunks first, then number the remainder starting at [1].
    stripped = [chunk.strip() for chunk in text_chunks if chunk.strip()]
    numbered = [f"[{idx}] {chunk}" for idx, chunk in enumerate(stripped, start=1)]
    combined = "\n\n".join(numbered)
    # resplit based on self.max_chunk_overlap
    text_splitter = self.get_text_splitter_given_prompt(prompt, 1, padding=1)
    return text_splitter.split_text(combined)