llamameta committed on
Commit
7cfd7e6
·
verified ·
1 Parent(s): 448325d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -2
app.py CHANGED
@@ -560,8 +560,8 @@ async def ask_website(url: str, question: str, model: str = "llama-3-70b", proxy
560
  response = requests.get(url, headers=headers, proxies=proxies)
561
  response.raise_for_status()
562
  visible_text = extract_text_from_webpage(response.text)
563
- if len(visible_text) > 7500: # Adjust max_chars based on your needs
564
- visible_text = visible_text[:7500] + "..."
565
 
566
  # Construct a prompt for the chat model
567
  prompt = f"Based on the following text, answer this question in Paragraph: [QUESTION] {question} [TEXT] {visible_text}"
 
560
  response = requests.get(url, headers=headers, proxies=proxies)
561
  response.raise_for_status()
562
  visible_text = extract_text_from_webpage(response.text)
563
+ if len(visible_text) > 75000: # Adjust max_chars based on your needs
564
+ visible_text = visible_text[:75000] + "..."
565
 
566
  # Construct a prompt for the chat model
567
  prompt = f"Based on the following text, answer this question in Paragraph: [QUESTION] {question} [TEXT] {visible_text}"