Spaces:
Runtime error
Runtime error
SunderAli17
committed on
Commit
•
3c35e3b
1
Parent(s):
c2e4255
Update app.py
Browse files
app.py
CHANGED
@@ -11,8 +11,8 @@ load_dotenv()
|
|
11 |
|
12 |
# Configure the Llama index settings
|
13 |
Settings.llm = HuggingFaceInferenceAPI(
|
14 |
-
model_name="meta-llama/Meta-Llama-3
|
15 |
-
tokenizer_name="meta-llama/Meta-Llama-3
|
16 |
context_window=3000,
|
17 |
token=os.getenv("HF_TOKEN"),
|
18 |
max_new_tokens=512,
|
@@ -33,7 +33,7 @@ os.makedirs(PERSIST_DIR, exist_ok=True)
|
|
33 |
# Variable to store current chat conversation
|
34 |
current_chat_history = []
|
35 |
|
36 |
-
|
37 |
def data_ingestion_from_directory():
|
38 |
# Use SimpleDirectoryReader on the directory containing the PDF files
|
39 |
documents = SimpleDirectoryReader(PDF_DIRECTORY).load_data()
|
@@ -114,7 +114,7 @@ output_component = gr.Textbox()
|
|
114 |
def chatbot_handler(query):
|
115 |
response = handle_query(query)
|
116 |
return response
|
117 |
-
|
118 |
# Create the Gradio interface
|
119 |
interface = gr.Interface(
|
120 |
fn=chatbot_handler,
|
|
|
11 |
|
12 |
# Configure the Llama index settings
|
13 |
Settings.llm = HuggingFaceInferenceAPI(
|
14 |
+
model_name="meta-llama/Meta-Llama-3-8B-Instruct",
|
15 |
+
tokenizer_name="meta-llama/Meta-Llama-3-8B-Instruct",
|
16 |
context_window=3000,
|
17 |
token=os.getenv("HF_TOKEN"),
|
18 |
max_new_tokens=512,
|
|
|
33 |
# Variable to store current chat conversation
|
34 |
current_chat_history = []
|
35 |
|
36 |
+
|
37 |
def data_ingestion_from_directory():
|
38 |
# Use SimpleDirectoryReader on the directory containing the PDF files
|
39 |
documents = SimpleDirectoryReader(PDF_DIRECTORY).load_data()
|
|
|
114 |
def chatbot_handler(query):
|
115 |
response = handle_query(query)
|
116 |
return response
|
117 |
+
@spaces.GPU
|
118 |
# Create the Gradio interface
|
119 |
interface = gr.Interface(
|
120 |
fn=chatbot_handler,
|