Asaad Almutareb committed on
Commit e0a73da
1 Parent(s): fe0d4e9

created a FastAPI backend

Files changed (30)
  1. innovation_pathfinder_ai/agents/hf_mixtral_agent.py +0 -81
  2. innovation_pathfinder_ai/backend/__init__.py +0 -0
  3. innovation_pathfinder_ai/backend/app/api/__init__.py +0 -0
  4. innovation_pathfinder_ai/backend/app/api/v1/__init__.py +0 -0
  5. innovation_pathfinder_ai/backend/app/api/v1/agents/__init__.py +0 -0
  6. innovation_pathfinder_ai/backend/app/api/v1/agents/hf_mixtral_agent.py +112 -0
  7. innovation_pathfinder_ai/{agents → backend/app/api/v1/agents}/ollama_mixtral_agent.py +0 -0
  8. innovation_pathfinder_ai/{agents → backend/app/api/v1/agents}/requirements.txt +0 -0
  9. innovation_pathfinder_ai/backend/app/api/v1/api.py +12 -0
  10. innovation_pathfinder_ai/backend/app/api/v1/endpoints/__init__.py +0 -0
  11. innovation_pathfinder_ai/backend/app/api/v1/endpoints/add_to_kb.py +33 -0
  12. innovation_pathfinder_ai/backend/app/core/__init__.py +0 -0
  13. innovation_pathfinder_ai/backend/app/core/config.py +34 -0
  14. innovation_pathfinder_ai/{database → backend/app/crud}/db_handler.py +2 -2
  15. innovation_pathfinder_ai/{database/schema.py → backend/app/database/db_schema.py} +0 -0
  16. innovation_pathfinder_ai/{knowledge_base → backend/app/knowledge_base}/placeholder.txt +0 -0
  17. innovation_pathfinder_ai/backend/app/main.py +27 -0
  18. innovation_pathfinder_ai/backend/app/schemas/message_schema.py +10 -0
  19. innovation_pathfinder_ai/backend/app/schemas/response_schema.py +18 -0
  20. innovation_pathfinder_ai/{structured_tools → backend/app/structured_tools}/structured_tools.py +1 -1
  21. innovation_pathfinder_ai/{templates → backend/app/templates}/react_json_with_memory.py +0 -0
  22. innovation_pathfinder_ai/backend/app/utils/__init__.py +0 -0
  23. innovation_pathfinder_ai/{utils → backend/app/utils}/logger.py +0 -0
  24. innovation_pathfinder_ai/{utils → backend/app/utils}/utils.py +1 -1
  25. innovation_pathfinder_ai/{vector_store → backend/app/vector_store}/chroma_vector_store.py +0 -0
  26. innovation_pathfinder_ai/backend/app/vector_store/initialize_chroma_db.py +20 -0
  27. app.py → innovation_pathfinder_ai/frontend/app.py +8 -4
  28. innovation_pathfinder_ai/{assets → frontend/assets}/avatar.png +0 -0
  29. innovation_pathfinder_ai/{assets → frontend/assets}/favicon.ico +0 -0
  30. requirements.txt +1 -0
innovation_pathfinder_ai/agents/hf_mixtral_agent.py DELETED
@@ -1,81 +0,0 @@
- # HF libraries
- from langchain_community.llms import HuggingFaceEndpoint
- from langchain.agents import AgentExecutor
- from langchain.agents.format_scratchpad import format_log_to_str
- from langchain.agents.output_parsers import ReActJsonSingleInputOutputParser
- # Import things that are needed generically
- from langchain.tools.render import render_text_description
- import os
- from dotenv import load_dotenv
- from innovation_pathfinder_ai.structured_tools.structured_tools import (
-     arxiv_search, get_arxiv_paper, google_search, wikipedia_search, knowledgeBase_search, memory_search
- )
-
- from langchain.prompts import PromptTemplate
- from innovation_pathfinder_ai.templates.react_json_with_memory import template_system
- from innovation_pathfinder_ai.utils import logger
- from langchain.globals import set_llm_cache
- from langchain.cache import SQLiteCache
-
- set_llm_cache(SQLiteCache(database_path=".cache.db"))
- logger = logger.get_console_logger("hf_mixtral_agent")
-
- config = load_dotenv(".env")
- HUGGINGFACEHUB_API_TOKEN = os.getenv('HUGGINGFACEHUB_API_TOKEN')
- GOOGLE_CSE_ID = os.getenv('GOOGLE_CSE_ID')
- GOOGLE_API_KEY = os.getenv('GOOGLE_API_KEY')
- LANGCHAIN_TRACING_V2 = "true"
- LANGCHAIN_ENDPOINT = "https://api.smith.langchain.com"
- LANGCHAIN_API_KEY = os.getenv('LANGCHAIN_API_KEY')
- LANGCHAIN_PROJECT = os.getenv('LANGCHAIN_PROJECT')
-
- # Load the model from the Hugging Face Hub
- llm = HuggingFaceEndpoint(repo_id="mistralai/Mixtral-8x7B-Instruct-v0.1",
-     temperature=0.1,
-     max_new_tokens=1024,
-     repetition_penalty=1.2,
-     return_full_text=False
- )
-
-
- tools = [
-     memory_search,
-     knowledgeBase_search,
-     arxiv_search,
-     wikipedia_search,
-     google_search,
-     # get_arxiv_paper,
- ]
-
- prompt = PromptTemplate.from_template(
-     template=template_system
- )
- prompt = prompt.partial(
-     tools=render_text_description(tools),
-     tool_names=", ".join([t.name for t in tools]),
- )
-
-
- # define the agent
- chat_model_with_stop = llm.bind(stop=["\nObservation"])
- agent = (
-     {
-         "input": lambda x: x["input"],
-         "agent_scratchpad": lambda x: format_log_to_str(x["intermediate_steps"]),
-         "chat_history": lambda x: x["chat_history"],
-     }
-     | prompt
-     | chat_model_with_stop
-     | ReActJsonSingleInputOutputParser()
- )
-
- # instantiate AgentExecutor
- agent_executor = AgentExecutor(
-     agent=agent,
-     tools=tools,
-     verbose=True,
-     max_iterations=10, # cap number of iterations
-     #max_execution_time=60, # timout at 60 sec
-     return_intermediate_steps=True,
-     handle_parsing_errors=True,
- )
innovation_pathfinder_ai/backend/__init__.py ADDED
File without changes
innovation_pathfinder_ai/backend/app/api/__init__.py ADDED
File without changes
innovation_pathfinder_ai/backend/app/api/v1/__init__.py ADDED
File without changes
innovation_pathfinder_ai/backend/app/api/v1/agents/__init__.py ADDED
File without changes
innovation_pathfinder_ai/backend/app/api/v1/agents/hf_mixtral_agent.py ADDED
@@ -0,0 +1,112 @@
+ # HF libraries
+ from langchain_community.llms import HuggingFaceEndpoint
+ from langchain.agents import AgentExecutor
+ from langchain.agents.format_scratchpad import format_log_to_str
+ from langchain.agents.output_parsers import ReActJsonSingleInputOutputParser
+ # Import things that are needed generically
+ from langchain.tools.render import render_text_description
+ import os
+ from dotenv import load_dotenv
+ from innovation_pathfinder_ai.backend.app.structured_tools.structured_tools import (
+     arxiv_search, get_arxiv_paper, google_search, wikipedia_search, knowledgeBase_search, memory_search
+ )
+ from fastapi import APIRouter, WebSocket, WebSocketDisconnect
+ from langchain.prompts import PromptTemplate
+ from innovation_pathfinder_ai.backend.app.templates.react_json_with_memory import template_system
+ from innovation_pathfinder_ai.backend.app.utils import logger
+ from innovation_pathfinder_ai.backend.app.utils import generate_uuid
+ from langchain.globals import set_llm_cache
+ from langchain.cache import SQLiteCache
+
+ set_llm_cache(SQLiteCache(database_path=".cache.db"))
+ logger = logger.get_console_logger("hf_mixtral_agent")
+
+ config = load_dotenv(".env")
+ HUGGINGFACEHUB_API_TOKEN = os.getenv('HUGGINGFACEHUB_API_TOKEN')
+ GOOGLE_CSE_ID = os.getenv('GOOGLE_CSE_ID')
+ GOOGLE_API_KEY = os.getenv('GOOGLE_API_KEY')
+ LANGCHAIN_TRACING_V2 = "true"
+ LANGCHAIN_ENDPOINT = "https://api.smith.langchain.com"
+ LANGCHAIN_API_KEY = os.getenv('LANGCHAIN_API_KEY')
+ LANGCHAIN_PROJECT = os.getenv('LANGCHAIN_PROJECT')
+
+ router = APIRouter()
+
+ @router.websocket("")
+ async def websocket_endpoint(websocket: WebSocket):
+     await websocket.accept()
+
+     while True:
+         try:
+             data = await websocket.receive_json()
+             user_message = data["message"]
+
+             resp = IChatResponse(
+                 sender="you",
+                 message=user_message_card.to_dict(),
+                 type="start",
+                 message_id=generate_uuid(),
+                 id=generate_uuid(),
+             )
+
+             await websocket.send_json(resp.dict())
+             message_id: str = generate_uuid()
+             # custom_handler = CustomFinalStreamingStdOutCallbackHandler(
+             #     websocket, message_id=message_id
+             # )
+
+             # Load the model from the Hugging Face Hub
+             llm = HuggingFaceEndpoint(repo_id="mistralai/Mixtral-8x7B-Instruct-v0.1",
+                 temperature=0.1,
+                 max_new_tokens=1024,
+                 repetition_penalty=1.2,
+                 return_full_text=False
+             )
+
+
+             tools = [
+                 memory_search,
+                 knowledgeBase_search,
+                 arxiv_search,
+                 wikipedia_search,
+                 google_search,
+                 # get_arxiv_paper,
+             ]
+
+             prompt = PromptTemplate.from_template(
+                 template=template_system
+             )
+             prompt = prompt.partial(
+                 tools=render_text_description(tools),
+                 tool_names=", ".join([t.name for t in tools]),
+             )
+
+
+             # define the agent
+             chat_model_with_stop = llm.bind(stop=["\nObservation"])
+             agent = (
+                 {
+                     "input": lambda x: x["input"],
+                     "agent_scratchpad": lambda x: format_log_to_str(x["intermediate_steps"]),
+                     "chat_history": lambda x: x["chat_history"],
+                 }
+                 | prompt
+                 | chat_model_with_stop
+                 | ReActJsonSingleInputOutputParser()
+             )
+
+             # instantiate AgentExecutor
+             agent_executor = AgentExecutor(
+                 agent=agent,
+                 tools=tools,
+                 verbose=True,
+                 max_iterations=10, # cap number of iterations
+                 #max_execution_time=60, # timout at 60 sec
+                 return_intermediate_steps=True,
+                 handle_parsing_errors=True,
+             )
+
+             await agent_executor.arun(input=user_message) #, callbacks=[custom_handler]
+         except WebSocketDisconnect:
+             logger.info("websocket disconnect")
+             break
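For reference, a minimal client sketch for exercising this websocket agent endpoint, assuming the backend runs locally on port 8000 and the router is mounted at /chat (as wired in api.py below) with no extra API prefix (as in main.py below). It uses the third-party websockets package, which is not part of this commit's requirements:

import asyncio
import json

import websockets  # third-party client library, assumed to be installed separately


async def chat_once(question: str) -> None:
    # The chat router is included with prefix="/chat" and the websocket route path is "",
    # so the endpoint is assumed to resolve to ws://localhost:8000/chat.
    async with websockets.connect("ws://localhost:8000/chat") as ws:
        # The handler reads data["message"] from the received JSON.
        await ws.send(json.dumps({"message": question}))
        # It sends back a "start" message before running the agent.
        reply = json.loads(await ws.recv())
        print(reply)


if __name__ == "__main__":
    asyncio.run(chat_once("What is retrieval-augmented generation?"))

Note that the handler above references IChatResponse and user_message_card, which are not imported or defined in this file, so those would need to exist before the round trip succeeds.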
innovation_pathfinder_ai/{agents → backend/app/api/v1/agents}/ollama_mixtral_agent.py RENAMED
File without changes
innovation_pathfinder_ai/{agents → backend/app/api/v1/agents}/requirements.txt RENAMED
File without changes
innovation_pathfinder_ai/backend/app/api/v1/api.py ADDED
@@ -0,0 +1,12 @@
+ from fastapi import APIRouter
+ from app.api.v1.agents import (
+     hf_mixtral_agent,
+ )
+ from app.api.v1.endpoints import (
+     add_to_kb
+ )
+
+ api_router = APIRouter()
+
+ api_router.include_router(hf_mixtral_agent.router, prefix="/chat", tags=["chat"])
+ api_router.include_router(add_to_kb.router, prefix="/add-documents", tags=["documents"])
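As a quick sanity check on how the nested prefixes resolve, a small sketch, assuming the package is importable from the repository root and that the app.* imports in this module resolve on the import path:

from innovation_pathfinder_ai.backend.app.main import app

# Print every registered route; the chat websocket should appear as "/chat"
# and the knowledge-base endpoint as "/add-documents/add-document".
for route in app.routes:
    print(type(route).__name__, route.path)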
innovation_pathfinder_ai/backend/app/api/v1/endpoints/__init__.py ADDED
File without changes
innovation_pathfinder_ai/backend/app/api/v1/endpoints/add_to_kb.py ADDED
@@ -0,0 +1,33 @@
+ from fastapi import APIRouter
+ from innovation_pathfinder_ai.backend.app.utils.utils import extract_urls
+ from innovation_pathfinder_ai.backend.app.utils import logger
+ from innovation_pathfinder_ai.backend.app.vector_store import initialize_chroma_db
+ from innovation_pathfinder_ai.backend.app.utils.utils import (
+     generate_uuid
+ )
+ from langchain_community.vectorstores import Chroma
+
+ router = APIRouter()
+
+ @router.post("/add-document", status_code=201)
+ async def add_document(conversation):
+
+     db = initialize_chroma_db()
+     all_sources = conversation.sources
+     documents = conversation.documents
+     message = conversation.query
+
+     sources = extract_urls(all_sources)
+     src_list = '\n'.join(sources)
+     current_id = generate_uuid()
+     db.add(
+         ids=[current_id],
+         documents=documents,
+         metadatas=[
+             {
+                 "human_message":message,
+                 "sources": 'Internal Knowledge Base From: \n\n' + src_list
+             }
+         ]
+     )
+     return {"message": "Document added successfully", "ids": current_id}
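The conversation parameter is left untyped in this commit; a hedged sketch of a Pydantic request body that matches the attributes the handler reads (sources, documents, query) could look like the following. The model name and field types are illustrative assumptions, not part of the commit:

from typing import List

from pydantic import BaseModel


class ConversationPayload(BaseModel):
    # Hypothetical request model mirroring what add_document reads from `conversation`.
    sources: List[str]
    documents: List[str]
    query: str

Declaring the parameter as conversation: ConversationPayload would let FastAPI parse and validate the JSON body rather than treating the untyped argument as a query parameter.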
innovation_pathfinder_ai/backend/app/core/__init__.py ADDED
File without changes
innovation_pathfinder_ai/backend/app/core/config.py ADDED
@@ -0,0 +1,34 @@
+ import os
+ from pydantic import AnyHttpUrl, BaseSettings
+ from enum import Enum
+
+
+ class ModeEnum(str, Enum):
+     development = "development"
+     production = "production"
+     testing = "testing"
+
+
+ class Settings(BaseSettings):
+     PROJECT_NAME: str = "app"
+     BACKEND_CORS_ORIGINS: list[str] | list[AnyHttpUrl]
+     MODE: ModeEnum = ModeEnum.development
+     API_VERSION: str = "v1"
+     API_V1_STR: str = f"/api/{API_VERSION}"
+     OPENAI_API_KEY: str
+     UNSPLASH_API_KEY: str
+     SERP_API_KEY: str
+     HUGGINGFACEHUB_API_TOKEN: str
+     GOOGLE_CSE_ID: str
+     GOOGLE_API_KEY: str
+     VECTOR_DATABASE_LOCATION: str
+     CONVERSATION_COLLECTION_NAME: str
+     EMBEDDING_MODEL: str
+     SOURCES_CACHE: str
+
+     class Config:
+         case_sensitive = True
+         env_file = os.path.expanduser("~/.env")
+
+
+ settings = Settings()
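A minimal sketch of loading and inspecting these settings once the listed variables are present in ~/.env or the environment (field names taken from the class above). Note that from pydantic import BaseSettings assumes Pydantic v1; with Pydantic v2, BaseSettings lives in the separate pydantic-settings package:

from innovation_pathfinder_ai.backend.app.core.config import settings

# Fields without defaults (e.g. OPENAI_API_KEY, GOOGLE_CSE_ID, VECTOR_DATABASE_LOCATION)
# must be provided in ~/.env or the environment, otherwise Settings() raises a validation error.
print(settings.PROJECT_NAME)   # "app"
print(settings.API_V1_STR)     # "/api/v1"
print(settings.MODE)           # ModeEnum.development unless overridden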
innovation_pathfinder_ai/{database → backend/app/crud}/db_handler.py RENAMED
@@ -1,6 +1,6 @@
  from sqlmodel import SQLModel, create_engine, Session, select
- from innovation_pathfinder_ai.database.schema import Sources
- from innovation_pathfinder_ai.utils.logger import get_console_logger
+ from innovation_pathfinder_ai.backend.app.database.db_schema import Sources
+ from innovation_pathfinder_ai.backend.app.utils.logger import get_console_logger
  import os
  from dotenv import load_dotenv
innovation_pathfinder_ai/{database/schema.py → backend/app/database/db_schema.py} RENAMED
File without changes
innovation_pathfinder_ai/{knowledge_base → backend/app/knowledge_base}/placeholder.txt RENAMED
File without changes
innovation_pathfinder_ai/backend/app/main.py ADDED
@@ -0,0 +1,27 @@
+ from fastapi import FastAPI
+ from innovation_pathfinder_ai.backend.app.api.v1.api import api_router as api_router_v1
+ #from app.core.config import settings
+ from fastapi.middleware.cors import CORSMiddleware
+
+ app = FastAPI()
+ BACKEND_CORS_ORIGINS = ["*"]
+ # CORS Middleware setup for allowing frontend requests
+ # ToDO: replace with settings.BACKEND_CORS_ORIGINS once core/config.py is implemented
+ if BACKEND_CORS_ORIGINS:
+     app.add_middleware(
+         CORSMiddleware,
+         allow_origins=[str(origin) for origin in BACKEND_CORS_ORIGINS],
+         allow_credentials=True,
+         allow_methods=["*"],
+         allow_headers=["*"],
+     )
+
+ @app.get("/", tags=["Root"])
+ async def root():
+     """
+     Simple root endpoint to verify the API is running.
+     """
+     return {"message": "API is running"}
+
+ # Include the versioned API router from api.py
+ app.include_router(api_router_v1)
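A minimal way to serve this application for local testing, assuming the package is importable from the repository root and that port 8000 is free (uvicorn is added to requirements.txt in this commit):

import uvicorn

if __name__ == "__main__":
    # Serve the FastAPI app defined in innovation_pathfinder_ai/backend/app/main.py.
    uvicorn.run(
        "innovation_pathfinder_ai.backend.app.main:app",
        host="0.0.0.0",
        port=8000,
        reload=True,  # convenient during development; drop in production
    )

The equivalent command line would be uvicorn innovation_pathfinder_ai.backend.app.main:app --reload.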
innovation_pathfinder_ai/backend/app/schemas/message_schema.py ADDED
@@ -0,0 +1,10 @@
+ from pydantic import BaseModel
+ from typing import List, Tuple, Optional
+
+ class InferRequest(BaseModel):
+     question: str
+     history: List[Tuple[str, str]]
+
+ class BotRequest(BaseModel):
+     history: List[Tuple[str, str]]
+
innovation_pathfinder_ai/backend/app/schemas/response_schema.py ADDED
@@ -0,0 +1,18 @@
+ from pydantic import BaseModel
+ from typing import List, Tuple, Optional
+
+ class SourceData(BaseModel):
+     human_message: str
+     sources: str
+
+ class DocumentAddResponse(BaseModel):
+     success: bool
+     message: Optional[str] = None
+
+ class InferResponse(BaseModel):
+     output: str
+     sources: Optional[List[str]] = []
+
+ class BotResponse(BaseModel):
+     history: List[Tuple[str, str]]
+     response_with_sources: str # Consolidated response with sources appended
innovation_pathfinder_ai/{structured_tools → backend/app/structured_tools}/structured_tools.py RENAMED
@@ -21,7 +21,7 @@ from innovation_pathfinder_ai.source_container.container import (
  from innovation_pathfinder_ai.utils.utils import (
      parse_list_to_dicts, format_wiki_summaries, format_arxiv_documents, format_search_results
  )
- from innovation_pathfinder_ai.database.db_handler import (
+ from backend.app.crud.db_handler import (
      add_many
  )
innovation_pathfinder_ai/{templates → backend/app/templates}/react_json_with_memory.py RENAMED
File without changes
innovation_pathfinder_ai/backend/app/utils/__init__.py ADDED
File without changes
innovation_pathfinder_ai/{utils → backend/app/utils}/logger.py RENAMED
File without changes
innovation_pathfinder_ai/{utils → backend/app/utils}/utils.py RENAMED
@@ -3,7 +3,7 @@ import datetime
  import os
  import uuid

- from innovation_pathfinder_ai.utils import logger
+ from innovation_pathfinder_ai.backend.app.utils import logger

  logger = logger.get_console_logger("utils")

innovation_pathfinder_ai/{vector_store → backend/app/vector_store}/chroma_vector_store.py RENAMED
File without changes
innovation_pathfinder_ai/backend/app/vector_store/initialize_chroma_db.py ADDED
@@ -0,0 +1,20 @@
+ from langchain_community.vectorstores import Chroma
+ import chromadb
+ import dotenv
+ import os
+
+ dotenv.load_dotenv()
+ persist_directory = os.getenv('VECTOR_DATABASE_LOCATION')
+
+ def initialize_chroma_db() -> Chroma:
+     collection_name = os.getenv('CONVERSATION_COLLECTION_NAME')
+
+     client = chromadb.PersistentClient(
+         path=persist_directory
+     )
+
+     collection = client.get_or_create_collection(
+         name=collection_name,
+     )
+
+     return collection
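For orientation, a small usage sketch matching how add_to_kb.py calls this helper. Despite the -> Chroma annotation, the function returns a native chromadb collection, whose add method takes ids, documents, and metadatas; the import path below assumes the backend package resolves from the repository root and that VECTOR_DATABASE_LOCATION and CONVERSATION_COLLECTION_NAME are set in the environment:

from innovation_pathfinder_ai.backend.app.vector_store.initialize_chroma_db import initialize_chroma_db
from innovation_pathfinder_ai.backend.app.utils.utils import generate_uuid

collection = initialize_chroma_db()
doc_id = generate_uuid()
collection.add(
    ids=[doc_id],
    documents=["Example text to index."],
    metadatas=[{"human_message": "example query", "sources": "Internal Knowledge Base From: \n\nexample"}],
)
print(collection.count())  # number of records now in the collection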
app.py → innovation_pathfinder_ai/frontend/app.py RENAMED
@@ -1,14 +1,14 @@
  from fastapi import FastAPI
  import gradio as gr
  from gradio.themes.base import Base
- from innovation_pathfinder_ai.agents.hf_mixtral_agent import agent_executor
+ from innovation_pathfinder_ai.backend.app.api.v1.agents.hf_mixtral_agent import agent_executor
  from innovation_pathfinder_ai.source_container.container import (
      all_sources
  )
- from innovation_pathfinder_ai.utils.utils import extract_urls
- from innovation_pathfinder_ai.utils import logger
+ from innovation_pathfinder_ai.backend.app.utils.utils import extract_urls
+ from innovation_pathfinder_ai.backend.app.utils import logger

- from innovation_pathfinder_ai.utils.utils import (
+ from innovation_pathfinder_ai.backend.app.utils.utils import (
      generate_uuid
  )
  from langchain_community.vectorstores import Chroma
@@ -49,6 +49,10 @@ if __name__ == "__main__":

      def bot(history):
          response = infer(history[-1][0], history)
+          # Existing logic remains the same up to the point where you need to call backend operations
+          # Example for calling generate_uuid from the backend
+          # response = requests.post("http://localhost:8000/add-document")
+          #current_id = response.text
          sources = extract_urls(all_sources)
          src_list = '\n'.join(sources)
          current_id = generate_uuid()
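Following the commented hint in the hunk above, a hedged sketch of how the frontend could post to the new backend instead of writing to Chroma directly. The URL assumes a local uvicorn server on port 8000 and reflects the nested prefixes from api.py ("/add-documents") and add_to_kb.py ("/add-document"); the payload shape is an assumption, since the commit does not define a request model:

import requests  # assumed available in the frontend environment

def add_to_knowledge_base(message: str, documents, sources) -> str:
    # Payload mirrors the attributes read in add_to_kb.add_document (query, documents, sources).
    response = requests.post(
        "http://localhost:8000/add-documents/add-document",
        json={"query": message, "documents": documents, "sources": sources},
    )
    response.raise_for_status()
    return response.json()["ids"]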
innovation_pathfinder_ai/{assets → frontend/assets}/avatar.png RENAMED
File without changes
innovation_pathfinder_ai/{assets → frontend/assets}/favicon.ico RENAMED
File without changes
requirements.txt CHANGED
@@ -12,4 +12,5 @@ pypdf2
  sqlmodel
  rich
  fastapi
+ uvicorn
  sentence-transformers