Update app.py
Browse files
app.py
CHANGED
@@ -21,10 +21,12 @@ os.environ["TOGETHER_AI_API"] = "<REDACTED>"  # SECURITY: a live-looking API key was committed here in plaintext; rotate the credential immediately and load it from the deployment environment (e.g. os.environ.get("TOGETHER_AI_API")) instead of hardcoding it in source
|
|
os.environ['TRANSFORMERS_CACHE'] = '/tmp/cache'

# Initialize FastAPI Router.
# BUG FIX: the original referenced `router` in app.include_router(router)
# without ever defining it, which raises NameError at import time.
# The router must be created before it can be attached to the app.
router = APIRouter()
app = FastAPI()

# Include the router so its registered endpoints are served by the app.
app.include_router(router)

# Display name for the chatbot — presumably echoed in responses; confirm downstream usage.
bot_name = "LawGPT"

# Lazy loading of large models (only load embeddings and index when required)
@@ -150,4 +152,4 @@ async def chat(request: ChatRequest):
|
|
# GET endpoint to check if the API is running.
@router.get("/")
async def root():
    """Health check: confirm the LawGPT API is up and responding."""
    return {"message": "LawGPT API is running."}
|
|
|
os.environ['TRANSFORMERS_CACHE'] = '/tmp/cache'

# Initialize FastAPI Router — the router must exist before it is attached.
router = APIRouter()  # Define router here first
app = FastAPI()

# Attach the router's endpoints to the application.
# NOTE(review): include_router() copies the router's routes at call time;
# endpoints registered on `router` after this line (e.g. the "/" route
# defined later in the file) may not be served — confirm, and consider
# moving this call below all route definitions.
app.include_router(router)

# Display name for the chatbot — presumably echoed in responses; confirm downstream usage.
bot_name = "LawGPT"

# Lazy loading of large models (only load embeddings and index when required)
|
|
# GET endpoint to check if the API is running.
@router.get("/")
async def root():
    """Health check: confirm the LawGPT API is up and responding."""
    return {"message": "LawGPT API is running."}