pratikshahp committed · Commit caf16cf · verified · 1 Parent(s): a6c778b

Create app.py

Files changed (1):
  app.py +108 -0
app.py ADDED
@@ -0,0 +1,108 @@
+ import os
+ from typing import Literal
+ from langgraph.graph import StateGraph, START
+ from langgraph.types import Command
+ from dotenv import load_dotenv
+ import gradio as gr
+ from langchain_huggingface import HuggingFaceEndpoint
+
+ # Load environment variables
+ load_dotenv()
+ HF_TOKEN = os.getenv("HF_TOKEN")
+
+ # Define HuggingFaceEndpoint
+ llm = HuggingFaceEndpoint(
+     repo_id="mistralai/Mistral-7B-Instruct-v0.3",
+     huggingfacehub_api_token=HF_TOKEN.strip(),
+     temperature=0.7,
+     max_new_tokens=300,
+ )
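+ # Note: HF_TOKEN must be set in the environment; the .strip() call above fails if it is missing.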
+
+ # Define state
+ class State(dict):
+     disaster_type: str
+     severity: str
+     resources: list[str]
+     recommendation: str
+
+ # Create the graph
+ builder = StateGraph(State)
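+ # Nodes read from and write to this shared state; each node returns updates that are merged into it.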
+
+ # Define nodes
+ def disaster_type_node(state: State) -> Command[Literal["severity_node"]]:
+     """Set disaster type and proceed to severity selection."""
+     return Command(update={"disaster_type": state["disaster_type"]}, goto="severity_node")
+
+ def severity_node(state: State) -> Command[Literal["resources_node"]]:
+     """Set severity level and proceed to resource selection."""
+     return Command(update={"severity": state["severity"]}, goto="resources_node")
+
+ def resources_node(state: State) -> Command[Literal["generate_response_node"]]:
+     """Set available resources and proceed to generating response."""
+     return Command(update={"resources": state["resources"]}, goto="generate_response_node")
+
+ def generate_response_node(state: State):
+     """Generate a disaster response plan based on inputs."""
+     resources = ", ".join(state["resources"])
+     prompt = f"""
+     Disaster: {state['disaster_type']}
+     Severity: {state['severity']}
+     Resources: {resources}
+
+     You are an emergency response assistant. Provide a detailed response plan for the given situation.
+     """
+     response = llm.invoke(prompt)
+     return {"recommendation": response}
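+ # Each Command's goto field routes execution to the next node, so no explicit edges are needed between these nodes.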
+
+ # Add nodes and the entry edge to the graph
+ builder.add_node("disaster_type_node", disaster_type_node)
+ builder.add_node("severity_node", severity_node)
+ builder.add_node("resources_node", resources_node)
+ builder.add_node("generate_response_node", generate_response_node)
+ builder.add_edge(START, "disaster_type_node")
+
+ # Compile the graph
+ graph = builder.compile()
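+ # Resulting flow: START -> disaster_type_node -> severity_node -> resources_node -> generate_response_node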
+
+ # Gradio Interface
+ def process_disaster(disaster_type, severity, resources):
+     """Run the disaster response graph with user inputs."""
+     state = {"disaster_type": disaster_type, "severity": severity, "resources": resources}
+     result = graph.invoke(state)
+
+     # Generate Mermaid code for graph visualization
+     mermaid_code = graph.get_graph().draw_mermaid()
+
+     # Return the response plan and the Mermaid code
+     return result["recommendation"], f"```mermaid\n{mermaid_code}\n```"
+
+ iface = gr.Interface(
+     fn=process_disaster,
+     inputs=[
+         gr.Dropdown(
+             ["Flood", "Fire", "Earthquake", "Tornado", "Other"],
+             label="Select Disaster Type",
+         ),
+         gr.Dropdown(
+             ["Low", "Medium", "High", "Critical"],
+             label="Select Severity Level",
+         ),
+         gr.CheckboxGroup(
+             [
+                 "Water and Food Supplies",
+                 "Medical Aid",
+                 "Shelter and Evacuation",
+                 "Search and Rescue Teams",
+                 "Other",
+             ],
+             label="Select Available Resources (Multi-Select)",
+         ),
+     ],
+     outputs=[
+         gr.Textbox(label="Generated Disaster Response Plan"),
+         gr.Markdown(label="Mermaid Graph Visualization"),
+     ],
+     title="Disaster Response Assistant",
+ )
+
+ iface.launch()