import os
from typing import Annotated
from typing_extensions import TypedDict
from langgraph.graph import StateGraph, START, END
from langgraph.graph.message import add_messages
#from langchain_anthropic import ChatAnthropic
from langchain_huggingface import HuggingFaceEndpoint
from dotenv import load_dotenv
import logging
# Initialize logging
logging.basicConfig(level=logging.INFO)
load_dotenv()
#ANTHROPIC_API_KEY = os.getenv("ANTHROPIC_API_KEY")
HF_TOKEN = os.getenv("HF_TOKEN")
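# Fail fast with a clear message if the token is missing; otherwise the
# HF_TOKEN.strip() call below would raise a confusing AttributeError.
if not HF_TOKEN:
    raise EnvironmentError("HF_TOKEN is not set. Add it to your environment or .env file.")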
# Initialize the chat model (the Anthropic version is kept commented out for reference)
#llm = ChatAnthropic(model="claude-3-5-sonnet-20240620", anthropic_api_key=ANTHROPIC_API_KEY)
llm = HuggingFaceEndpoint(
    repo_id="mistralai/Mistral-7B-Instruct-v0.3",
    huggingfacehub_api_token=HF_TOKEN.strip(),
    temperature=0.7,
    max_new_tokens=200
)
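# Note: HuggingFaceEndpoint is a text-completion LLM, so invoke() returns a plain
# string rather than a chat message object.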
# Define the state structure
class State(TypedDict):
    messages: Annotated[list, add_messages]
# Create a state graph builder
graph_builder = StateGraph(State)
# Define the chatbot function
def chatbot(state: State):
    try:
        logging.info(f"Input Messages: {state['messages']}")
        response = llm.invoke(state["messages"])
        logging.info(f"LLM Response: {response}")
        return {"messages": [response]}
    except Exception as e:
        logging.error(f"Error: {str(e)}")
        return {"messages": [f"Error: {str(e)}"]}
# Add nodes and edges to the state graph
graph_builder.add_node("chatbot", chatbot)
graph_builder.add_edge(START, "chatbot")
graph_builder.add_edge("chatbot", END)
# Compile the state graph
graph = graph_builder.compile()
# Generate Mermaid code for visualization
mermaid_code = graph.get_graph().draw_mermaid()
# Display the Mermaid code
print(f"```mermaid\n{mermaid_code}\n```")
# Test the graph with a sample state
if __name__ == "__main__":
    initial_state = {"messages": ["Hello, how are you?"]}
    result = graph.invoke(initial_state)
    print(f"Final State: {result}")