import streamlit as st
import requests
import json

# API Gateway Invoke URL (Replace with your actual API endpoint)
API_URL = "https://zyqnod20re.execute-api.us-east-1.amazonaws.com/dev/test_bedrock_v4_deepseek"

# Upper bound (seconds) for a single inference call. Without a timeout,
# requests.post can block the Streamlit script forever on a stalled gateway.
REQUEST_TIMEOUT_SECONDS = 60


# Function to call DeepSeek-R1 model
def query_deepseek(prompt: str) -> str:
    """
    Sends a request to the DeepSeek-R1 model via AWS Bedrock API and returns the response.

    Args:
        prompt (str): The user input query.

    Returns:
        str: The generated response from the model, or a human-readable
            error string if the request fails or returns a non-200 status.
    """
    payload = {
        "prompt": prompt,
        "max_gen_len": 1024,
        "temperature": 0.1,
        "top_p": 0.9,
    }

    try:
        # json= serializes the payload and sets the Content-Type header for us;
        # timeout= prevents the UI from hanging indefinitely on a dead endpoint.
        response = requests.post(API_URL, json=payload, timeout=REQUEST_TIMEOUT_SECONDS)
        if response.status_code == 200:
            full_response = response.json()
            # Extract the output text from the model response
            return full_response.get("model_response", {}).get("generation", "No output found.")
        return f"Error {response.status_code}: {response.text}"
    except requests.exceptions.RequestException as e:
        # Covers connection errors, timeouts, and (requests >= 2.27) JSON decode errors.
        return f"Request failed: {e}"
    except ValueError as e:
        # Older requests versions raise a plain ValueError from response.json()
        # when a 200 response carries a non-JSON body.
        return f"Request failed: {e}"


# Streamlit app UI
st.title("DeepSeek-R1 Chatbot")

# Initialize chat history
if "messages" not in st.session_state:
    st.session_state.messages = []

# Display chat messages from history on app rerun
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

# User input
if prompt := st.chat_input("Ask me anything!"):
    # Display user message in chat history
    st.chat_message("user").markdown(prompt)
    st.session_state.messages.append({"role": "user", "content": prompt})

    # Query DeepSeek-R1 model
    response = query_deepseek(prompt)

    # Display assistant response
    with st.chat_message("assistant"):
        st.markdown(response)

    # Add assistant response to chat history
    st.session_state.messages.append({"role": "assistant", "content": response})