import textwrap

import gradio as gr

try:
    import google.generativeai as genai
except ImportError:
    print("WARNING: google.generativeai not found. Install with `pip install google-generativeai` for AI-powered responses.")
    genai = None

try:
    from IPython.display import Markdown  # Only for development/testing
except ImportError:
    Markdown = None
def to_markdown(text):
    """Converts bullet text to an indented Markdown block quote.

    Args:
        text (str): The text to convert.

    Returns:
        IPython.display.Markdown: The converted Markdown object.
    """
    if Markdown is None:
        raise ImportError("IPython is required for to_markdown(); install it with `pip install ipython`.")
    text = text.replace('•', ' *')
    return Markdown(textwrap.indent(text, '> ', predicate=lambda _: True))
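# Illustrative example (development only, not part of the Gradio app): calling
# to_markdown("• Hello") returns a Markdown object whose text has the bullet
# converted to a "*" list item and every line prefixed with "> ".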
def chat(chat_history):
    """Generates a response based on the chat history.

    Args:
        chat_history (list | str): A list of user messages and AI responses,
            or a single user message string (as passed by the Gradio textbox).

    Returns:
        str: The AI's response to the latest user message.
    """
    if not genai or model is None:
        return "AI responses are currently unavailable. Please install `google-generativeai` for this functionality."
    # Get the latest user message, whether given a history list or a plain string.
    user_message = chat_history[-1] if isinstance(chat_history, list) else chat_history
    try:
        response = model.generate_content(user_message, stream=True)
        # Join all streamed chunks into a single reply instead of returning only the first chunk.
        return "".join(chunk.text for chunk in response)
    except Exception as e:
        print(f"Error during generation: {e}")
        return "An error occurred while generating the response. Please try again later."
# Configure the Gemini model once at import time so the Gradio callback can use it.
if genai:
    genai.configure(api_key='YOUR_API_KEY')  # Replace with your actual API key
    model = genai.GenerativeModel('gemini-pro')
else:
    model = None

interface = gr.Interface(
    fn=chat,
    inputs="text",
    outputs="text",
    title="Gradio Chat App",
    description="Chat with an AI assistant (requires `google-generativeai`)",
)

interface.launch()
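# A minimal requirements.txt for deploying this app (e.g. on Hugging Face Spaces)
# would likely be (an assumption; versions are not pinned by this file):
#   gradio
#   google-generativeai
#   ipython   # only needed for the to_markdown() development helper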
# Code below is for development/testing purposes only (not required for the Gradio app).
# Note: interface.launch() above blocks, so this CLI loop only runs once the server
# stops (or if the launch() call is commented out).
if __name__ == "__main__":
    if not genai:
        print("WARNING: google.generativeai not found. Install with `pip install google-generativeai` for AI-powered responses.")
    else:
        chat_history = []
        while True:
            user_message = input("You: ")
            chat_history.append(user_message)
            response = chat(chat_history)
            print(f"AI: {response}")
            chat_history.append(response)
            print("-" * 80)