import streamlit as st
import pandas as pd
from pandasai import SmartDataframe
from pandasai.llm import OpenAI
# Note: ChatGroq is not part of pandasai.llm; it comes from the separate
# langchain-groq package. Recent pandasai versions (installed with LangChain
# support) detect a LangChain chat model passed in config and wrap it.
from langchain_groq import ChatGroq
from dotenv import load_dotenv
from datasets import load_dataset
import os
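
# Dependencies assumed by this script (no requirements file is shown here):
#   streamlit, pandas, pandasai (with LangChain support), langchain-groq,
#   python-dotenv, datasets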

# Load environment variables
load_dotenv()
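
# Expected .env contents (placeholder values; substitute your own keys):
#   GROQ_API_KEY=<your Groq API key>
#   OPENAI_API_KEY=<your OpenAI API key>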

def initialize_llm(model_choice):
    """Initialize the chosen LLM based on the user's selection."""
    groq_api_key = os.getenv("GROQ_API_KEY")
    openai_api_key = os.getenv("OPENAI_API_KEY")

    if model_choice == "llama-3.3-70b":
        if not groq_api_key:
            st.error("Groq API key is missing. Please set the GROQ_API_KEY environment variable.")
            return None
        # Groq's published model ID; the "groq/" prefix used by some routers
        # (e.g. LiteLLM) is not needed with langchain_groq.
        return ChatGroq(groq_api_key=groq_api_key, model="llama-3.3-70b-versatile")
    elif model_choice == "GPT-4o":
        if not openai_api_key:
            st.error("OpenAI API key is missing. Please set the OPENAI_API_KEY environment variable.")
            return None
        return OpenAI(api_token=openai_api_key)

# App title and description
st.title("Patent Analytics: Chat With Your Dataset")
st.markdown(
    """
    Upload a CSV file or load a dataset from Hugging Face to:
    - Analyze data with natural language queries.
    - Visualize trends and insights (e.g., "Plot the number of patents filed per year").
    """
)

# Initialize session state for the dataframe
if "df" not in st.session_state:
    st.session_state.df = None

# Select the model
model_choice = st.radio("Select LLM", ["GPT-4o", "llama-3.3-70b"], index=0, horizontal=True)
llm = initialize_llm(model_choice)
if not llm:
    st.stop()

def load_dataset_into_session():
    """Load dataset based on user input."""
    input_option = st.radio("Choose Dataset Input Method", ["Use Hugging Face Dataset", "Upload CSV File"], index=0)

    if input_option == "Use Hugging Face Dataset":
        dataset_name = st.text_input("Enter Hugging Face Dataset Name:", value="HUPD/hupd")
        if st.button("Load Dataset"):
            try:
                dataset = load_dataset(dataset_name, split="train", trust_remote_code=True)
                st.session_state.df = dataset.to_pandas()
                st.success(f"Dataset '{dataset_name}' loaded successfully!")
            except Exception as e:
                st.error(f"Error loading dataset: {e}")
    elif input_option == "Upload CSV File":
        uploaded_file = st.file_uploader("Upload CSV File:", type=["csv"])
        if uploaded_file:
            try:
                st.session_state.df = pd.read_csv(uploaded_file)
                st.success("File uploaded successfully!")
            except Exception as e:
                st.error(f"Error loading file: {e}")

# Load dataset
load_dataset_into_session()

# Show the loaded dataframe preview
if st.session_state.df is not None:
    st.subheader("Dataset Preview")
    st.dataframe(st.session_state.df.head(10))

    # Create a SmartDataframe for PandasAI
    chat_df = SmartDataframe(st.session_state.df, config={"llm": llm})
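    # SmartDataframe.chat() sends a description of the dataframe (schema and,
    # depending on configuration, a few sample rows) to the chosen LLM, which
    # generates pandas code that pandasai executes locally to build the answer.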

    # Input box for user questions
    question = st.text_input(
        "Ask a question about your data or request a visualization",
        placeholder="E.g., 'Which assignee has the most patents?' or 'Plot patent filings per year'",
    )

    if question:
        with st.spinner("Processing your request..."):
            try:
                # Chat with the dataframe
                response = chat_df.chat(question)

                # Display the response; pandasai may return a DataFrame, a path
                # to a chart image it saved, or plain text/numbers.
                st.write("### Response")
                if isinstance(response, pd.DataFrame):
                    st.dataframe(response)
                elif (
                    isinstance(response, str)
                    and response.lower().endswith((".png", ".jpg", ".jpeg"))
                    and os.path.exists(response)
                ):
                    st.image(response)
                else:
                    st.write(response)

                st.success("Request processed successfully!")
            except Exception as e:
                st.error(f"An error occurred: {e}")
else:
    st.write("Upload a CSV file or load a dataset to get started.")