"""Gradio app: upload a text file as context and ask a question about it.

A DistilBERT extractive-QA model selects the answer span from the uploaded
file's text. File encoding is auto-detected with chardet so non-UTF-8
uploads still work.
"""

import chardet
import gradio as gr
from transformers import pipeline

# DistilBERT fine-tuned on SQuAD: smaller and faster than roberta-base-squad2,
# which was the previously used alternative.
qa_pipeline = pipeline(
    "question-answering",
    model="distilbert-base-cased-distilled-squad",
)


def answer_question(context, question):
    """Return the model's extracted answer span for *question* given *context*.

    Parameters
    ----------
    context : str
        The passage of text to search for an answer.
    question : str
        The question to answer.

    Returns
    -------
    str
        The answer text extracted from *context* by the QA pipeline.
    """
    result = qa_pipeline(question=question, context=context)
    return result["answer"]


def process(context_file, question):
    """Read the uploaded file (auto-detecting its encoding) and answer *question*.

    Parameters
    ----------
    context_file : gradio uploaded-file object
        Must expose the temp-file path via its ``.name`` attribute.
    question : str
        The question to answer against the file's contents.

    Returns
    -------
    str
        The extracted answer.
    """
    # Read raw bytes first so we can detect the encoding rather than
    # assuming UTF-8 (text-mode open would fail on e.g. Latin-1 files).
    with open(context_file.name, "rb") as file:
        raw_data = file.read()

    detection = chardet.detect(raw_data)
    # chardet reports None when it cannot identify the encoding; fall back
    # to UTF-8 in that case.
    encoding = detection["encoding"] or "utf-8"

    # errors="replace" substitutes U+FFFD for undecodable bytes instead of
    # raising, so a slightly corrupt file still produces usable context.
    context = raw_data.decode(encoding, errors="replace")
    return answer_question(context, question)


# Gradio UI wiring: file upload + question textbox -> answer textbox.
demo = gr.Interface(
    fn=process,
    inputs=[gr.File(label="Upload Context File"), gr.Textbox(label="Question")],
    outputs=[gr.Textbox(label="Answer")],
    title="Question Answering",
    description="Upload a file with context and ask a question. The answer will be displayed.",
)

if __name__ == "__main__":
    demo.launch()