finlytic / app.py
import gradio as gr
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# Load the model and tokenizer from the Hugging Face Hub.
# AutoModelForSequenceClassification is assumed here so the outputs expose
# .logits; swap in a different Auto* class if the model uses another head.
model = AutoModelForSequenceClassification.from_pretrained("comethrusws/finlytic-compliance")
tokenizer = AutoTokenizer.from_pretrained("comethrusws/finlytic-compliance")
# Define a function to handle inference.
def predict(input_data):
    inputs = tokenizer(input_data, return_tensors="pt")
    with torch.no_grad():
        outputs = model(**inputs)
    # Assumes the model returns classification logits; adjust this
    # post-processing if your model's architecture differs.
    prediction = torch.argmax(outputs.logits, dim=-1).item()
    return str(prediction)
# Create a Gradio interface.
interface = gr.Interface(
    fn=predict,
    inputs=gr.Textbox(label="Input Data"),
    outputs=gr.Textbox(label="Prediction"),
    title="Finlytic Compliance Model",
    description="Predict using the Finlytic compliance model.",
)
# Launch the Gradio app.
if __name__ == "__main__":
    interface.launch()