"""Gradio web demo serving text generation from a Hugging Face causal LM."""

import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer

# Hugging Face Hub repository holding the fine-tuned model weights.
model_name = "Rishitha0208/new-llm-for-advanced-materials"  # Replace with your model repo name

# Load once at import time so every request reuses the same weights/tokenizer.
model = AutoModelForCausalLM.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)


def predict(input_text: str) -> str:
    """Generate and return a text continuation for ``input_text``.

    Args:
        input_text: Prompt typed into the UI textbox.

    Returns:
        The decoded model output (special tokens stripped).
    """
    inputs = tokenizer(input_text, return_tensors="pt")
    # Without an explicit cap, generate() falls back to the legacy
    # max_length=20 default (and warns on recent transformers releases),
    # truncating answers almost immediately.
    outputs = model.generate(**inputs, max_new_tokens=256)
    return tokenizer.decode(outputs[0], skip_special_tokens=True)


# Build the web UI.  NOTE: the gr.inputs.* namespace was deprecated in
# Gradio 3.x and removed in 4.x — components now live at the top level
# (gr.Textbox), so the original gr.inputs.Textbox call raises
# AttributeError on any current Gradio install.
interface = gr.Interface(
    fn=predict,
    inputs=gr.Textbox(lines=2, placeholder="Enter text here..."),
    outputs="text",
)

if __name__ == "__main__":
    # Spaces / local runs both execute this file as __main__.
    interface.launch()