"""Gradio demo app: Urdu news text generation with a fine-tuned GPT-2 model."""

import gradio as gr
from transformers import pipeline

# Build the generation pipeline once at startup; the model is a GPT-2
# checkpoint fine-tuned on Urdu news text.
pipe = pipeline("text-generation", "Imran1/gpt2-urdu-news")


def text_generate(text):
    """Continue *text* with model-generated Urdu news text.

    Args:
        text: Urdu prompt string supplied by the user.

    Returns:
        The generated continuation (prompt included), up to 200 tokens,
        sampled with top-k filtering (k=2000) and nucleus sampling (p=0.92).
    """
    generated = pipe(
        text,
        max_length=200,
        top_k=2000,
        temperature=1.0,
        top_p=0.92,
    )
    # pipeline() returns a list of dicts; take the first candidate's text.
    return generated[0]["generated_text"]


# Example Urdu prompts shown as clickable samples in the UI.
exmp = ["درمدی قیمتوں میں", "ایک فیصلے میں کہا کہ"]

# Guard the launch so importing this module does not start a server.
if __name__ == "__main__":
    gr.Interface(
        fn=text_generate,
        inputs="text",
        outputs="text",
        examples=exmp,
        title="FINE TUNE GPT2 FOR URDU NEWS TEXT GENERATION",
    ).launch()