"""Gradio Space: sentence-level emotion detection.

Each non-empty line of the input textbox is classified with the
``mrm8488/t5-base-finetuned-emotion`` tokenizer and a locally stored T5
model.  The (sentence, emotion) pairs are appended to a CSV log inside a
Hugging Face dataset repository and pushed back to the Hub.
"""

import csv  # was missing in the original; generate_emotion uses csv.writer
import gc
import os

import gradio as gr
import pandas as pd
from huggingface_hub import Repository
from transformers import AutoModelWithLMHead, AutoTokenizer

HF_TOKEN = os.environ.get("HF_TOKEN")
DATASET_NAME = "emotion_detection"
DATASET_REPO_URL = f"https://huggingface.co/datasets/pragnakalp/{DATASET_NAME}"
DATA_FILENAME = "emotion_detection_logs.csv"
DATA_FILE = os.path.join("emotion_detection_logs", DATA_FILENAME)
DATASET_REPO_ID = "pragnakalp/emotion_detection"
print("is none?", HF_TOKEN is None)

# Clone (or reuse) the dataset repo so DATA_FILE exists locally and
# push_to_hub() has a repository to push to.
# NOTE(review): the original code called `repo.push_to_hub()` on an
# undefined name `repo`; this is the standard Spaces logging pattern and
# matches DATASET_REPO_URL / DATA_FILE above.  Best-effort: a failure here
# (e.g. missing HF_TOKEN) must not prevent the demo from starting.
try:
    repo = Repository(
        local_dir="emotion_detection_logs",
        clone_from=DATASET_REPO_URL,
        use_auth_token=HF_TOKEN,
    )
except Exception as exc:
    print("could not clone dataset repo:", exc)
    repo = None

sentences_value = """Raj loves Simran.\nLast year I lost my Dog.\nI bought a new phone!\nShe is scared of cockroaches.\nWow! I was not expecting that.\nShe got mad at him."""

cwd = os.getcwd()
model_path = os.path.join(cwd)  # model weights are expected in the working dir
tokenizer = AutoTokenizer.from_pretrained("mrm8488/t5-base-finetuned-emotion")
model_base = AutoModelWithLMHead.from_pretrained(model_path)


def get_emotion(text):
    """Return the emotion label the T5 model generates for *text*.

    The model emits the label as generated text (max_length=2), so the
    decoded first sequence is the label itself.
    """
    input_ids = tokenizer.encode(text, return_tensors='pt')
    output = model_base.generate(input_ids=input_ids, max_length=2)
    dec = [tokenizer.decode(ids) for ids in output]
    label = dec[0]
    gc.collect()
    return label


def generate_emotion(article):
    """Classify each non-empty line of *article* and return a DataFrame.

    Also appends the results to DATA_FILE and pushes the dataset repo to
    the Hub (best-effort; logging failures are printed, not raised).

    Returns a DataFrame with columns 'Input' and 'Detected Emotion'.
    """
    sentences = article.split('\n')
    print(sentences)

    # Build the output columns from the sentences actually classified.
    # BUG FIX: the original paired ALL input lines with results from only
    # the non-empty ones, so pd.DataFrame raised ValueError (mismatched
    # column lengths) whenever the input contained a blank line.
    processed = []
    emotions = []
    results_dict = []
    for sen in sentences:
        if sen.strip():
            label = get_emotion(sen)
            processed.append(sen)
            emotions.append(label)
            results_dict.append({'sentence': sen, 'emotion': label})

    result = {'Input': processed, 'Detected Emotion': emotions}
    gc.collect()

    # Best-effort logging: never let a logging failure break inference.
    try:
        add_csv = [results_dict]
        with open(DATA_FILE, "a") as f:
            writer = csv.writer(f)
            # write the data
            writer.writerow(add_csv)
        if repo is not None:
            commit_url = repo.push_to_hub()
            print("commit data :", commit_url)
    except Exception as exc:
        print("failed to log results:", exc)

    return pd.DataFrame(result)


# (A large commented-out save/mail helper that referenced undefined names
# was removed here; see repository history if it needs to be restored.)

inputs = gr.Textbox(value=sentences_value, lines=10, label="Sentences", elem_id="inp_div")
outputs = [
    gr.Dataframe(
        row_count=(2, "dynamic"),
        col_count=(2, "fixed"),
        label="Here is the Result",
        headers=["Input", "Detected Emotion"],
    )
]

demo = gr.Interface(
    generate_emotion,
    inputs,
    outputs,
    title="Emotion Detection",
    description="Feel free to give your feedback",
    css=".gradio-container {background-color: lightgray} #inp_div {background-color: #FB3D5;}"
)

demo.launch()