import requests
import streamlit as st
import time 
from transformers import pipeline
import os
from .utils import query

# Hugging Face API token, read from the environment at import time.
HF_AUTH_TOKEN = os.getenv('HF_AUTH_TOKEN')
# Authorization header for the HF Inference API.
# NOTE(review): `headers` is not referenced anywhere in this chunk —
# presumably consumed by `query` in .utils or elsewhere; confirm before removing.
headers = {"Authorization": f"Bearer {HF_AUTH_TOKEN}"}

def write():
	"""Render the sentiment-classification page of the Streamlit app.

	Lets the user pick one of the fine-tuned TURNA classification models,
	enter a text, and sends it to the Hugging Face Inference API via
	``query``; the API response is shown with ``st.success``.
	"""
	st.markdown("# Sentiment Classification")
	st.sidebar.header("Sentiment Classification")
	st.write(
		"""Here, you can perform sentiment classification using the fine-tuned TURNA classification models. """
	)

	# Sidebar
	# Taken from https://huggingface.co/spaces/flax-community/spanish-gpt2/blob/main/app.py
	st.sidebar.subheader("Configurable parameters")

	model_name = st.sidebar.selectbox(
		"Model Selector",
		options=[
			"turna_classification_17bintweet_sentiment",
			"turna_classification_17bintweet_sentiment_NLU",
			"turna_classification_17bintweet_sentiment_NLG",
			"turna_classification_17bintweet_sentiment_S2S",
			"turna_classification_tr_product_reviews",
			"turna_classification_tr_product_reviews_NLU",
			"turna_classification_tr_product_reviews_NLG",
			"turna_classification_tr_product_reviews_S2S",
		],
		index=0,
	)

	input_text = st.text_area(label='Enter a text: ', height=100,
			value="sonunda bugün kurtuldum senden")
	# NOTE(review): Hugging Face repo IDs are case-sensitive; .lower() assumes
	# every boun-tabi-LMG repo name is all-lowercase — confirm this holds for
	# the *_NLU/_NLG/_S2S variants listed above.
	url = "https://api-inference.huggingface.co/models/boun-tabi-LMG/" + model_name.lower()
	# Classification outputs are short label strings, so a tiny generation
	# budget is sufficient.
	params = {"max_new_tokens": 4}
	if st.button("Generate"):
		with st.spinner('Generating...'):
			output = query(input_text, url, params)
			st.success(output)