|
import re |
|
import string |
|
import nltk |
|
from fastapi import FastAPI, HTTPException |
|
from pydantic import BaseModel |
|
from typing import Optional |
|
from transformers import pipeline |
|
from pyngrok import ngrok |
|
import nest_asyncio |
|
from fastapi.responses import RedirectResponse |
|
|
|
|
|
# Fetch NLTK data needed by lemmatize():
# 'punkt' backs nltk.word_tokenize, 'wordnet' backs WordNetLemmatizer.
nltk.download('punkt')

nltk.download('wordnet')
|
|
|
|
|
# FastAPI application instance; the routes below are registered on it.
app = FastAPI()
|
|
|
|
|
def remove_urls(text):
    """Return *text* with every http/https URL stripped out."""
    url_pattern = r'http[s]?://\S+'
    return re.sub(url_pattern, '', text)
|
|
|
def remove_punctuation(text):
    """Return *text* with all ASCII punctuation characters removed.

    The original spliced ``string.punctuation`` raw into a regex character
    class; that only parsed correctly by accident of character ordering
    (the ``\\`` happened to escape ``]``, and ``,-.`` formed a valid range).
    ``re.escape`` makes the class robust regardless of the set's contents.
    """
    punctuation_class = '[' + re.escape(string.punctuation) + ']'
    return re.sub(punctuation_class, '', text)
|
|
|
def lower_case(text):
    """Return a lower-cased copy of *text*."""
    lowered = str.lower(text)
    return lowered
|
|
|
def lemmatize(text):
    """Tokenize *text* with NLTK and lemmatize each token via WordNet.

    Returns the lemmatized tokens re-joined with single spaces.

    The lemmatizer is built once and cached on the function object:
    the original constructed a fresh WordNetLemmatizer on every call,
    which is wasted work on a per-request hot path.
    """
    lemmatizer = getattr(lemmatize, "_lemmatizer", None)
    if lemmatizer is None:
        lemmatizer = nltk.WordNetLemmatizer()
        lemmatize._lemmatizer = lemmatizer
    tokens = nltk.word_tokenize(text)
    return ' '.join(lemmatizer.lemmatize(token) for token in tokens)
|
|
|
|
|
# Hugging Face text-classification pipeline (multilingual DistilBERT
# sentiment student model), loaded once at module import so every
# request reuses the same model instance.
lyx_pipe = pipeline("text-classification", model="lxyuan/distilbert-base-multilingual-cased-sentiments-student")
|
|
|
|
|
class TextInput(BaseModel):
    """Request body for POST /analyze/: a single free-form text field."""

    # Raw text to analyze; preprocessed before classification.
    text: str
|
|
|
|
|
@app.get('/')
async def welcome():
    """Redirect the bare root path to the interactive Swagger docs."""
    docs_redirect = RedirectResponse(url="/docs")
    return docs_redirect
|
|
|
|
|
@app.post('/analyze/')
async def Predict_Sentiment(text_input: TextInput):
    """Classify the sentiment of the submitted text.

    The text is cleaned first (URLs stripped, punctuation removed,
    lower-cased, lemmatized — in that order), then fed to the
    transformers pipeline. Pipeline failures surface as HTTP 500.
    """
    cleaned = text_input.text

    # Apply the preprocessing steps in their required order.
    for step in (remove_urls, remove_punctuation, lower_case, lemmatize):
        cleaned = step(cleaned)

    try:
        return lyx_pipe(cleaned)
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
|
|
|
|
|
if __name__ == "__main__":

    # Open a public ngrok tunnel to the local port so the API is
    # reachable from outside (e.g. when running in a notebook).
    ngrok_tunnel = ngrok.connect(7860)

    print('Public URL:', ngrok_tunnel.public_url)

    # Patch asyncio so uvicorn can run inside an already-running event
    # loop — NOTE(review): presumably for Jupyter/Colab; confirm.
    nest_asyncio.apply()

    # NOTE(review): uvicorn is imported lazily here rather than at the
    # top of the file; kept as-is to preserve import-time behavior.
    import uvicorn

    uvicorn.run(app, port=7860)
|
|