Update main.py
Browse files
main.py
CHANGED
@@ -1,4 +1,5 @@
|
|
1 |
from flask import Flask, jsonify, render_template, request, make_response
|
|
|
2 |
import transformers
|
3 |
from huggingface_hub import cached_download
|
4 |
import torch
|
@@ -12,29 +13,17 @@ from collections import OrderedDict
|
|
12 |
|
13 |
# Flask application instance; the route handlers below register on it.
app = Flask(__name__)
|
14 |
|
15 |
-
# create a python dictionary for your models d = {<key>: <value>, <key>: <value>, ..., <key>: <value>}
# NOTE(review): this URL is the model's web page on the Hub, not a direct
# file URL — `cached_download()` fetches a single file, so `model_path` will
# point at downloaded HTML rather than model weights; confirm. Also,
# `cached_download` is deprecated in huggingface_hub. `AutoTokenizer.
# from_pretrained()` accepts the repo id
# ("nlptown/bert-base-multilingual-uncased-sentiment") directly and handles
# downloading/caching itself.
model_url = "https://huggingface.co/nlptown/bert-base-multilingual-uncased-sentiment"
model_path = cached_download(model_url)
tokenizer = AutoTokenizer.from_pretrained(model_path)
|
19 |
|
20 |
-
|
21 |
-
|
22 |
-
|
23 |
-
|
24 |
-
|
25 |
-
# Snapshot of the model-dictionary keys (dictOfModels is defined earlier
# in the file); iterating a dict yields its keys, so list() suffices.
listOfKeys = list(dictOfModels)
|
28 |
-
|
29 |
-
def get_prediction(message, model):
    """Feed *message* to the given model callable and return its raw output."""
    # Inference is a single call on the (pipeline-style) model object.
    return model(message)
|
33 |
|
34 |
@app.route('/', methods=['GET'])
|
35 |
def get():
    """Handle GET /: placeholder endpoint.

    The original body was a bare ``return`` (i.e. ``None``); Flask raises a
    TypeError when a view function returns ``None``, so return an empty
    string to produce an empty 200 response instead.
    """
    return ""
|
38 |
|
39 |
@app.route('/', methods=['POST'])
|
40 |
def predict():
|
|
|
1 |
from flask import Flask, jsonify, render_template, request, make_response
|
2 |
+
import requests
|
3 |
import transformers
|
4 |
from huggingface_hub import cached_download
|
5 |
import torch
|
|
|
13 |
|
14 |
# Flask application instance used to register the HTTP routes.
app = Flask(__name__)
|
15 |
|
|
|
|
|
|
|
|
|
16 |
|
17 |
+
import os

# SECURITY: the original hard-coded a live ``hf_...`` API token in source,
# leaking the credential to anyone who can read the repo (that token should
# be revoked). Read it from the environment instead.
HF_API_TOKEN = os.environ.get("HF_API_TOKEN", "")
headers = {"Authorization": f"Bearer {HF_API_TOKEN}"}
# Hosted Inference API endpoint for the multilingual sentiment model.
API_URL = "https://api-inference.huggingface.co/models/nlptown/bert-base-multilingual-uncased-sentiment"

def query(payload):
    """POST *payload* to the Hugging Face Inference API and return the JSON.

    Parameters
    ----------
    payload : dict
        JSON-serializable request body, e.g. ``{"inputs": "some text"}``.

    Returns
    -------
    The JSON-decoded response from the model endpoint (raises if the body
    is not valid JSON).
    """
    response = requests.post(API_URL, headers=headers, json=payload)
    return response.json()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
22 |
|
23 |
@app.route('/', methods=['GET'])
|
24 |
def get():
    """Handle GET /: run a fixed sample sentence through the model.

    The Inference API answers with a JSON *list* of label/score entries;
    a bare list is not a valid view return value on older Flask versions,
    so serialize it explicitly with ``jsonify`` (imported at the top of
    this file).
    """
    data = query({"inputs": "The movie is good"})
    return jsonify(data)
|
27 |
|
28 |
@app.route('/', methods=['POST'])
|
29 |
def predict():
|