# bevelapi / main.py — Flask chat API serving a Facebook BlenderBot model
# (originally hosted as a Hugging Face Space by BeveledCube).
from flask import Flask, request, render_template, jsonify
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
# Hugging Face model id to serve. Alternatives are listed below; larger
# checkpoints trade memory/latency for response quality.
model_name = "facebook/blenderbot-1B-distill"

# https://huggingface.co/models?sort=trending&search=facebook%2Fblenderbot
# facebook/blenderbot-3B
# facebook/blenderbot-1B-distill
# facebook/blenderbot-400M-distill
# facebook/blenderbot-90M
# facebook/blenderbot_small-90M
# https://www.youtube.com/watch?v=irjYqV6EebU

app = Flask("AI API")

# Loaded once at import time (downloads weights on first run); both are
# read-only globals shared by the request handlers below.
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)
@app.get("/")
def read_root():
    """Serve the chat front-end page."""
    page = "index.html"
    return render_template(page)
@app.route("/test")
def test_route():
    """Plain-text liveness check endpoint."""
    message = "This is a test route."
    return message
@app.route("/api", methods=["POST"])
def receive_data():
    """Generate a chat reply for the JSON body's "prompt" field.

    Expects a request body like {"prompt": "<user text>"} and returns
    {"answer": "<model reply>"} as JSON.

    Previously a missing/invalid JSON body or an absent "prompt" key
    raised (TypeError/KeyError) and surfaced as an HTTP 500; now the
    endpoint returns an explicit 400 with an error payload instead.
    """
    # silent=True yields None for an absent or unparseable body
    # instead of raising.
    data = request.get_json(silent=True)
    if not data or "prompt" not in data:
        return jsonify({"error": "JSON body with a 'prompt' field is required"}), 400

    input_text = data["prompt"]
    print("Prompt:", input_text)

    # Tokenize the input text into model-ready tensors.
    input_ids = tokenizer.encode(input_text, return_tensors="pt")

    # Beam search with bigram-repetition blocking, same settings as before.
    output_ids = model.generate(input_ids, num_beams=5, no_repeat_ngram_size=2)
    generated_text = tokenizer.decode(output_ids[0], skip_special_tokens=True)

    answer_data = {"answer": generated_text}
    print("Response:", generated_text)
    return jsonify(answer_data)
app.run(host="0.0.0.0", port=25428, debug=False)