# assignment/app/model_utils.py
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer


def load_model_and_tokenizer(model_name: str):
    """Load a seq2seq model and its matching tokenizer from the Hugging Face Hub."""
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
    return model, tokenizer


def generate_summary(prompt: str, model, tokenizer) -> str:
    """Generate a short, article-style response to a mental health prompt."""
    context_prompt = f"Provide a brief, informative article addressing the following mental health concern: {prompt}"
    try:
        inputs = tokenizer(context_prompt, return_tensors="pt", truncation=True, padding=True)
        # Beam search with explicit length bounds keeps the output concise but complete.
        summary_ids = model.generate(
            inputs["input_ids"], attention_mask=inputs["attention_mask"],
            max_length=150, min_length=40, length_penalty=2.0, num_beams=4, early_stopping=True,
        )
        summary = tokenizer.decode(summary_ids[0], skip_special_tokens=True, clean_up_tokenization_spaces=True)
        return summary
    except Exception as e:
        # Return the error message so the caller can surface it instead of crashing.
        return str(e)
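

# Minimal usage sketch: the checkpoint name "google/flan-t5-base" is only an
# illustrative assumption; the model actually served by the app is configured
# elsewhere and may differ.
if __name__ == "__main__":
    model, tokenizer = load_model_and_tokenizer("google/flan-t5-base")  # assumed checkpoint
    print(generate_summary("I have trouble sleeping and feel anxious at night.", model, tokenizer))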