import torch
from transformers import MllamaForConditionalGeneration, AutoProcessor

# Initialize the Llama 3.2 Vision-Instruct model and its processor
model_id = "meta-llama/Llama-3.2-11B-Vision-Instruct"
model = MllamaForConditionalGeneration.from_pretrained(
    model_id,
    torch_dtype=torch.bfloat16,
    device_map="auto",
)
processor = AutoProcessor.from_pretrained(model_id)


def generate_text(prompt, max_new_tokens=200):
    """Run a text-only chat prompt through the model and return the generated text."""
    messages = [
        {"role": "user", "content": [
            {"type": "text", "text": prompt}
        ]}
    ]
    input_text = processor.apply_chat_template(messages, add_generation_prompt=True)
    inputs = processor(
        text=input_text,
        add_special_tokens=False,
        return_tensors="pt",
    ).to(model.device)
    output = model.generate(**inputs, max_new_tokens=max_new_tokens)
    # Decode only the newly generated tokens, not the echoed prompt
    generated_tokens = output[0][inputs["input_ids"].shape[-1]:]
    return processor.decode(generated_tokens, skip_special_tokens=True)


def generate_company_profile(user_data):
    """Produce a concise company profile from the collected user data."""
    prompt = f"""
    Generate a concise company profile based on the following information:
    Project Description: {user_data['project_description']}
    Industry: {user_data['industry']}
    Target Market: {user_data['market']}
    Location: {user_data['location']}
    Founders: {', '.join([f['name'] for f in user_data['founders_info']])}

    Company Profile:
    """
    return generate_text(prompt, max_new_tokens=200)


def calculate_fundraising_score(user_data):
    """Ask the model for a 0-100 fundraising probability score and clamp the result."""
    prompt = f"""
    Based on the following company information, provide a fundraising probability score between 0 and 100:
    Project Description: {user_data['project_description']}
    Industry: {user_data['industry']}
    Target Market: {user_data['market']}
    Location: {user_data['location']}
    Number of Founders: {len(user_data['founders_info'])}

    Fundraising Probability Score (0-100):
    """
    response = generate_text(prompt, max_new_tokens=10)
    try:
        score = int(response.strip())
        return max(0, min(100, score))  # Ensure the score stays between 0 and 100
    except ValueError:
        return 50  # Default score if the model's reply cannot be parsed as an integer


def generate_recommendations(user_data):
    """Ask the model for 3-5 recommendations and return them as a cleaned list."""
    prompt = f"""
    Based on the following company information, provide 3-5 recommendations to improve fundraising success:
    Project Description: {user_data['project_description']}
    Industry: {user_data['industry']}
    Target Market: {user_data['market']}
    Location: {user_data['location']}
    Number of Founders: {len(user_data['founders_info'])}

    Recommendations:
    """
    response = generate_text(prompt, max_new_tokens=300)
    recommendations = response.strip().split('\n')
    return [rec.strip() for rec in recommendations if rec.strip()]
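

# Illustrative usage sketch (not part of the original script): the user_data dict
# below is a hypothetical payload whose keys match what the functions above read;
# all field values are placeholders.
if __name__ == "__main__":
    user_data = {
        "project_description": "AI-powered platform that automates invoice processing for small businesses",
        "industry": "Fintech",
        "market": "Small and medium-sized businesses",
        "location": "Paris, France",
        "founders_info": [
            {"name": "Alice Martin"},
            {"name": "Karim Benali"},
        ],
    }

    print(generate_company_profile(user_data))
    print(calculate_fundraising_score(user_data))
    for rec in generate_recommendations(user_data):
        print("-", rec)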