from peft import PeftModel, PeftConfig
from transformers import AutoModelForCausalLM

# Load the adapter config, the base model, and then attach the trained adapter
config = PeftConfig.from_pretrained("ameerazam08/Mistral-7B-v0.1-Hin-Eng-1000")
model = AutoModelForCausalLM.from_pretrained("mistralai/Mistral-7B-v0.1")
model = PeftModel.from_pretrained(model, "ameerazam08/Mistral-7B-v0.1-Hin-Eng-1000")
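If you want the adapter folded into the base weights (for example, to export a standalone checkpoint), PEFT's merge_and_unload can be used. A minimal sketch, assuming the adapter is LoRA-based; the output directory name is arbitrary:

# Sketch: merge the LoRA adapter into the base model (output path is hypothetical)
merged_model = model.merge_and_unload()
merged_model.save_pretrained("./mistral-hin-eng-merged")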
Learn more about Mistral here.

Inference code and sample results

import torch
from transformers import AutoTokenizer, AutoModelForCausalLM, BitsAndBytesConfig
import warnings

warnings.filterwarnings("ignore")

base_model_id = "mistralai/Mistral-7B-v0.1"
# 4-bit NF4 quantization with double quantization; computation runs in bfloat16
bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_use_double_quant=True,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_compute_dtype=torch.bfloat16
)

# Load the quantized base model, sharding it across available devices
base_model = AutoModelForCausalLM.from_pretrained(
    base_model_id,
    quantization_config=bnb_config,
    device_map="auto",
    trust_remote_code=True,
    use_auth_token=True
)
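As a quick sanity check that the 4-bit load worked, the model's memory footprint can be printed with the standard transformers helper get_memory_footprint; for a 7B model in NF4 this should come out on the order of 4 GB rather than the ~14 GB a fp16 load would need:

# Optional sanity check: a 4-bit 7B model should report roughly 4 GB here
print(f"Memory footprint: {base_model.get_memory_footprint() / 1e9:.2f} GB")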

tokenizer = AutoTokenizer.from_pretrained(base_model_id, trust_remote_code=True, padding_side='left')  # left padding for decoder-only generation
tokenizer.pad_token = tokenizer.eos_token  # Mistral has no pad token; reuse EOS

from peft import PeftModel
# Attach the fine-tuned adapter; pass a Hub repo id or a local adapter directory
ft_model = PeftModel.from_pretrained(base_model, "Peft_model-Path-or-Local-path")
prefix = "translate Hindi to English: "
eval_prompt = prefix+"वह एक बड़ी गाड़ी चाहता है।,मैं भारत घूमना चाहता हूँ।,मुझे कुछ पैसे चाहिए।"
# eval_prompt = "Translate in Hindi: I am good "
model_input = tokenizer(eval_prompt, return_tensors="pt").to("cuda")

ft_model.eval()
with torch.no_grad():
    output_ids = ft_model.generate(**model_input, max_new_tokens=40, pad_token_id=tokenizer.eos_token_id, repetition_penalty=1.3)[0]
    print(tokenizer.decode(output_ids, skip_special_tokens=True))



# translate Hindi to English: मैं भारत घूमना चाहता हूँ।. I want to go to India.
# translate Hindi to English: वह एक बड़ी गाड़ी चाहता है।,मैं भारत घूमना चाहता हूँ।,मुझे कुछ पैसे चाहिए।   He wants a bigger car. I want to go around India. I need some money.
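Left padding was set on the tokenizer above precisely so that several prompts can be translated in one batch. A minimal sketch of batched generation; the example sentences are illustrative, not taken from the training data:

# Batched translation sketch (left padding keeps prompts right-aligned for generation)
prompts = [prefix + "मुझे कुछ पैसे चाहिए।", prefix + "वह एक बड़ी गाड़ी चाहता है।"]
batch = tokenizer(prompts, return_tensors="pt", padding=True).to("cuda")
with torch.no_grad():
    out = ft_model.generate(**batch, max_new_tokens=40, pad_token_id=tokenizer.eos_token_id, repetition_penalty=1.3)
for seq in out:
    print(tokenizer.decode(seq, skip_special_tokens=True))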