# Phi-1.5-RLLMv3-10 / generate.py
# (HuggingFace page metadata preserved as comments: uploaded by
# migueldeguzmandev, "Upload 15 files", commit 12db7d3 verified,
# raw / history / blame / contribute / delete, no virus, 1.22 kB)
from transformers import AutoModelForCausalLM, AutoTokenizer
def main():
    """Interactively generate text with a fine-tuned causal language model.

    Loads a model and tokenizer from a local directory, then loops:
    reads a prompt from stdin, generates a continuation, and prints it.
    Typing 'exit' ends the loop. No return value.
    """
    # Path to the fine-tuned model directory — replace with your own.
    model_output_dir = "/Users/migueldeguzman/Desktop/gpt2xl_algos/phi-1.5/v10/"
    tokenizer = AutoTokenizer.from_pretrained(model_output_dir)
    model = AutoModelForCausalLM.from_pretrained(model_output_dir)

    while True:
        # User input for the text-generation prompt.
        prompt = input("Enter a prompt for text generation (or type 'exit' to quit): ")
        if prompt.lower() == 'exit':
            break

        # Encode the prompt and generate text.
        input_ids = tokenizer.encode(prompt, return_tensors="pt")
        # NOTE(review): the original passed do_sample=True with
        # temperature=1e-9, which is numerically fragile (transformers
        # rejects/warns on near-zero temperature) and is effectively
        # greedy decoding anyway. Greedy search (do_sample=False) gives
        # the same behavior robustly; the now-unused sampling knobs
        # (top_k, top_p, temperature) are dropped.
        output = model.generate(
            input_ids,
            max_length=1024,
            num_return_sequences=1,
            no_repeat_ngram_size=2,
            do_sample=False,
        )

        # Decode and print the generated text.
        generated_text = tokenizer.decode(output[0], skip_special_tokens=True)
        print("Generated Text:")
        print(generated_text)
# Script entry point: run the interactive generation loop only when
# executed directly, not when imported. (Fixes the lost indentation of
# the call under the guard.)
if __name__ == "__main__":
    main()