Code Fix for Inference Example
`model_name` was set as a string literal and not properly used as a variable. I had to make this change in my own code after copying and pasting the example, so I'm fixing it here so others don't have to.
README.md
CHANGED
```diff
@@ -68,11 +68,11 @@ device = "cuda"
 model_name = "arcee-ai/Llama-3-SEC"

 model = AutoModelForCausalLM.from_pretrained(
-
+    model_name,
     torch_dtype="auto",
     device_map="auto"
 )
-tokenizer = AutoTokenizer.from_pretrained(
+tokenizer = AutoTokenizer.from_pretrained(model_name)

 prompt = "What are the key regulatory considerations for a company planning to conduct an initial public offering (IPO) in the United States?"
 messages = [
```