bainskarman committed
Commit: fb6c4a2
1 Parent(s): c7996a1
Update model.py
model.py
CHANGED
@@ -1,8 +1,6 @@
-from transformers import pipeline
 from transformers import AutoTokenizer, AutoModelForCausalLM
 import os
 
-)
 def modelFeedback(ats_score, resume_data, job_description):
     """
     Generate ATS feedback by utilizing a pre-configured pipeline.
@@ -30,12 +28,10 @@ def modelFeedback(ats_score, resume_data, job_description):
 
     #### Resume Data: {resume_data}
     #### Job Description: {job_description}
-
-
+    """
 
     # Load the tokenizer and model
     huggingface_token = os.environ.get("KEY2")
-
     tokenizer = AutoTokenizer.from_pretrained(
         "meta-llama/Llama-3.2-1B",
         use_auth_token=huggingface_token
@@ -43,8 +39,8 @@ def modelFeedback(ats_score, resume_data, job_description):
     model = AutoModelForCausalLM.from_pretrained(
         "meta-llama/Llama-3.2-1B",
         use_auth_token=huggingface_token
-
-
+    )
+
     try:
         # Tokenize the input
         input_ids = tokenizer.encode(input_prompt, return_tensors="pt")
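For reference, a minimal sketch of how the touched part of model.py reads after this commit, stitched together from the context and added lines above. The full docstring, the body of the input_prompt f-string, and everything after the tokenize call are outside the diff, so those pieces (marked in comments) are assumptions rather than the author's actual code.

from transformers import AutoTokenizer, AutoModelForCausalLM
import os


def modelFeedback(ats_score, resume_data, job_description):
    """
    Generate ATS feedback by utilizing a pre-configured pipeline.
    """
    # The real prompt body (and the use of ats_score) sits in lines the diff does
    # not show; only the tail of the f-string and the closing quotes added by this
    # commit are visible in the hunks above.
    input_prompt = f"""
    ...
    #### Resume Data: {resume_data}
    #### Job Description: {job_description}
    """

    # Load the tokenizer and model
    huggingface_token = os.environ.get("KEY2")
    tokenizer = AutoTokenizer.from_pretrained(
        "meta-llama/Llama-3.2-1B",
        use_auth_token=huggingface_token
    )
    model = AutoModelForCausalLM.from_pretrained(
        "meta-llama/Llama-3.2-1B",
        use_auth_token=huggingface_token
    )  # closing parenthesis restored by this commit

    try:
        # Tokenize the input
        input_ids = tokenizer.encode(input_prompt, return_tensors="pt")
        # Generation and error handling continue past the end of the visible hunk.
    except Exception:
        # Placeholder: the original except block is not shown in the diff.
        raise

As a side note, use_auth_token is the argument name kept by the file; newer transformers releases prefer token=..., but changing that is outside the scope of this commit.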