Update chatbot.py
chatbot.py CHANGED: +3 -4
@@ -10,8 +10,7 @@ from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfi
 my_model_id = os.getenv('MODEL_REPO_ID', 'Default Value')
 token = os.getenv('HUGGINGFACEHUB_API_TOKEN')
 
-template = """
-<<SYS>>
+template = """<s>[INST]<<SYS>>
 You are an AI having conversation with a human. Below is an instruction that describes a task.
 Write a response that appropriately completes the request.
 Reply with the most helpful and logic answer. During the conversation you need to ask the user
@@ -27,8 +26,8 @@ booking process.
 Current conversation:
 {history}
 
-Human: {input}
-AI:"""
+Human: {{{input}}}
+AI: [/INST]"""
 
 #@st.cache_resource
 def load_model():
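
The change wraps the existing system prompt in Llama-2-style instruction tags (<s>[INST]<<SYS>> ... [/INST]) and triples the braces around {input}. Below is a minimal sketch, not part of the commit, of how the edited template renders once its placeholders are filled; it assumes str.format-style substitution (what a LangChain f-string PromptTemplate would do), since the actual rendering code in chatbot.py is outside this diff, and it elides the middle of the system prompt with "...".

# Sketch only (assumption): stands in for whatever prompt templating
# chatbot.py actually uses; the wiring around load_model() is not in this diff.
template = """<s>[INST]<<SYS>>
You are an AI having conversation with a human. Below is an instruction that describes a task.
...
Current conversation:
{history}

Human: {{{input}}}
AI: [/INST]"""

rendered = template.format(
    history="Human: Hi\nAI: Hello! How can I help with your booking?",
    input="I need a room for two nights.",
)

# The doubled braces escape to literal braces, so the user's message ends up
# wrapped in curly braces in the final prompt:
#   Human: {I need a room for two nights.}
print(rendered)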