Upload app.py
app.py
CHANGED
@@ -10,7 +10,10 @@ from huggingface_hub import CommitScheduler
 from pathlib import Path
 from langchain.embeddings import SentenceTransformerEmbeddings
 from langchain.vectorstores import Chroma
-from langchain.llms import OpenAI
+# from langchain.llms import OpenAI
+
+from langchain_openai import ChatOpenAI
+from langchain.schema import HumanMessage, AIMessage, SystemMessage
 
 # Create Client
 import os
@@ -18,7 +21,11 @@ os.environ['OPENAI_API_KEY'] = "gl-U2FsdGVkX1+0bNWD6YsVLZUYsn0m1WfLxUzrP0xUFbtW
 os.environ["OPENAI_BASE_URL"] = "https://aibe.mygreatlearning.com/openai/v1" # e.g. "https://aibe.mygreatlearning.com/openai/v1";
 
 model_name = 'gpt-4o-mini' # e.g. 'gpt-3.5-turbo'
-llm_client = OpenAI()
+# llm_client = OpenAI()
+# Initialize the ChatOpenAI model
+llm = ChatOpenAI(model_name=model_name, temperature=0) # Set temperature to 0 for deterministic output
+# Create a HumanMessage
+user_message = HumanMessage(content="What's the weather like today?")
 
 # Define the embedding model and the vectorstore
 embedding_model = SentenceTransformerEmbeddings(model_name='thenlper/gte-large')
@@ -111,11 +118,14 @@ def llm_query(user_input,company):
 
     # Get response from the LLM
     try:
-        response = llm_client.chat.completions.create(
-            model=model_name,
-            messages=prompt,
-            temperature=0
-        )
+        # Call the chat model with the message
+        response = llm([prompt])
+
+        # response = llm_client.chat.completions.create(
+        #     model=model_name,
+        #     messages=prompt,
+        #     temperature=0
+        # )
 
         llm_response = response.choices[0].message.content.strip()
 
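A note on the retained extraction line: ChatOpenAI returns an AIMessage rather than an OpenAI ChatCompletion object, so the unchanged llm_response = response.choices[0].message.content.strip() line is likely to raise an AttributeError once response = llm([prompt]) is in place; the deprecated __call__ form also expects a list of message objects, so the [prompt] wrapping only works if prompt is a single message. Below is a minimal sketch, not part of this commit, of how the call and the extraction are usually paired with langchain_openai. It assumes prompt is built from the HumanMessage/SystemMessage classes imported above; the message texts are illustrative placeholders, not code from app.py.

# Sketch only (not part of this commit): pairing ChatOpenAI with
# AIMessage-style response handling. Message contents are placeholders.
from langchain_openai import ChatOpenAI
from langchain.schema import HumanMessage, SystemMessage

llm = ChatOpenAI(model_name="gpt-4o-mini", temperature=0)

messages = [
    SystemMessage(content="Answer strictly from the provided context."),
    HumanMessage(content="What's the weather like today?"),
]

response = llm.invoke(messages)          # returns an AIMessage
llm_response = response.content.strip()  # AIMessage exposes .content, not .choices

If prompt in llm_query is still the OpenAI-style list of role/content dicts that the commented-out chat.completions.create call expected, recent LangChain releases generally accept that shape directly via llm.invoke(prompt) without the extra list wrapping, but that behavior is worth verifying against the pinned langchain and langchain_openai versions.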