gizemsarsinlar committed
Commit 63dbe21
Parent: 698d782

Update app.py

Files changed (1)
  1. app.py +65 -52
app.py CHANGED
@@ -1,13 +1,25 @@
  import os
- import chainlit as cl
- import textract  # For extracting text from documents
-
- # Function to extract text from uploaded documents
- async def extract_text_from_file(file_path):
-     return textract.process(file_path).decode('utf-8')
-
- # Chat initialization
- @cl.on_chat_start
  async def start_chat():
      settings = {
          "model": "gpt-3.5-turbo",
@@ -17,51 +29,52 @@ async def start_chat():
          "frequency_penalty": 0,
          "presence_penalty": 0,
      }
      cl.user_session.set("settings", settings)
-     await cl.Message(content="Welcome! Please upload a document to begin.").send()
-
- # Handling file upload
- @cl.on_message
  async def main(message: cl.Message):
-     # Checking if there is an uploaded file
-     if message.files:
-         uploaded_file = message.files[0]  # Accessing the uploaded file
-         file_path = uploaded_file['path']  # Getting the path of the uploaded file
-
-         # Extracting text from the uploaded file
-         file_content = await extract_text_from_file(file_path)
-
-         # Saving the content of the document in the user session
-         cl.user_session.set("document_content", file_content)
-
-         # Informing the user that the document was uploaded successfully
-         await cl.Message(content=f"Document '{uploaded_file['name']}' uploaded successfully! You can now ask questions based on the document content.").send()
-
-     else:
-         document_content = cl.user_session.get("document_content", "")
-         if not document_content:
-             await cl.Message(content="Please upload a document first.").send()
-             return
-
-         settings = cl.user_session.get("settings")
-         client = AsyncOpenAI()
-
-         # Creating the prompt for OpenAI based on the document content and user query
-         prompt = f"Document Content: {document_content}\n\nUser Query: {message.content}"
-         msg = cl.Message(content="")
-
-         # Sending prompt to OpenAI and streaming response
-         async for stream_resp in await client.chat.completions.create(
-             model=settings["model"],
-             messages=[{"role": "system", "content": "Answer based on the provided document."},
-                       {"role": "user", "content": prompt}],
-             stream=True,
-             **settings,
-         ):
-             token = stream_resp.choices[0].delta.content
-             if not token:
-                 token = ""
-             await msg.stream_token(token)
-
-         # Sending the final response
-         await msg.send()
 
 
+ # You can find this code for Chainlit python streaming here (https://docs.chainlit.io/concepts/streaming/python)
+
+ # OpenAI Chat completion
  import os
+ from openai import AsyncOpenAI  # importing openai for API usage
+ import chainlit as cl  # importing chainlit for our app
+ from chainlit.prompt import Prompt, PromptMessage  # importing prompt tools
+ from chainlit.playground.providers import ChatOpenAI  # importing ChatOpenAI tools
+ from dotenv import load_dotenv
+
+ api_key = os.getenv("OPENAI_API_KEY")
+
+ # ChatOpenAI Templates
+ system_template = """You are a helpful assistant who always speaks in a pleasant tone!
+ """
+
+ user_template = """{input}
+ Think through your response step by step.
+ """
+
+
+ @cl.on_chat_start  # marks a function that will be executed at the start of a user session
  async def start_chat():
      settings = {
          "model": "gpt-3.5-turbo",
          "frequency_penalty": 0,
          "presence_penalty": 0,
      }
+
      cl.user_session.set("settings", settings)
+
+
+ @cl.on_message  # marks a function that should be run each time the chatbot receives a message from a user
  async def main(message: cl.Message):
+     settings = cl.user_session.get("settings")
+
+     client = AsyncOpenAI()
+
+     print(message.content)
+
+     prompt = Prompt(
+         provider=ChatOpenAI.id,
+         messages=[
+             PromptMessage(
+                 role="system",
+                 template=system_template,
+                 formatted=system_template,
+             ),
+             PromptMessage(
+                 role="user",
+                 template=user_template,
+                 formatted=user_template.format(input=message.content),
+             ),
+         ],
+         inputs={"input": message.content},
+         settings=settings,
+     )
+
+     print([m.to_openai() for m in prompt.messages])
+
+     msg = cl.Message(content="")
+
+     # Call OpenAI
+     async for stream_resp in await client.chat.completions.create(
+         messages=[m.to_openai() for m in prompt.messages], stream=True, **settings
+     ):
+         token = stream_resp.choices[0].delta.content
+         if not token:
+             token = ""
+         await msg.stream_token(token)
+
+     # Update the prompt object with the completion
+     prompt.completion = msg.content
+     msg.prompt = prompt
+
+     # Send and close the message stream
+     await msg.send()
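
For reference, below is a minimal standalone sketch of the same token-streaming pattern the new app.py relies on, written against the openai v1 AsyncOpenAI client; it assumes OPENAI_API_KEY is set in the environment, and the demo() coroutine is purely illustrative, not part of this file.

# Minimal sketch of AsyncOpenAI streaming (assumes OPENAI_API_KEY is set; demo() is illustrative only)
import asyncio

from openai import AsyncOpenAI


async def demo() -> None:
    client = AsyncOpenAI()  # picks up OPENAI_API_KEY from the environment
    stream = await client.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": "Say hello in one sentence."}],
        stream=True,
    )
    async for chunk in stream:
        token = chunk.choices[0].delta.content or ""  # delta.content can be None on some chunks
        print(token, end="", flush=True)
    print()


if __name__ == "__main__":
    asyncio.run(demo())

In the app itself, the equivalent stream is consumed inside main() and forwarded to the Chainlit UI via msg.stream_token(); the app would typically be launched with chainlit run app.py, with OPENAI_API_KEY supplied via the environment (or a .env file, given the load_dotenv import).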