PierreBrunelle committed on
Commit
77591ae
1 Parent(s): 2663a60

Update app.py

Files changed (1):
  app.py  +13 -38
app.py CHANGED

@@ -50,7 +50,6 @@ def create_prompt(top_k_list: list[dict], question: str) -> str:

    {question}'''

-"""Gradio Application"""
def process_files(ground_truth_file, pdf_files):
    # Process ground truth file
    if ground_truth_file.name.endswith('.csv'):
@@ -108,44 +107,21 @@ def process_files(ground_truth_file, pdf_files):
        }
    ]

-def query_llm(question):
-
-    # Add OpenAI response column
+    # Add OpenAI response column
    queries_t['response'] = openai.chat_completions(
        model='gpt-4-0125-preview', messages=messages
    )
+
    queries_t['answer'] = queries_t.response.choices[0].message.content

-    return "Files processed successfully!"
-
-    # Perform top-k lookup
-    context = chunks_t.top_k(question).collect()
-
-    # Create prompt
-    prompt = create_prompt(context, question)
-
-    # Prepare messages for OpenAI
-    messages = [
-        {
-            'role': 'system',
-            'content': 'Please read the following passages and answer the question based on their contents.'
-        },
-        {
-            'role': 'user',
-            'content': prompt
-        }
-    ]
-
-    # Get LLM response
-    response = openai.chat_completions(model='gpt-4-0125-preview', messages=messages)
-    answer = response.choices[0].message.content
+    df_output = queries_t.select(queries_t.Question, queries_t.correct_answer, queries_t.answer).collect().to_pandas()

-    # Add new row to queries_t
-    new_row = {'Question': question, 'answer': answer}
-    queries_t.insert([new_row])
+    try:
+        # Display content
+        return df_output

-    # Return updated dataframe
-    return queries_t.select(queries_t.Question, queries_t.answer).collect()
+    except Exception as e:
+        return f"An error occurred: {str(e)}", None

# Gradio interface
with gr.Blocks() as demo:
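
Note: the computed-column pattern that process_files now relies on can be sketched in isolation as below. This is a minimal sketch, not the app's exact code: the table path 'rag_demo.queries' and the prompt column are assumptions (they are created elsewhere in app.py and are not visible in these hunks); only openai.chat_completions from pixeltable.functions and the select/collect/to_pandas chain mirror the diff above.

    # Minimal sketch of the Pixeltable flow used above (assumed table path and
    # prompt column; requires an OpenAI API key to be configured).
    import pixeltable as pxt
    from pixeltable.functions import openai

    queries_t = pxt.get_table('rag_demo.queries')  # hypothetical table name

    # messages is built per row from a stored column (assumption: 'prompt').
    messages = [
        {'role': 'system',
         'content': 'Please read the following passages and answer the question based on their contents.'},
        {'role': 'user', 'content': queries_t.prompt},
    ]

    # Adding a computed column makes Pixeltable call the model once per row and
    # store the raw response alongside the data.
    queries_t['response'] = openai.chat_completions(model='gpt-4-0125-preview', messages=messages)
    queries_t['answer'] = queries_t.response.choices[0].message.content

    # Materialize the columns the Gradio DataFrame needs as pandas.
    df_output = queries_t.select(queries_t.Question, queries_t.correct_answer, queries_t.answer).collect().to_pandas()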
 
@@ -156,15 +132,14 @@ with gr.Blocks() as demo:
    pdf_files = gr.File(label="Upload PDF Documents", file_count="multiple")

    process_button = gr.Button("Process Files")
-    process_output = gr.Textbox(label="Processing Output")

-    question_input = gr.Textbox(label="Enter your question")
-    query_button = gr.Button("Query LLM")
+    df_output = gr.DataFrame(label="Pixeltable Table")

-    output_dataframe = gr.Dataframe(label="LLM Outputs")
+    #question_input = gr.Textbox(label="Enter your question")
+    #query_button = gr.Button("Query LLM")

-    process_button.click(process_files, inputs=[ground_truth_file, pdf_files], outputs=process_output)
-    query_button.click(query_llm, inputs=question_input, outputs=output_dataframe)
+    process_button.click(process_files, inputs=[ground_truth_file, pdf_files], outputs=df_output)
+    #query_button.click(query_llm, inputs=question_input, outputs=output_dataframe)

if __name__ == "__main__":
    demo.launch()
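
The resulting UI is a single-button flow: one click handler ingests the files and fills the DataFrame. A self-contained sketch of that wiring is below; the ground_truth_file label and the stubbed process_files body are placeholders, since they sit outside the hunks shown above, while the other component names mirror app.py.

    # Sketch of the simplified Gradio wiring after this commit (process_files
    # is stubbed; in app.py it runs the Pixeltable pipeline shown above).
    import gradio as gr
    import pandas as pd

    def process_files(ground_truth_file, pdf_files):
        # Placeholder: app.py returns queries_t as a pandas DataFrame here.
        return pd.DataFrame(columns=['Question', 'correct_answer', 'answer'])

    with gr.Blocks() as demo:
        ground_truth_file = gr.File(label="Upload Ground Truth File")  # label assumed
        pdf_files = gr.File(label="Upload PDF Documents", file_count="multiple")

        process_button = gr.Button("Process Files")
        df_output = gr.DataFrame(label="Pixeltable Table")

        # One handler drives the whole flow; the separate "Query LLM" path is
        # commented out in this commit.
        process_button.click(process_files, inputs=[ground_truth_file, pdf_files], outputs=df_output)

    if __name__ == "__main__":
        demo.launch()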