# padlet-summary / app.py
import gradio as gr
import subprocess
import json
import requests
from bs4 import BeautifulSoup
"""
General helper functions
"""
def strip_html_tags(html_text):
    # Use BeautifulSoup to parse and clean HTML content
    soup = BeautifulSoup(html_text, 'html.parser')
    return soup.get_text()
"""
Padlet API Interactions
"""
def api_call(board_id):
    # TODO: Refactor to be one function that can GET or POST (see the requests-based sketch after create_post)
    curl_command = [
        'curl', '-s', '--request', 'GET',
        '--url', f"https://api.padlet.dev/v1/boards/{board_id}?include=posts%2Csections",
        '--header', 'X-Api-Key: pdltp_0e380a0de1ff32d77b12dbcc030b1373199b7525681ddc81bd1b9ef3e4e3dd49577a23',
        '--header', 'accept: application/vnd.api+json'
    ]
    try:
        response = subprocess.check_output(curl_command, universal_newlines=True)
        response_data = json.loads(response)
        # Extract the contents of all posts, stripping HTML tags from bodyHtml
        posts_data = response_data.get("included", [])
        post_contents = []
        for post in posts_data:
            if post.get("type") == "post":
                attributes = post.get("attributes", {}).get("content", {})
                subject = attributes.get("subject", "")
                body_html = attributes.get("bodyHtml", "")
                if subject:
                    post_content = f"Subject: {subject}"
                    if body_html:
                        cleaned_body = strip_html_tags(body_html)
                        post_content += f"\nBody Text: {cleaned_body}"
                    post_contents.append(post_content)
        return "\n\n".join(post_contents) if post_contents else "No post contents found."
    except subprocess.CalledProcessError:
        return "Error: Unable to fetch data using cURL."
def create_post(board_id, post_content):
    # Create a new post on the target board, using the supplied text as the post subject
    curl_command = [
        'curl', '-s', '--request', 'POST',
        '--url', f"https://api.padlet.dev/v1/boards/{board_id}/posts",
        '--header', 'X-Api-Key: pdltp_0e380a0de1ff32d77b12dbcc030b1373199b7525681ddc81bd1b9ef3e4e3dd49577a23',
        '--header', 'accept: application/vnd.api+json',
        '--header', 'content-type: application/vnd.api+json',
        '--data',
        json.dumps({
            "data": {
                "type": "post",
                "attributes": {
                    "content": {
                        "subject": post_content
                    }
                }
            }
        })
    ]
    try:
        response = subprocess.check_output(curl_command, universal_newlines=True)
        response_data = json.loads(response)  # parsed but unused; success is reported if cURL exits cleanly
        return "Post created successfully."
    except subprocess.CalledProcessError as e:
        return f"Error: Unable to create post - {str(e)}"
"""
LLM Functions
"""
#Streaming endpoint
API_URL = "https://api.openai.com/v1/chat/completions" #os.getenv("API_URL") + "/generate_stream"
#Inference function
def predict(openai_gpt4_key, system_msg, api_result, top_p, temperature, chat_counter, chatbot=[], history=[]):
    headers = {
        "Content-Type": "application/json",
        "Authorization": f"Bearer {openai_gpt4_key}"  # Users will provide their own OPENAI_API_KEY
    }
    # The fetched Padlet board content is the user prompt for this turn
    inputs = api_result
    print(f"system message is ^^ {system_msg}")
    if system_msg.strip() == '':
        initial_message = [{"role": "user", "content": f"{inputs}"}]
        multi_turn_message = []
    else:
        initial_message = [{"role": "system", "content": system_msg},
                           {"role": "user", "content": f"{inputs}"}]
        multi_turn_message = [{"role": "system", "content": system_msg}]
    if chat_counter == 0:
        payload = {
            "model": "gpt-4",
            "messages": initial_message,
            "temperature": 1.0,
            "top_p": 1.0,
            "n": 1,
            "stream": True,
            "presence_penalty": 0,
            "frequency_penalty": 0,
        }
        print(f"chat_counter - {chat_counter}")
    else:  # chat_counter != 0: rebuild the running conversation
        messages = multi_turn_message  # starts as [{"role": "system", "content": system_msg}]
        for data in chatbot:
            user = {"role": "user", "content": data[0]}
            assistant = {"role": "assistant", "content": data[1]}
            messages.append(user)
            messages.append(assistant)
        messages.append({"role": "user", "content": inputs})
        payload = {
            "model": "gpt-4",
            "messages": messages,  # of the form [{"role": "user", "content": f"{inputs}"}, ...]
            "temperature": temperature,
            "top_p": top_p,
            "n": 1,
            "stream": True,
            "presence_penalty": 0,
            "frequency_penalty": 0,
        }
    chat_counter += 1
    history.append(inputs)
    print(f"Logging : payload is - {payload}")
    # Make a POST request to the API endpoint with stream=True so tokens arrive incrementally
    response = requests.post(API_URL, headers=headers, json=payload, stream=True)
    print(f"Logging : response code - {response}")
    token_counter = 0
    partial_words = ""
    counter = 0
    for chunk in response.iter_lines():
        # Skipping first chunk
        if counter == 0:
            counter += 1
            continue
        # Check whether each line is non-empty; decode since response data arrives as bytes
        if chunk.decode():
            chunk = chunk.decode()
            # Each data line is 'data: {...}', so chunk[6:] strips the prefix before parsing
            if len(chunk) > 12 and "content" in json.loads(chunk[6:])['choices'][0]['delta']:
                partial_words = partial_words + json.loads(chunk[6:])['choices'][0]["delta"]["content"]
                if token_counter == 0:
                    history.append(" " + partial_words)
                else:
                    history[-1] = partial_words
                chat = [(history[i], history[i + 1]) for i in range(0, len(history) - 1, 2)]  # convert to (user, assistant) tuples
                token_counter += 1
                yield chat, history, chat_counter, response  # resembles {chatbot: chat, state: history}
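# For reference, each streamed line that the loop above parses has the shape shown in this
# illustrative sample (not a captured response): a "data: " prefix followed by a JSON chunk
# whose choices[0].delta may carry a "content" fragment; chunk[6:] strips the prefix.
_example_stream_line = 'data: {"choices": [{"delta": {"content": "Hello"}}]}'
_example_delta = json.loads(_example_stream_line[6:])['choices'][0]['delta']  # -> {'content': 'Hello'}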
# Resetting to blank
def reset_textbox():
    return gr.update(value='')

# To set a component as visible=False
def set_visible_false():
    return gr.update(visible=False)

# To set a component as visible=True
def set_visible_true():
    return gr.update(visible=True)
# Define the Gradio interface: "Generate Summary" fetches the input board with api_call
# and streams a GPT-4 summary via predict; "Post Summary" writes the latest summary back
# to the output board with create_post.
with gr.Blocks(title="Padlet API Caller with cURL and LLM") as iface:
    gr.Markdown("Enter OpenAI GPT4 key, system message, input board ID for api_call, and output board ID for create_post.")
    openai_gpt4_key = gr.Textbox(label="OpenAI GPT4 Key", type="password", placeholder="sk..")
    system_msg = gr.Textbox(label="System Message", value="")
    input_board_id = gr.Textbox(label="Input Board ID for api_call")
    output_board_id = gr.Textbox(label="Output Board ID for create_post")
    top_p = gr.Slider(minimum=0.0, maximum=1.0, value=1.0, step=0.05, label="Top-p")
    temperature = gr.Slider(minimum=0.0, maximum=2.0, value=1.0, step=0.1, label="Temperature")
    board_content = gr.Textbox(label="Board Content")
    chatbot = gr.Chatbot(label="Summary")
    post_status = gr.Textbox(label="Post Status")
    state = gr.State([])        # running message history for predict
    chat_counter = gr.State(0)  # number of completed chat turns
    server_status = gr.Textbox(label="Server Status", visible=False)
    generate_btn = gr.Button("Generate Summary")
    post_btn = gr.Button("Post Summary")

    def post_latest_summary(board_id, chat_pairs):
        # Post the most recent assistant reply back to the output board
        if not chat_pairs:
            return "Nothing to post yet."
        return create_post(board_id, chat_pairs[-1][1])

    # Event handlers: call api_call and then stream predict into the chatbot when
    # "Generate Summary" is clicked, and call create_post when "Post Summary" is clicked
    generate_btn.click(api_call, [input_board_id], [board_content]).then(
        predict,
        [openai_gpt4_key, system_msg, board_content, top_p, temperature, chat_counter, chatbot, state],
        [chatbot, state, chat_counter, server_status],
    )
    post_btn.click(post_latest_summary, [output_board_id, chatbot], [post_status])

iface.queue().launch()