# SqlParser / app.py
# LegendaryToe's picture
# cp
# c07c602
# import streamlit as st
# from transformers import pipeline
# # Load the SQLCoder model
# sql_generator = pipeline('text-generation', model='defog/sqlcoder')
# st.title('SQL Table Extractor')
# # Text input for SQL query
# user_sql = st.text_input("Enter your SQL statement", "SELECT * FROM my_table WHERE condition;")
# # Button to parse SQL
# if st.button('Extract Tables'):
# # Generate SQL or parse directly
# results = sql_generator(user_sql)
# # Assuming results contain SQL, extract table names (this part may require custom logic based on output)
# tables = extract_tables_from_sql(results)
# # Display extracted table names
# st.write('Extracted Tables:', tables)
# def extract_tables_from_sql(sql):
# # Dummy function: Implement logic to parse table names from SQL
# return ["my_table"] # Example output
# import streamlit as st
# from transformers import pipeline
# # Load the NER model
# ner = pipeline("ner", model="dbmdz/bert-large-cased-finetuned-conll03-english", grouped_entities=True)
# st.title('Hello World NER Parser')
# # User input for text
# user_input = st.text_area("Enter a sentence to parse for named entities:", "John Smith lives in San Francisco.")
# # Parse entities
# if st.button('Parse'):
# entities = ner(user_input)
# # Display extracted entities
# for entity in entities:
# st.write(f"Entity: {entity['word']}, Entity Type: {entity['entity_group']}")
import streamlit as st
from transformers import pipeline


@st.cache_resource
def _load_generator():
    """Load the text-generation pipeline once per server process.

    Streamlit re-executes this script on every widget interaction;
    @st.cache_resource keeps the model in memory across reruns so the
    multi-GB checkpoint is not reloaded each time a button is clicked.
    """
    # trust_remote_code is required: Phi-3 ships custom modeling code.
    return pipeline(
        "text-generation",
        model="microsoft/Phi-3-mini-128k-instruct",
        trust_remote_code=True,
    )


# Load a smaller LLaMA-family model with permission to run custom code
text_generator = _load_generator()

st.title('General Query Answerer')

# User input for a general question
user_query = st.text_area("Enter your question:", "Name all 50 US states.")

# Generate answer
if st.button('Answer Question'):
    # max_new_tokens bounds only the *generated* tokens; the original
    # max_length=150 counted prompt tokens too, so a long question could
    # truncate (or entirely crowd out) the answer.
    answer = text_generator(user_query, max_new_tokens=150)[0]['generated_text']
    # Display the answer
    st.write('Answer:', answer)