# -*- coding: utf-8 -*-
import streamlit as st
import subprocess
def run_llama(prompt):
    # Call the llama.cpp `main` binary with the quantized Vicuna model and a
    # simple "User: ... Assistant:" prompt template.
    output = subprocess.check_output(
        ['./main', '-m', 'ggml-vicuna-7b-1.1-q4_0.bin',
         '-p', "User: " + prompt + ". Assistant:"])
    # Keep only the text generated after the final "Assistant:" marker.
    response = output.decode('utf-8').split("Assistant:")[-1].strip()
    return response
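# The variant below is a sketch, not part of the original app: it assumes the
# same ./main binary and model file, but adds a timeout and surfaces subprocess
# failures in the UI instead of crashing the Streamlit script. The helper name
# run_llama_safe and the timeout_s parameter are illustrative, not from the source.
def run_llama_safe(prompt, timeout_s=120):
    try:
        result = subprocess.run(
            ['./main', '-m', 'ggml-vicuna-7b-1.1-q4_0.bin',
             '-p', "User: " + prompt + ". Assistant:"],
            capture_output=True, text=True, check=True, timeout=timeout_s)
    except subprocess.TimeoutExpired:
        st.error("Generation timed out.")
        return ""
    except subprocess.CalledProcessError as exc:
        st.error(f"llama.cpp exited with code {exc.returncode}: {exc.stderr}")
        return ""
    # Same post-processing as run_llama: keep the text after the last marker.
    return result.stdout.split("Assistant:")[-1].strip()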
st.title("Llama Model")
input_text = st.text_input("Input Text", "")
if st.button("Run"):
    output = run_llama(input_text)
    st.write(output)
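# To run this app locally (assuming Streamlit is installed and a compiled
# llama.cpp `main` binary plus the ggml-vicuna-7b-1.1-q4_0.bin model file sit
# in the same directory):
#   streamlit run app.py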