davila7 committed
Commit
59492a6
1 Parent(s): 8a58b71

try gorilla

Files changed (2)
  1. app.py +54 -0
  2. requirements.txt +2 -0
app.py ADDED
@@ -0,0 +1,54 @@
+ import openai
+ import urllib.parse
+ import streamlit as st
+
+ openai.api_key = "EMPTY" # Key is ignored and does not matter
+ openai.api_base = "http://34.132.127.197:8000/v1"
+
+ # Report issues
+ def raise_issue(e, model, prompt):
+     issue_title = urllib.parse.quote("[bug] Hosted Gorilla: <Issue>")
+     issue_body = urllib.parse.quote(f"Exception: {e}\nFailed model: {model}, for prompt: {prompt}")
+     issue_url = f"https://github.com/ShishirPatil/gorilla/issues/new?assignees=&labels=hosted-gorilla&projects=&template=hosted-gorilla-.md&title={issue_title}&body={issue_body}"
+     print(f"An exception has occurred: {e} \nPlease raise an issue here: {issue_url}")
+
+ # Query Gorilla server
+ def get_gorilla_response(prompt="I would like to translate from English to French.", api_provider="Huggingface"):
+     try:
+         model = "gorilla-7b-hf-v0"
+         if api_provider == "Huggingface":
+             model = "gorilla-7b-hf-v0"
+         if api_provider == "Torch Hub":
+             model = "gorilla-7b-th-v0"
+         if api_provider == "TensorFlow Hub":
+             model = "gorilla-7b-tf-v0"
+
+         completion = openai.ChatCompletion.create(
+             model=model,
+             messages=[{"role": "user", "content": prompt}]
+         )
+         return completion.choices[0].message.content
+     except Exception as e:
+         raise_issue(e, model, prompt)
+
+ st.title("Try Gorilla 🦍")
+ st.write("Large Language Model Connected with Massive APIs")
+
+ tab1, tab2 = st.tabs(["Intro", "Demo"])
+
+ with tab1:
+     st.markdown("# What is Gorilla?")
+     st.markdown('### Gorilla is an advanced Large Language Model (LLM) designed to effectively interact with a wide range of APIs, enhancing the capabilities of LLMs in real-world applications.')
+     st.write('By using self-instruction and retrieval techniques, Gorilla excels at selecting and utilizing tools with overlapping and evolving functionalities. Evaluated using the comprehensive APIBench dataset, which includes HuggingFace, TorchHub, and TensorHub APIs, Gorilla surpasses the performance of GPT-4 in generating API calls. When paired with a document retrieval system, it showcases an impressive ability to adapt to changes in API documentation, bolstering the reliability and applicability of its outputs. To explore Gorilla’s potential, visit https://gorilla.cs.berkeley.edu for code, data, and a live demo.')
+
+ with tab2:
+     col1, col2 = st.columns(2)
+     with col1:
+         api_provider = st.radio("Select an API Provider:", ("Huggingface", "Torch Hub", "TensorFlow Hub"))
+     with col2:
+         input = st.text_input("Ask here")
+
+     if api_provider and input:
+         if st.button("Run Gorilla"):
+             with st.spinner('Loading...'):
+                 st.success(get_gorilla_response(input, api_provider))
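The app treats the hosted Gorilla server as an OpenAI-compatible endpoint, so the same request can be made outside Streamlit. A minimal sketch, assuming the public endpoint at http://34.132.127.197:8000/v1 is still reachable and a pre-1.0 openai client is installed:

import openai

openai.api_key = "EMPTY"  # the hosted server ignores the key
openai.api_base = "http://34.132.127.197:8000/v1"

# Ask the Hugging Face-tuned Gorilla model to suggest an API call
completion = openai.ChatCompletion.create(
    model="gorilla-7b-hf-v0",
    messages=[{"role": "user", "content": "I would like to translate from English to French."}],
)
print(completion.choices[0].message.content)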
requirements.txt ADDED
@@ -0,0 +1,2 @@
+ openai
+ streamlit
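To try the demo locally, install the two dependencies (pip install -r requirements.txt) and launch the app with streamlit run app.py; Streamlit serves it on http://localhost:8501 by default. Note that requirements.txt does not pin versions: app.py uses the pre-1.0 openai.ChatCompletion interface, so newer openai releases may require a pin such as openai<1.0.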