File size: 2,030 Bytes
59492a6
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
9d7be77
 
59492a6
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
import openai
import urllib.parse
import streamlit as st

# The hosted Gorilla endpoint ignores authentication, but the openai client
# requires some key to be set before it will issue requests.
openai.api_key = "EMPTY" # Key is ignored and does not matter
# Point the openai client at the hosted Gorilla server, which exposes an
# OpenAI-compatible /v1 API.
openai.api_base = "http://34.132.127.197:8000/v1"

# Report issues
def raise_issue(e, model, prompt):
    """Print a pre-filled GitHub issue link describing a failed Gorilla request.

    The exception, model name, and prompt are URL-encoded into the issue
    template so the user can file a bug report with one click.
    """
    quote = urllib.parse.quote
    issue_title = quote("[bug] Hosted Gorilla: <Issue>")
    issue_body = quote(f"Exception: {e}\nFailed model: {model}, for prompt: {prompt}")
    issue_url = (
        "https://github.com/ShishirPatil/gorilla/issues/new"
        "?assignees=&labels=hosted-gorilla&projects=&template=hosted-gorilla-.md"
        f"&title={issue_title}&body={issue_body}"
    )
    print(f"An exception has occurred: {e} \nPlease raise an issue here: {issue_url}")

# Query Gorilla server
def get_gorilla_response(prompt="I would like to translate from English to French.", api_provider="Huggingface"):
    """Send *prompt* to the hosted Gorilla model for *api_provider*.

    Args:
        prompt: Natural-language task description to send to the model.
        api_provider: One of "Huggingface", "Torch Hub", or "TensorFlow Hub".
            Unknown values fall back to the Huggingface checkpoint, matching
            the original default.

    Returns:
        The model's reply text, or None if the request raised — in that case
        a GitHub issue link is printed via raise_issue().
    """
    # Map each provider to its fine-tuned Gorilla checkpoint. A dict lookup
    # replaces three non-exclusive `if` statements plus a redundant default
    # assignment; .get() preserves the original fallback behavior.
    provider_models = {
        "Huggingface": "gorilla-7b-hf-v0",
        "Torch Hub": "gorilla-7b-th-v0",
        "TensorFlow Hub": "gorilla-7b-tf-v0",
    }
    model = provider_models.get(api_provider, "gorilla-7b-hf-v0")
    try:
        # Only the network call is guarded; model resolution above cannot raise.
        completion = openai.ChatCompletion.create(
            model=model,
            messages=[{"role": "user", "content": prompt}]
        )
        return completion.choices[0].message.content
    except Exception as e:
        # Deliberate best-effort: keep the Streamlit UI alive and point the
        # user at a pre-filled issue link instead of surfacing a traceback.
        raise_issue(e, model, prompt)

# --- Streamlit UI: intro tab plus an interactive demo tab ---
st.title("Try Gorilla 🦍")
st.write("Large Language Model Connected with Massive APIs")

tab1, tab2 = st.tabs(["Intro", "Demo"])

with tab1:
    st.markdown("# What is Gorilla?")
    st.write('Gorilla is an advanced Large Language Model (LLM) designed to effectively interact with a wide range of APIs, enhancing the capabilities of LLMs in real-world applications.')


with tab2:
    col1, col2 = st.columns(2)
    with col1:
        api_provider = st.radio("Select an API Provider:", ("Huggingface", "Torch Hub", "TensorFlow Hub"))
    with col2:
        # Renamed from `input` to avoid shadowing the builtin input().
        user_query = st.text_input("Ask here")

    # Only offer the run button once both a provider and a query are present.
    if api_provider and user_query:
        if st.button("Run Gorilla"):
            with st.spinner('Loading...'):
                st.success(get_gorilla_response(user_query, api_provider))