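# Gradio UI using the Google Vertex AI API with the Gemini Pro model.
# Clone and bring your own credentials.
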
import gradio as gr
import json, os, vertexai

from dotenv import load_dotenv, find_dotenv
_ = load_dotenv(find_dotenv())

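# Service-account credentials (JSON string) and GCP project ID;
# replace the placeholders or read them from the CREDENTIALS / PROJECT environment variables.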
credentials = "BringYourOwn" #os.environ["CREDENTIALS"]
project = "BringYourOwn" #os.environ["PROJECT"]

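# Model name and generation parameters used for each request.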
config = {
    "max_output_tokens": 1000,
    "model": "gemini-pro",
    "temperature": 0.1,
    "top_k": 40,
    "top_p": 1.0,
}

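# Uncomment the block below to build service-account credentials
# from the CREDENTIALS JSON string and pass them to vertexai.init().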
#credentials = json.loads(credentials)

#from google.oauth2 import service_account
#from google.auth.transport.requests import Request
#credentials = service_account.Credentials.from_service_account_info(credentials)

#if credentials.expired:
#    credentials.refresh(Request())

vertexai.init(project = project,
              location = "us-central1",
              credentials = None  # or the service-account credentials object built above
             )

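# Instantiate the model named in config (Gemini Pro).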
from vertexai.preview.generative_models import GenerativeModel
generation_model = GenerativeModel(config["model"])

def invoke(prompt):
    """Send the prompt to Gemini Pro and return the completion text."""
    if prompt == "":
        raise gr.Error("Prompt is required.")

    completion = ""

    try:
        response = generation_model.generate_content(prompt,
                                                      generation_config = {
                                                          "max_output_tokens": config["max_output_tokens"],
                                                          "temperature": config["temperature"],
                                                          "top_k": config["top_k"],
                                                          "top_p": config["top_p"],
                                                      })

        if response.text is not None:
            completion = response.text
    except Exception as e:
        raise gr.Error(str(e))

    return completion

description = """<a href='https://www.gradio.app/'>Gradio</a> UI using the <a href='https://cloud.google.com/vertex-ai'>Google Vertex AI</a> API 
                 with Gemini Pro model. Clone and bring your own credentials."""

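# Close any Gradio apps already running in this process before launching a new one.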
gr.close_all()

demo = gr.Interface(fn = invoke, 
                    inputs = [gr.Textbox(label = "Prompt", value = "hi", lines = 1)],
                    outputs = [gr.Textbox(label = "Completion", lines = 1)],
                    title = "Generative AI - LLM",
                    description = description)

demo.launch()