import json
import logging
import os

import gradio as gr
import requests
import weaviate
import weaviate.classes as wvc

"""
For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
"""

logging.basicConfig(level=logging.INFO)

# Spin up an embedded Weaviate instance; the Hugging Face token is forwarded so the
# text2vec-huggingface vectorizer can call the Inference API.
client = weaviate.connect_to_embedded(
    headers={
        "X-Huggingface-Api-Key": os.environ["HF_TOKEN"]  # Set HF_TOKEN in the environment
    }
)
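
# (Optional, sketch only) To run against a managed Weaviate Cloud cluster instead of the
# embedded instance, a connection along these lines should work with a recent v4 client
# (older clients use `connect_to_wcs`); WEAVIATE_URL and WEAVIATE_API_KEY are placeholder
# environment variable names, not part of this app:
#
# client = weaviate.connect_to_weaviate_cloud(
#     cluster_url=os.environ["WEAVIATE_URL"],
#     auth_credentials=weaviate.auth.AuthApiKey(os.environ["WEAVIATE_API_KEY"]),
#     headers={"X-Huggingface-Api-Key": os.environ["HF_TOKEN"]},
# )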

if client.is_ready():
    nodes = client.cluster.nodes()
    logging.info(f'Found {len(nodes)} Weaviate nodes.')
    for node in nodes:
        logging.info(node)

# Start from a clean slate, then create the collection that will hold the Jeopardy questions.
client.collections.delete_all()

questions = client.collections.create(
    name="Question",
    # Vectorize with the Hugging Face Inference API; wait for the model to load on first use.
    vectorizer_config=wvc.config.Configure.Vectorizer.text2vec_huggingface(wait_for_model=True),
    # Generative module config (not exercised by this demo's plain near_text search).
    generative_config=wvc.config.Configure.Generative.openai()
)

# Download the 1,000-question Jeopardy sample dataset.
resp = requests.get('https://raw.githubusercontent.com/databyjp/wv_demo_uploader/main/weaviate_datasets/data/jeopardy_1k.json', timeout=30)
data = resp.json()  # Load data

question_objs = []
for d in data:
    question_objs.append({
        "answer": d["Answer"],
        "question": d["Question"],
        "category": d["Category"],
        "air_date": d["Air Date"],
        "round": d["Round"],
        "value": d["Value"],
    })

logging.info('Importing Questions')
questions.data.insert_many(question_objs)
logging.info('Finished Importing Questions')

def respond(query):
    """Run a semantic (near_text) search over the Question collection and return the closest match."""
    if client.is_ready():
        logging.info(f'Found {len(client.cluster.nodes())} Weaviate nodes.')

    response = questions.query.near_text(
        query=query,
        limit=2
    )

    # Return the properties of the top-ranked object.
    return response.objects[0].properties
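
# (Sketch, not exercised by this demo) Because the collection is configured with an OpenAI
# generative module, a retrieval-augmented query could look roughly like the following; it
# would also require an "X-Openai-Api-Key" header when connecting the client:
#
# generated = questions.generate.near_text(
#     query="musical instruments",
#     limit=2,
#     grouped_task="Answer in one sentence using only these Jeopardy questions.",
# )
# logging.info(generated.generated)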

demo = gr.Interface(
    fn=respond,
    inputs=gr.Textbox(
        label="Search the Jeopardy Vector Database",
        info="Query:",
        lines=1,
        value="Guitar",
    ),
    outputs="textbox",
)

if __name__ == "__main__":
    demo.launch()