Spaces: Runtime error
Update app.py
app.py
CHANGED
@@ -38,10 +38,10 @@ def compute_embeddings(text_chunks):
             messages=[{"role": "user", "content": chunk}],
             model="llama3-70b-8192"
         )
-        # …
-        …
-        …
-        embeddings.append(…
+        # Access the embedding content from the response
+        embedding = response.choices[0].message.content
+        embedding_array = np.fromstring(embedding, sep=",")  # Convert string to NumPy array
+        embeddings.append(embedding_array)
     return np.array(embeddings)
 
 # Function to build FAISS index
@@ -62,7 +62,8 @@ def generate_professional_content_groq(topic):
         messages=[{"role": "user", "content": f"Explain '{topic}' in bullet points, highlighting key concepts, examples, and applications for electrical engineering students."}],
         model="llama3-70b-8192"
     )
-    …
+    # Access content from the response
+    return response.choices[0].message.content.strip()
 
 # Function to compute query embedding using Groq's Llama3-70B-8192 model
 def compute_query_embedding(query):
@@ -70,9 +71,9 @@ def compute_query_embedding(query):
         messages=[{"role": "user", "content": query}],
         model="llama3-70b-8192"
     )
-    # …
-    …
-    return np.fromstring(…
+    # Access embedding content and convert it to a NumPy array
+    embedding = response.choices[0].message.content
+    return np.fromstring(embedding, sep=",").reshape(1, -1)
 
 # Streamlit app
 st.title("Generative AI for Electrical Engineering Education with FAISS and Groq")
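Note (not part of the commit): the field read in the new lines, response.choices[0].message.content, matches the Groq Python client's chat-completion response object. A minimal sketch of the call that the hunks' context lines (messages=..., model="llama3-70b-8192") appear to belong to, with the client setup assumed rather than shown in the diff:

from groq import Groq

client = Groq()  # assumes GROQ_API_KEY is set in the environment

response = client.chat.completions.create(
    messages=[{"role": "user", "content": chunk}],  # `chunk` supplied by the caller's loop
    model="llama3-70b-8192",
)
text = response.choices[0].message.content  # the string the updated lines go on to parse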
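A further caution, offered as a reviewer's sketch rather than the author's intent: llama3-70b-8192 is a chat model, so message.content is free-form text rather than a comma-separated vector, and np.fromstring(embedding, sep=",") is unlikely to recover a usable embedding from it; that fragility may be related to the Space's "Runtime error" status. If numeric vectors are needed for the FAISS index, a dedicated embedding model is the usual route. A minimal sketch assuming the sentence-transformers package, with the helper name build_faiss_index taken from the "# Function to build FAISS index" comment in the diff:

import numpy as np
import faiss
from sentence_transformers import SentenceTransformer

_embedder = SentenceTransformer("all-MiniLM-L6-v2")  # any sentence-embedding model works here

def compute_embeddings(text_chunks):
    # encode() returns an array of shape (n_chunks, dim)
    return np.asarray(_embedder.encode(text_chunks), dtype="float32")

def compute_query_embedding(query):
    # keep the (1, dim) shape expected by index.search()
    return np.asarray(_embedder.encode([query]), dtype="float32").reshape(1, -1)

def build_faiss_index(embeddings):
    index = faiss.IndexFlatL2(embeddings.shape[1])  # exact L2 search over float32 vectors
    index.add(embeddings)
    return index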