Mbonea committed on
Commit
e046093
·
2 Parent(s): bdaa2e2 3a8352e

Merge branch 'main' of https://huggingface.co/spaces/bla/tranny

Browse files
App/Chat/utils/RAG.py CHANGED
@@ -7,6 +7,7 @@ from App.Embedding.utils.Initialize import search
7
 
8
  PALM_API = ""
9
  API_KEY = os.environ.get("PALM_API", PALM_API)
 
10
  palm.configure(api_key=API_KEY)
11
 
12
 
@@ -61,13 +62,14 @@ class GenerativeAIAssistant:
61
  async def generate_message(self, messages, task_id="ok"):
62
  user_message = messages[-1]
63
  # latest_message = messages[-1]["parts"][0]["text"]
64
- latest_message = user_message["content"]
65
- response = {
66
- "content": self.generate_template(latest_message, task_id),
67
- "role": "assistant",
 
68
  }
69
  # user_message["parts"][0]["text"] = latest_message
70
- messages.append(response)
71
  # url = f'https://generativelanguage.googleapis.com/v1beta/models/{self.model}:generateContent?key={self.api_key}'
72
  url = "https://api.groq.com/openai/v1/chat/completions"
73
  payload = {
@@ -117,7 +119,7 @@ class GenerativeAIAssistant:
117
 
118
  async with aiohttp.ClientSession() as session:
119
  async with session.post(
120
- url, json=payload, headers={"Content-Type": "application/json"}
121
  ) as response:
122
  try:
123
  temp = await response.json()
 
7
 
8
  PALM_API = ""
9
  API_KEY = os.environ.get("PALM_API", PALM_API)
10
+ GroqAPIKey = os.environ.get("Groq_API", "")
11
  palm.configure(api_key=API_KEY)
12
 
13
 
 
62
  async def generate_message(self, messages, task_id="ok"):
63
  user_message = messages[-1]
64
  # latest_message = messages[-1]["parts"][0]["text"]
65
+
66
+ user_message["content"]=self.generate_template(user_message["content"], task_id)
67
+ headers = {
68
+ "Content-Type": "application/json",
69
+ "Authorization": f"Bearer {GroqAPIKey}",
70
  }
71
  # user_message["parts"][0]["text"] = latest_message
72
+ messages[-1]=user_message
73
  # url = f'https://generativelanguage.googleapis.com/v1beta/models/{self.model}:generateContent?key={self.api_key}'
74
  url = "https://api.groq.com/openai/v1/chat/completions"
75
  payload = {
 
119
 
120
  async with aiohttp.ClientSession() as session:
121
  async with session.post(
122
+ url, json=payload, headers=headers
123
  ) as response:
124
  try:
125
  temp = await response.json()
App/Embedding/utils/Initialize.py CHANGED
@@ -1,6 +1,7 @@
1
  from langchain.embeddings import HuggingFaceEmbeddings
2
  from langchain.docstore.document import Document
3
  from langchain.vectorstores import Pinecone
 
4
  import pinecone
5
  import os
6
 
@@ -17,10 +18,11 @@ async def delete_documents(task_id):
17
  index_name = "transcript-bits"
18
  model_name = "thenlper/gte-base"
19
  embeddings = HuggingFaceEmbeddings(model_name=model_name)
 
20
 
21
 
22
- pinecone.init(api_key=PINECONE_API_KEY, environment=PINECONE_ENV)
23
- vector_index = pinecone.Index(index_name=index_name)
24
  docsearch = Pinecone.from_existing_index(index_name, embeddings)
25
 
26
  docsearch.delete(
@@ -63,14 +65,14 @@ def search(query: str, task_id: str):
63
  embeddings = HuggingFaceEmbeddings(model_name=model_name)
64
 
65
 
66
- pinecone.init(api_key=PINECONE_API_KEY, environment=PINECONE_ENV)
67
- vector_index = pinecone.Index(index_name=index_name)
68
  docsearch = Pinecone.from_existing_index(index_name, embeddings)
69
 
70
  filtering_conditions = {
71
  "task_id": {"$eq": task_id},
72
  }
73
- data =docsearch.similarity_search(query, k=10, filter=filtering_conditions)
74
  return [
75
  {"text": d.page_content, "start": d.metadata["start"], "end": d.metadata["end"]}
76
  for d in data
@@ -89,8 +91,8 @@ def encode(temp: list[Document]):
89
  embeddings = HuggingFaceEmbeddings(model_name=model_name)
90
 
91
 
92
- pinecone.init(api_key=PINECONE_API_KEY, environment=PINECONE_ENV)
93
- vector_index = pinecone.Index(index_name=index_name)
94
  docsearch = Pinecone.from_existing_index(index_name, embeddings)
95
  docsearch.add_documents(temp)
96
  # return embeddings.embed_documents(texts = [d.page_content for d in temp])
 
1
  from langchain.embeddings import HuggingFaceEmbeddings
2
  from langchain.docstore.document import Document
3
  from langchain.vectorstores import Pinecone
4
+ from pinecone import PodSpec
5
  import pinecone
6
  import os
7
 
 
18
  index_name = "transcript-bits"
19
  model_name = "thenlper/gte-base"
20
  embeddings = HuggingFaceEmbeddings(model_name=model_name)
21
+ spec = PodSpec()
22
 
23
 
24
+ pc=pinecone.Pinecone(api_key=PINECONE_API_KEY, environment=PINECONE_ENV,spec=spec)
25
+ vector_index = pc.Index(index_name)
26
  docsearch = Pinecone.from_existing_index(index_name, embeddings)
27
 
28
  docsearch.delete(
 
65
  embeddings = HuggingFaceEmbeddings(model_name=model_name)
66
 
67
 
68
+ pc=pinecone.Pinecone(api_key=PINECONE_API_KEY, environment=PINECONE_ENV)
69
+ vector_index = pc.Index(index_name)
70
  docsearch = Pinecone.from_existing_index(index_name, embeddings)
71
 
72
  filtering_conditions = {
73
  "task_id": {"$eq": task_id},
74
  }
75
+ data =docsearch.similarity_search(query, k=3, filter=filtering_conditions)
76
  return [
77
  {"text": d.page_content, "start": d.metadata["start"], "end": d.metadata["end"]}
78
  for d in data
 
91
  embeddings = HuggingFaceEmbeddings(model_name=model_name)
92
 
93
 
94
+ pc=pinecone.Pinecone(api_key=PINECONE_API_KEY, environment=PINECONE_ENV)
95
+ vector_index = pc.Index(index_name)
96
  docsearch = Pinecone.from_existing_index(index_name, embeddings)
97
  docsearch.add_documents(temp)
98
  # return embeddings.embed_documents(texts = [d.page_content for d in temp])
App/celery_config.py CHANGED
@@ -8,7 +8,7 @@ timezone = "Europe/Oslo"
8
  enable_utc = True
9
 
10
  broker_url = f"amqps://sjefunwo:[email protected]/sjefunwo"
11
- result_backend = f"db+postgresql+psycopg2://postgres:PkkneZrSFsnJR6B@db.vfhoydxvxuesxhrcdnmx.supabase.co:5432/postgres"
12
 
13
  # SSL/TLS and SNI configuration
14
  # broker_use_ssl = {
 
8
  enable_utc = True
9
 
10
  broker_url = f"amqps://sjefunwo:[email protected]/sjefunwo"
11
+ result_backend = f"db+postgresql+psycopg2://postgres.vfhoydxvxuesxhrcdnmx:PkkneZrSFsnJR6B@aws-0-us-west-1.pooler.supabase.com:5432/postgres"
12
 
13
  # SSL/TLS and SNI configuration
14
  # broker_use_ssl = {
requirements.txt CHANGED
@@ -1,14 +1,11 @@
1
  asyncpg==0.27.0
2
- click==8.1.3
3
  databases==0.7.0
4
  fastapi==0.92.0
5
- Flask==2.2.2
6
- greenlet==2.0.2
7
- itsdangerous==2.1.2
8
  orm==0.3.
9
  transformers
10
  faster-whisper
11
  aiofiles
 
12
  psycopg2-binary==2.9.5
13
  SQLAlchemy==1.4.46
14
  starlette==0.25.0
@@ -17,7 +14,6 @@ Werkzeug==2.2.2
17
  passlib # for password hashing
18
  pydantic[email]
19
  uvicorn==0.21.1
20
- gunicorn
21
  ujson
22
  yt-dlp
23
  psutil
@@ -30,15 +26,11 @@ python-multipart
30
  telethon
31
  fastapi-jwt-auth
32
  bcrypt
33
- asyncmy
34
  aiomysql
35
- pymongo
36
  sentence_transformers
37
- motor
38
- dnspython
39
  google-generativeai
40
  openai
41
  tiktoken
42
  langchain
43
- pinecone-client[grpc]
44
- poe-api-wrapper[proxy]
 
1
  asyncpg==0.27.0
 
2
  databases==0.7.0
3
  fastapi==0.92.0
 
 
 
4
  orm==0.3.
5
  transformers
6
  faster-whisper
7
  aiofiles
8
+ psycopg2
9
  psycopg2-binary==2.9.5
10
  SQLAlchemy==1.4.46
11
  starlette==0.25.0
 
14
  passlib # for password hashing
15
  pydantic[email]
16
  uvicorn==0.21.1
 
17
  ujson
18
  yt-dlp
19
  psutil
 
26
  telethon
27
  fastapi-jwt-auth
28
  bcrypt
 
29
  aiomysql
 
30
  sentence_transformers
 
 
31
  google-generativeai
32
  openai
33
  tiktoken
34
  langchain
35
+ mysqlclient
36
+ pinecone-client[grpc]