pvanand committed
Commit 12f7069
1 parent: 95f11c6

add recommendations

Files changed (1)
  1. main.py +20 -2
main.py CHANGED
@@ -24,7 +24,7 @@ SysPromptMd = "You are an expert AI who can create a structured report using inf
 SysPromptMdOffline = "You are an expert AI who can create a structured report using your knowledge on user request.The report should be in markdown format consists of markdown tables/lists/paragraphs as needed, structured into subtopics. Do not add any additional comments."
 
 @retry(tries=3, delay=1)
-def together_response(message, model = "meta-llama/Llama-3-8b-chat-hf", SysPrompt = SysPromptDefault):
+def together_response(message, model = "meta-llama/Llama-3-8b-chat-hf", SysPrompt = SysPromptDefault, temperature=0.2):
     base_url_groq = "https://api.groq.com/openai/v1"
     groq_model_name="llama3-8b-8192"
     client = OpenAI(base_url= base_url_groq, api_key= GROQ_API_KEY)
@@ -33,7 +33,7 @@ def together_response(message, model = "meta-llama/Llama-3-8b-chat-hf", SysPromp
     response = client.chat.completions.create(
         model=groq_model_name,
         messages=messages,
-        temperature=0.2,
+        temperature=temperature,
     )
     return response.choices[0].message.content
 
@@ -117,3 +117,21 @@ async def create_report(input: ReportInput):
 async def fetch_images(input: imageInput):
     images = get_images(input.user_input, input.num_images)
     return {"images": images}
+
+@app.post("/get_recommendations")
+async def generate_recommendations(input: imageInput):
+    images = get_images(input.user_input, input.num_images)
+
+    if input.user_input:
+        prompt = f"""create a list of {input.num_images} questions that a user might ask following the question: {input.user_input}:"""
+    else:
+        prompt = f"""create a list of mixed {input.num_images} questions to create a report or plan or course on any of the topics product,market,research topic """
+
+    if st.session_state.user_query_full != st.session_state.recommendation_query:
+        response_topics = json_from_text(
+            together_response(
+                prompt, model="meta-llama/Llama-3-8b-chat-hf", SysPrompt=SysPromptList,temperature=1
+            )
+        )
+    return {"recommendations": response_topics}
+
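Note on the added route: it checks st.session_state, which is Streamlit session state and is not defined inside this FastAPI app, so the request would raise a NameError (and response_topics would be unbound if the guard were ever false); the get_images call is also unused in this route. A minimal self-contained sketch without those pieces, assuming app, imageInput, json_from_text, SysPromptList, and together_response are the objects already defined in main.py, might look like this (not part of the commit):

# Sketch only: same endpoint without the Streamlit session-state guard,
# which is unavailable in a FastAPI route, and without the unused get_images call.
@app.post("/get_recommendations")
async def generate_recommendations(input: imageInput):
    if input.user_input:
        prompt = f"create a list of {input.num_images} questions that a user might ask following the question: {input.user_input}:"
    else:
        prompt = f"create a list of mixed {input.num_images} questions to create a report or plan or course on any of the topics product,market,research topic"

    # temperature=1 keeps the suggested questions varied; json_from_text is
    # assumed (as in the commit) to extract the list from the model's reply.
    response_topics = json_from_text(
        together_response(
            prompt, model="meta-llama/Llama-3-8b-chat-hf", SysPrompt=SysPromptList, temperature=1
        )
    )
    return {"recommendations": response_topics}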
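To exercise the new route once the app is running, something like the following should work; the base URL and example payload are assumptions rather than part of the commit, and num_images doubles as the number of suggested questions because the route reuses the imageInput model:

# Hypothetical smoke test for /get_recommendations; base URL is assumed.
import requests

resp = requests.post(
    "http://localhost:8000/get_recommendations",
    json={"user_input": "market report on electric vehicles", "num_images": 4},
)
print(resp.json()["recommendations"])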