geekyrakshit committed
Commit 01ed12d · 1 Parent(s): 35cb430

update: system prompt formatting in LLMClient.execute_gemini_sdk

medrag_multi_modal/assistant/llm_client.py CHANGED
@@ -29,9 +29,14 @@ class LLMClient(weave.Model):
         schema: Optional[Any] = None,
     ) -> Union[str, Any]:
         import google.generativeai as genai
+
+        system_prompt = (
+            [system_prompt] if isinstance(system_prompt, str) else system_prompt
+        )
+        user_prompt = [user_prompt] if isinstance(user_prompt, str) else user_prompt
 
         genai.configure(api_key=os.environ.get("GOOGLE_API_KEY"))
-        model = genai.GenerativeModel(self.model_name, system_instruction=system_prompt)
+        model = genai.GenerativeModel(self.model_name)
         generation_config = (
             None
             if schema is None
@@ -40,7 +45,7 @@ class LLMClient(weave.Model):
             )
         )
         response = model.generate_content(
-            user_prompt, generation_config=generation_config
+            system_prompt + user_prompt, generation_config=generation_config
         )
         return response.text if schema is None else response
 
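
For context, a minimal sketch of what the new prompt formatting does before the request is sent; the format_prompts helper and the sample strings below are illustrative only and not part of the repository — the real logic lives inline in LLMClient.execute_gemini_sdk.

# Illustrative helper mirroring the wrapping/concatenation added in this commit.
def format_prompts(system_prompt, user_prompt):
    # Bare strings are wrapped in single-item lists so both prompts can be
    # concatenated into one flat `contents` list for generate_content.
    system_prompt = [system_prompt] if isinstance(system_prompt, str) else system_prompt
    user_prompt = [user_prompt] if isinstance(user_prompt, str) else user_prompt
    return system_prompt + user_prompt


# Plain strings become a two-item contents list:
print(format_prompts("You are a radiology assistant.", "Summarize this report."))
# -> ['You are a radiology assistant.', 'Summarize this report.']

# Callers that already pass lists (e.g. multi-part prompts) are left untouched:
print(format_prompts(["system text"], ["user text", "another part"]))
# -> ['system text', 'user text', 'another part']

Passing the system prompt as the leading items of the request contents replaces the earlier system_instruction argument on genai.GenerativeModel, so a single generate_content call carries both prompts even when either one is already a list of parts.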