mgbam committed (verified)
Commit ec83649 · Parent(s): 455290d

Update models.py

Files changed (1)
  1. models.py +56 -48
models.py CHANGED
@@ -1,48 +1,56 @@
-import openai
-import google.generativeai as genai
-from config import OPENAI_API_KEY, GEMINI_API_KEY, OPENAI_DEFAULT_MODEL, GEMINI_DEFAULT_MODEL
-
-def configure_llms():
-    """
-    Call this at startup or inside your main app file to configure
-    OpenAI and Gemini if keys are available.
-    """
-    if OPENAI_API_KEY:
-        openai.api_key = OPENAI_API_KEY
-    if GEMINI_API_KEY:
-        genai.configure(api_key=GEMINI_API_KEY)
-
-def openai_chat(system_prompt, user_prompt, model=None, temperature=0.3):
-    """
-    Basic ChatCompletion with system + user roles for OpenAI.
-    """
-    if not OPENAI_API_KEY:
-        return "Error: OpenAI API key not provided."
-    chat_model = model or OPENAI_DEFAULT_MODEL
-    try:
-        response = openai.ChatCompletion.create(
-            model=chat_model,
-            messages=[
-                {"role": "system", "content": system_prompt},
-                {"role": "user", "content": user_prompt}
-            ],
-            temperature=temperature
-        )
-        return response.choices[0].message["content"].strip()
-    except Exception as e:
-        return f"Error calling OpenAI: {str(e)}"
-
-def gemini_chat(system_prompt, user_prompt, model_name=None, temperature=0.3):
-    """
-    Basic call to Google PaLM2 via google.generativeai.
-    """
-    if not GEMINI_API_KEY:
-        return "Error: Gemini API key not provided."
-    final_model_name = model_name or GEMINI_DEFAULT_MODEL
-    try:
-        model = genai.GenerativeModel(model_name=final_model_name)
-        chat_session = model.start_chat(history=[("system", system_prompt)])
-        reply = chat_session.send_message(user_prompt, temperature=temperature)
-        return reply.text
-    except Exception as e:
-        return f"Error calling Gemini: {str(e)}"
+import openai
+import google.generativeai as genai
+
+from config import (
+    OPENAI_API_KEY,
+    GEMINI_API_KEY,
+    OPENAI_DEFAULT_MODEL,
+    GEMINI_DEFAULT_MODEL
+)
+
+
+def configure_llms():
+    """
+    Configure OpenAI and Gemini if keys are provided.
+    """
+    if OPENAI_API_KEY:
+        openai.api_key = OPENAI_API_KEY
+    if GEMINI_API_KEY:
+        genai.configure(api_key=GEMINI_API_KEY)
+
+
+def openai_chat(system_prompt, user_prompt, model=None, temperature=0.3):
+    """
+    Call OpenAI ChatCompletion with a system + user message.
+    """
+    if not OPENAI_API_KEY:
+        return "Error: OpenAI API key not provided."
+    chat_model = model or OPENAI_DEFAULT_MODEL
+    try:
+        response = openai.ChatCompletion.create(
+            model=chat_model,
+            messages=[
+                {"role": "system", "content": system_prompt},
+                {"role": "user", "content": user_prompt}
+            ],
+            temperature=temperature
+        )
+        return response.choices[0].message["content"].strip()
+    except Exception as e:
+        return f"Error calling OpenAI: {str(e)}"
+
+
+def gemini_chat(system_prompt, user_prompt, model_name=None, temperature=0.3):
+    """
+    Call Google's PaLM2/Gemini via google.generativeai.
+    """
+    if not GEMINI_API_KEY:
+        return "Error: Gemini API key not provided."
+    final_model = model_name or GEMINI_DEFAULT_MODEL
+    try:
+        model = genai.GenerativeModel(model_name=final_model)
+        chat_session = model.start_chat(history=[("system", system_prompt)])
+        reply = chat_session.send_message(user_prompt, temperature=temperature)
+        return reply.text
+    except Exception as e:
+        return f"Error calling Gemini: {str(e)}"