mgbam committed on
Commit 81a818a · verified · 1 Parent(s): f3774c1

Update models.py

Files changed (1)
  1. models.py +16 -51
models.py CHANGED
@@ -1,56 +1,21 @@
-import openai
-import google.generativeai as genai
 
-from config import (
-    OPENAI_API_KEY,
-    GEMINI_API_KEY,
-    OPENAI_DEFAULT_MODEL,
-    GEMINI_DEFAULT_MODEL
-)
 
 
-def configure_llms():
     """
-    Configure OpenAI and Gemini if keys are provided.
     """
-    if OPENAI_API_KEY:
-        openai.api_key = OPENAI_API_KEY
-    if GEMINI_API_KEY:
-        genai.configure(api_key=GEMINI_API_KEY)
-
-
-def openai_chat(system_prompt, user_prompt, model=None, temperature=0.3):
-    """
-    Call OpenAI ChatCompletion with a system + user message.
-    """
-    if not OPENAI_API_KEY:
-        return "Error: OpenAI API key not provided."
-    chat_model = model or OPENAI_DEFAULT_MODEL
-    try:
-        response = openai.ChatCompletion.create(
-            model=chat_model,
-            messages=[
-                {"role": "system", "content": system_prompt},
-                {"role": "user", "content": user_prompt}
-            ],
-            temperature=temperature
-        )
-        return response.choices[0].message["content"].strip()
-    except Exception as e:
-        return f"Error calling OpenAI: {str(e)}"
-
-
-def gemini_chat(system_prompt, user_prompt, model_name=None, temperature=0.3):
-    """
-    Call Google's PaLM2/Gemini via google.generativeai.
-    """
-    if not GEMINI_API_KEY:
-        return "Error: Gemini API key not provided."
-    final_model = model_name or GEMINI_DEFAULT_MODEL
-    try:
-        model = genai.GenerativeModel(model_name=final_model)
-        chat_session = model.start_chat(history=[("system", system_prompt)])
-        reply = chat_session.send_message(user_prompt, temperature=temperature)
-        return reply.text
-    except Exception as e:
-        return f"Error calling Gemini: {str(e)}"
+from openai import OpenAI
+import os
+from config import OPENAI_API_KEY, OPENAI_DEFAULT_MODEL
 
+# Initialize OpenAI client
+client = OpenAI(api_key=OPENAI_API_KEY)
 
 
+def chat_with_openai(system_message, user_message, model=OPENAI_DEFAULT_MODEL, temperature=0.3):
     """
+    Chat with OpenAI's model using a system message and user message.
     """
+    response = client.chat.completions.create(
+        model=model,
+        messages=[
+            {"role": "system", "content": system_message},
+            {"role": "user", "content": user_message},
+        ],
+        temperature=temperature,
+    )
+    return response.choices[0].message.content
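
For context, a minimal usage sketch of the new helper is shown below. It assumes config.py defines OPENAI_API_KEY and OPENAI_DEFAULT_MODEL and that the 1.x-style openai client is installed; the prompt strings are purely illustrative and not part of this commit.

# Hypothetical caller for the chat_with_openai helper added in this commit.
# Assumes config.py supplies OPENAI_API_KEY and OPENAI_DEFAULT_MODEL.
from models import chat_with_openai

reply = chat_with_openai(
    system_message="You are a concise technical assistant.",
    user_message="Summarize what a system prompt does in one sentence.",
    temperature=0.2,
)
print(reply)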