Fahad Mattoo commited on
Commit
7f2cb09
•
1 Parent(s): 4ee4c4a

Functional breaking (#17)

Browse files

* updating readme

* updating readme

* adding a config file

* adding a logger file

* adding stream lit utils

* adding openai utils

* adding dicord hook function

* updating application

* pylint issues

* updating linting:

* updating toml file

* updating

* updating

* updating

* adding usage of the openai during chat

README.md CHANGED
@@ -28,7 +28,7 @@ This is a simple chat bot using openAI GPT models.
28
 
29
  1. Create conda env
30
  ```bash
31
- conda create -n chat_bot_env python=3.10
32
  ```
33
 
34
  2. Activate env
 
28
 
29
  1. Create conda env
30
  ```bash
31
+ conda create -n chat_bot_env python=3.10 -y
32
  ```
33
 
34
  2. Activate env
pyproject.toml CHANGED
@@ -30,6 +30,7 @@ pythonpath = [
30
  ]
31
 
32
  [tool.pylint]
 
33
  extension-pkg-whitelist= [
34
  "numpy",
35
  "torch",
@@ -50,6 +51,7 @@ suggestion-mode="yes"
50
  unsafe-load-any-extension="no"
51
 
52
  [tool.pylint.'MESSAGES CONTROL']
 
53
  enable="c-extension-no-member"
54
 
55
  [tool.pylint.'REPORTS']
@@ -174,7 +176,7 @@ max-parents=7
174
  max-public-methods=20
175
  max-returns=6
176
  max-statements=50
177
- min-public-methods=2
178
 
179
  [tool.pylint.'IMPORTS']
180
  allow-wildcard-with-all="no"
@@ -182,7 +184,7 @@ analyse-fallback-blocks="no"
182
  deprecated-modules="optparse,tkinter.tix"
183
 
184
  [tool.pylint.'EXCEPTIONS']
185
- overgeneral-exceptions= [
186
- "BaseException",
187
- "Exception"
188
  ]
 
30
  ]
31
 
32
  [tool.pylint]
33
+ disable = "E0401"
34
  extension-pkg-whitelist= [
35
  "numpy",
36
  "torch",
 
51
  unsafe-load-any-extension="no"
52
 
53
  [tool.pylint.'MESSAGES CONTROL']
54
+ disable = "W0718"
55
  enable="c-extension-no-member"
56
 
57
  [tool.pylint.'REPORTS']
 
176
  max-public-methods=20
177
  max-returns=6
178
  max-statements=50
179
+ min-public-methods=0
180
 
181
  [tool.pylint.'IMPORTS']
182
  allow-wildcard-with-all="no"
 
184
  deprecated-modules="optparse,tkinter.tix"
185
 
186
  [tool.pylint.'EXCEPTIONS']
187
+ overgeneral-exceptions = [
188
+ "builtins.BaseException",
189
+ "builtins.Exception"
190
  ]
requirements.txt CHANGED
@@ -1,4 +1,5 @@
1
- streamlit==1.31.1
2
- openai==1.13.3
3
  discord-webhook==1.3.1
4
- python-dotenv==1.0.1
 
 
1
+ streamlit==1.37.1
2
+ openai==1.40.6
3
  discord-webhook==1.3.1
4
+ python-dotenv==1.0.1
5
+ colorama==0.4.6
src/app.py CHANGED
@@ -1,147 +1,41 @@
1
  """Module doc string"""
2
 
3
- import os
4
 
5
- import openai
6
  import streamlit as st
7
- from discord_webhook import DiscordWebhook
8
- from dotenv import load_dotenv
9
- from openai import OpenAI
 
 
 
10
 
11
- load_dotenv()
12
-
13
-
14
- def discord_hook(message):
15
- """_summary_"""
16
- if os.environ.get("ENV", "NOT_LOCAL") != "LOCAL":
17
- url = os.environ.get("DISCORD_HOOK", "NO_HOOK")
18
- if url != "NO_HOOK":
19
- webhook = DiscordWebhook(
20
- url=url, username="simple-chat-bot", content=message
21
- )
22
- webhook.execute()
23
-
24
-
25
- discord_hook("Simple chat bot initiated")
26
-
27
-
28
- def return_true():
29
- """_summary_"""
30
- return True
31
-
32
-
33
- def reset_history():
34
- """_summary_"""
35
- st.session_state.openai_api_key = st.session_state.api_key
36
- st.session_state.messages = []
37
-
38
-
39
- def start_app():
40
- """_summary_"""
41
- st.session_state.start_app = True
42
- st.session_state.openai_api_key = st.session_state.api_key
43
-
44
-
45
- def check_openai_api_key():
46
- """_summary_"""
47
- try:
48
- client = OpenAI(api_key=st.session_state.openai_api_key)
49
- try:
50
- client.models.list()
51
- except openai.AuthenticationError as error:
52
- with st.chat_message("assistant"):
53
- st.error(str(error))
54
- return False
55
- return True
56
- except Exception as error:
57
- with st.chat_message("assistant"):
58
- st.error(str(error))
59
- return False
60
 
61
 
62
  def main():
63
  """_summary_"""
64
- st.set_page_config(
65
- page_title="simple-chat-bot",
66
- page_icon="👾",
67
- layout="centered",
68
- initial_sidebar_state="auto",
69
- )
70
- st.title("👾👾 Simple Chat Bot 👾👾")
71
-
72
- if "messages" not in st.session_state:
73
- st.session_state.messages = []
74
-
75
- if "openai_model" not in st.session_state:
76
- st.session_state["openai_model"] = "gpt-3.5-turbo"
77
-
78
- if "openai_api_key" not in st.session_state:
79
- st.session_state["openai_api_key"] = None
80
-
81
- if "openai_maxtokens" not in st.session_state:
82
- st.session_state["openai_maxtokens"] = 50
83
-
84
- if "start_app" not in st.session_state:
85
- st.session_state["start_app"] = False
86
-
87
  if st.session_state.start_app:
88
- print(st.session_state.openai_api_key)
89
  if (
90
  st.session_state.openai_api_key is not None
91
  and st.session_state.openai_api_key != ""
92
  ):
93
- if check_openai_api_key():
94
- client = OpenAI(api_key=st.session_state.openai_api_key)
95
-
96
- for message in st.session_state.messages:
97
- with st.chat_message(message["role"]):
98
- st.markdown(message["content"])
99
-
100
- if prompt := st.chat_input("Type your Query"):
101
- with st.chat_message("user"):
102
- st.markdown(prompt)
103
- st.session_state.messages.append(
104
- {"role": "user", "content": prompt}
105
- )
106
-
107
- with st.chat_message("assistant"):
108
- stream = client.chat.completions.create(
109
- model=st.session_state["openai_model"],
110
- messages=[
111
- {"role": m["role"], "content": m["content"]}
112
- for m in st.session_state.messages
113
- ],
114
- max_tokens=st.session_state["openai_maxtokens"],
115
- stream=True,
116
- )
117
- response = st.write_stream(stream)
118
- st.session_state.messages.append(
119
- {"role": "assistant", "content": response}
120
- )
121
  else:
122
- reset_history()
123
  else:
124
  with st.chat_message("assistant"):
125
  st.markdown("**'OpenAI API key'** is missing.")
126
 
127
- with st.sidebar:
128
- st.text_input(
129
- label="OpenAI API key",
130
- value="",
131
- help="This will not be saved or stored.",
132
- type="password",
133
- key="api_key",
134
- )
135
-
136
- st.selectbox(
137
- "Select the GPT model", ("gpt-3.5-turbo", "gpt-4-turbo"), key="openai_model"
138
- )
139
- st.slider(
140
- "Max Tokens", min_value=20, max_value=80, step=10, key="openai_maxtokens"
141
- )
142
- st.button("Start Chat", on_click=start_app, use_container_width=True)
143
- st.button("Reset History", on_click=reset_history, use_container_width=True)
144
-
145
 
146
  if __name__ == "__main__":
147
  main()
 
1
  """Module doc string"""
2
 
3
+ from datetime import datetime
4
 
 
5
  import streamlit as st
6
+ from utils import (
7
+ OpenAIFunctions,
8
+ StreamlitFunctions,
9
+ discord_hook,
10
+ logger,
11
+ )
12
 
13
+ datetime_string = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
14
+ discord_hook(f"Simple chat bot initiated {datetime_string}")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
15
 
16
 
17
  def main():
18
  """_summary_"""
19
+ StreamlitFunctions.streamlit_page_config()
20
+ StreamlitFunctions.streamlit_initialize_variables()
21
+ StreamlitFunctions.streamlit_side_bar()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
22
  if st.session_state.start_app:
23
+ logger.info("Application Starting Condition passed")
24
  if (
25
  st.session_state.openai_api_key is not None
26
  and st.session_state.openai_api_key != ""
27
  ):
28
+ logger.info("OpenAI key Checking condition passed")
29
+ if OpenAIFunctions.check_openai_api_key():
30
+ logger.info("Inference Started")
31
+ StreamlitFunctions.streamlit_print_messages()
32
+ StreamlitFunctions.streamlit_invoke_model()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
33
  else:
34
+ StreamlitFunctions.reset_history()
35
  else:
36
  with st.chat_message("assistant"):
37
  st.markdown("**'OpenAI API key'** is missing.")
38
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
39
 
40
  if __name__ == "__main__":
41
  main()
src/utils/__init__.py CHANGED
@@ -1 +1,7 @@
1
  """Module doc string"""
 
 
 
 
 
 
 
1
  """Module doc string"""
2
+
3
+ from .constants import ConstantVariables
4
+ from .discord import discord_hook
5
+ from .logs import logger
6
+ from .openai_utils import OpenAIFunctions
7
+ from .streamlit_utils import StreamlitFunctions
src/utils/config.py ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Module doc string"""
2
+
3
+ import os
4
+
5
+ from dotenv import find_dotenv, load_dotenv
6
+
7
+ load_dotenv(find_dotenv(), override=True)
8
+
9
+ LOGGER_LEVEL = os.getenv("LOGGER_LEVEL", "INFO")
10
+ DISCORD_HOOK = os.getenv("DISCORD_HOOK", "NO_HOOK")
11
+ ENVIRONMENT = os.getenv("ENVIRONMENT", "NOT_LOCAL")
12
+ OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", "NO_KEY")
src/utils/constants.py ADDED
@@ -0,0 +1,27 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Module doc string"""
2
+
3
+ from .config import OPENAI_API_KEY
4
+
5
+
6
+ class ConstantVariables:
7
+ """Module doc string"""
8
+
9
+ model_list_tuple = (
10
+ "gpt-4o",
11
+ "gpt-4o-mini",
12
+ "gpt-4-turbo",
13
+ "gpt-4",
14
+ "gpt-3.5-turbo",
15
+ )
16
+ default_model = "gpt-4o-mini"
17
+
18
+ max_tokens = 180
19
+ min_token = 20
20
+ step = 20
21
+ default = round(((max_tokens + min_token) / 2) / step) * step
22
+ default_token = max(min_token, min(max_tokens, default))
23
+
24
+ if OPENAI_API_KEY != "NO_KEY":
25
+ api_key = OPENAI_API_KEY
26
+ else:
27
+ api_key = ""
src/utils/discord.py ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Module doc string"""
2
+
3
+ from discord_webhook import DiscordWebhook
4
+
5
+ from .config import DISCORD_HOOK, ENVIRONMENT
6
+ from .logs import logger
7
+
8
+
9
+ def discord_hook(message):
10
+ """_summary_"""
11
+ logger.info(message)
12
+ if ENVIRONMENT != "LOCAL":
13
+ url = DISCORD_HOOK
14
+ if url != "NO_HOOK":
15
+ webhook = DiscordWebhook(
16
+ url=url, username="simple-chat-bot", content=message
17
+ )
18
+ webhook.execute()
19
+ logger.debug("Discord Hook Successful.")
20
+ else:
21
+ logger.debug("Discord Hook Failed.")
src/utils/logs.py ADDED
@@ -0,0 +1,98 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Module doc string"""
2
+
3
+ import asyncio
4
+ import logging
5
+ import sys
6
+ import time
7
+ from functools import wraps
8
+
9
+ from colorama import Back, Fore, Style, init
10
+
11
+ from .config import LOGGER_LEVEL
12
+
13
+ # Initialize colorama
14
+ init(autoreset=True)
15
+
16
+ logger = logging.getLogger(__name__)
17
+
18
+ if not logger.hasHandlers():
19
+ logger.propagate = False
20
+ logger.setLevel(LOGGER_LEVEL)
21
+
22
+ # Define color codes for different log levels
23
+ log_colors = {
24
+ logging.DEBUG: Fore.CYAN,
25
+ logging.INFO: Fore.GREEN,
26
+ logging.WARNING: Fore.YELLOW,
27
+ logging.ERROR: Fore.RED,
28
+ logging.CRITICAL: Fore.RED + Back.WHITE + Style.BRIGHT,
29
+ }
30
+
31
+ class ColoredFormatter(logging.Formatter):
32
+ """Module doc string"""
33
+
34
+ def format(self, record):
35
+ """Module doc string"""
36
+
37
+ levelno = record.levelno
38
+ color = log_colors.get(levelno, "")
39
+
40
+ # Format the message
41
+ message = record.getMessage()
42
+
43
+ # Format the rest of the log details
44
+ details = self._fmt % {
45
+ "asctime": self.formatTime(record, self.datefmt),
46
+ "levelname": record.levelname,
47
+ "module": record.module,
48
+ "funcName": record.funcName,
49
+ "lineno": record.lineno,
50
+ }
51
+
52
+ # Combine details and colored message
53
+ return f"{Fore.WHITE}{details} :: {color}{message}{Style.RESET_ALL}"
54
+
55
+ normal_handler = logging.StreamHandler(sys.stdout)
56
+ normal_handler.setLevel(logging.DEBUG)
57
+ normal_handler.addFilter(lambda logRecord: logRecord.levelno < logging.WARNING)
58
+
59
+ error_handler = logging.StreamHandler(sys.stderr)
60
+ error_handler.setLevel(logging.WARNING)
61
+
62
+ formatter = ColoredFormatter(
63
+ "%(asctime)s :: %(levelname)s :: %(module)s :: %(funcName)s :: %(lineno)d"
64
+ )
65
+
66
+ normal_handler.setFormatter(formatter)
67
+ error_handler.setFormatter(formatter)
68
+
69
+ logger.addHandler(normal_handler)
70
+ logger.addHandler(error_handler)
71
+
72
+
73
+ def log_execution_time(func):
74
+ """Module doc string"""
75
+
76
+ @wraps(func)
77
+ def sync_wrapper(*args, **kwargs):
78
+ start_time = time.time()
79
+ result = func(*args, **kwargs)
80
+ end_time = time.time()
81
+ execution_time = end_time - start_time
82
+ message_string = f"{func.__name__} executed in {execution_time:.4f} seconds"
83
+ logger.debug(message_string)
84
+ return result
85
+
86
+ @wraps(func)
87
+ async def async_wrapper(*args, **kwargs):
88
+ start_time = time.time()
89
+ result = await func(*args, **kwargs)
90
+ end_time = time.time()
91
+ execution_time = end_time - start_time
92
+ message_string = f"{func.__name__} executed in {execution_time:.4f} seconds"
93
+ logger.debug(message_string)
94
+ return result
95
+
96
+ if asyncio.iscoroutinefunction(func):
97
+ return async_wrapper
98
+ return sync_wrapper
src/utils/openai_utils.py ADDED
@@ -0,0 +1,70 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Module doc string"""
2
+
3
+ import openai
4
+ import streamlit as st
5
+ from openai import OpenAI
6
+
7
+ from .logs import log_execution_time, logger
8
+
9
+
10
+ class OpenAIFunctions:
11
+ """Module doc string"""
12
+
13
+ @log_execution_time
14
+ @staticmethod
15
+ def invoke_model():
16
+ """_summary_"""
17
+ logger.debug("OpenAI invoked")
18
+ client = OpenAI(api_key=st.session_state.openai_api_key)
19
+ with st.chat_message("assistant"):
20
+ stream = client.chat.completions.create(
21
+ model=st.session_state["openai_model"],
22
+ messages=[
23
+ {"role": m["role"], "content": m["content"]}
24
+ for m in st.session_state.messages
25
+ ],
26
+ max_tokens=st.session_state["openai_maxtokens"],
27
+ stream=True,
28
+ stream_options={"include_usage": True},
29
+ )
30
+
31
+ def stream_data():
32
+ for chunk in stream:
33
+ if chunk.choices != []:
34
+ word = chunk.choices[0].delta.content
35
+ if word is not None:
36
+ yield word
37
+ if chunk.usage is not None:
38
+ yield {
39
+ "completion_tokens": chunk.usage.completion_tokens,
40
+ "prompt_tokens": chunk.usage.prompt_tokens,
41
+ "total_tokens": chunk.usage.total_tokens,
42
+ }
43
+
44
+ return st.write_stream(stream_data)
45
+
46
+ @log_execution_time
47
+ @staticmethod
48
+ def check_openai_api_key():
49
+ """_summary_"""
50
+ logger.info("Checking OpenAI Key")
51
+ try:
52
+ client = OpenAI(api_key=st.session_state.openai_api_key)
53
+ client.models.list()
54
+ logger.debug("OpenAI key Working")
55
+ return True
56
+ except openai.AuthenticationError as auth_error:
57
+ with st.chat_message("assistant"):
58
+ st.error(str(auth_error))
59
+ logger.error("AuthenticationError: %s", auth_error)
60
+ return False
61
+ except openai.OpenAIError as openai_error:
62
+ with st.chat_message("assistant"):
63
+ st.error(str(openai_error))
64
+ logger.error("OpenAIError: %s", openai_error)
65
+ return False
66
+ except Exception as general_error:
67
+ with st.chat_message("assistant"):
68
+ st.error(str(general_error))
69
+ logger.error("Unexpected error: %s", general_error)
70
+ return False
src/utils/streamlit_utils.py ADDED
@@ -0,0 +1,108 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Module doc string"""
2
+
3
+ import streamlit as st
4
+
5
+ from .constants import ConstantVariables
6
+ from .logs import logger
7
+ from .openai_utils import OpenAIFunctions
8
+
9
+
10
+ class StreamlitFunctions:
11
+ """Module doc string"""
12
+
13
+ @staticmethod
14
+ def streamlit_page_config():
15
+ """_summary_"""
16
+ st.set_page_config(
17
+ page_title="simple-chat-bot",
18
+ page_icon="👾",
19
+ layout="centered",
20
+ initial_sidebar_state="auto",
21
+ )
22
+ st.title("👾👾 Simple Chat Bot 👾👾")
23
+
24
+ @staticmethod
25
+ def streamlit_side_bar():
26
+ """_summary_"""
27
+ with st.sidebar:
28
+ st.text_input(
29
+ label="OpenAI API key",
30
+ value=ConstantVariables.api_key,
31
+ help="This will not be saved or stored.",
32
+ type="password",
33
+ key="api_key",
34
+ )
35
+
36
+ st.selectbox(
37
+ "Select the GPT model",
38
+ ConstantVariables.model_list_tuple,
39
+ key="openai_model",
40
+ )
41
+ st.slider(
42
+ "Max Tokens",
43
+ min_value=ConstantVariables.min_token,
44
+ max_value=ConstantVariables.max_tokens,
45
+ step=ConstantVariables.step,
46
+ key="openai_maxtokens",
47
+ )
48
+ st.button(
49
+ "Start Chat",
50
+ on_click=StreamlitFunctions.start_app,
51
+ use_container_width=True,
52
+ )
53
+ st.button(
54
+ "Reset History",
55
+ on_click=StreamlitFunctions.reset_history,
56
+ use_container_width=True,
57
+ )
58
+
59
+ @staticmethod
60
+ def streamlit_initialize_variables():
61
+ """_summary_"""
62
+ logger.debug("Initializing Streamlit Variables")
63
+ if "messages" not in st.session_state:
64
+ st.session_state.messages = []
65
+
66
+ if "openai_model" not in st.session_state:
67
+ st.session_state["openai_model"] = ConstantVariables.default_model
68
+
69
+ if "openai_api_key" not in st.session_state:
70
+ st.session_state["openai_api_key"] = None
71
+
72
+ if "openai_maxtokens" not in st.session_state:
73
+ st.session_state["openai_maxtokens"] = ConstantVariables.default_token
74
+
75
+ if "start_app" not in st.session_state:
76
+ st.session_state["start_app"] = False
77
+
78
+ @staticmethod
79
+ def reset_history():
80
+ """_summary_"""
81
+ logger.debug("Resetting Chat State")
82
+ st.session_state.openai_api_key = st.session_state.api_key
83
+ st.session_state.messages = []
84
+
85
+ @staticmethod
86
+ def start_app():
87
+ """_summary_"""
88
+ logger.debug("Starting Application")
89
+ st.session_state.start_app = True
90
+ st.session_state.openai_api_key = st.session_state.api_key
91
+
92
+ @staticmethod
93
+ def streamlit_print_messages():
94
+ """_summary_"""
95
+ for message in st.session_state.messages:
96
+ with st.chat_message(message["role"]):
97
+ st.markdown(message["content"])
98
+
99
+ @staticmethod
100
+ def streamlit_invoke_model():
101
+ """_summary_"""
102
+ if prompt := st.chat_input("Type your Query"):
103
+ with st.chat_message("user"):
104
+ st.markdown(prompt)
105
+ st.session_state.messages.append({"role": "user", "content": prompt})
106
+ response = OpenAIFunctions.invoke_model()
107
+ logger.debug(response)
108
+ st.session_state.messages.append({"role": "assistant", "content": response[0]})
tests/test_return_true.py CHANGED
@@ -1,6 +1,3 @@
1
- import pytest
2
- from app import return_true
3
-
4
 
5
  def test_reset_history():
6
- assert return_true() == True
 
 
 
 
1
 
2
  def test_reset_history():
3
+ assert True == True