File size: 1,697 Bytes
93c3753 6071116 93c3753 6071116 93c3753 7dec94f cd4e5ff 3311d51 93c3753 3311d51 93c3753 4600530 cd4e5ff 7dec94f 6071116 93c3753 d763db1 6071116 93c3753 cdedab9 7dec94f d763db1 6071116 d763db1 6071116 d763db1 6071116 d763db1 6071116 7dec94f d763db1 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 |
import openai
import gradio as gr
import time
import os
# Read the API key from the environment so it is never hard-coded in source.
openai.api_key = os.getenv("OPENAI_API_KEY")
def get_completion(prompt, model="gpt-3.5-turbo"):
    """Send a single user prompt to the chat model and return the reply text.

    Temperature 0 is used so the model's output is as deterministic
    as the API allows.
    """
    chat_response = openai.ChatCompletion.create(
        model=model,
        messages=[{"role": "user", "content": prompt}],
        temperature=0,  # this is the degree of randomness of the model's output
    )
    return chat_response.choices[0].message["content"]
def get_completion_from_messages(input, model="gpt-3.5-turbo", temperature=0.8):
    """Send the user's message to the interviewer-persona model and return its reply.

    The system prompt (Korean, reproduced byte-for-byte from the original)
    instructs the model to act as an interviewer asking questions based on a
    self-introduction, with follow-up questions on technical terms.

    Parameters
    ----------
    input : str
        The user's message. NOTE(review): this name shadows the builtin
        ``input``; it is kept unchanged so existing keyword callers keep
        working — consider renaming in a coordinated change.
    model : str
        OpenAI chat model identifier.
    temperature : float
        Sampling randomness of the model's output.

    Returns
    -------
    str
        The assistant message content from the first choice.
    """
    messages = [
        {'role': 'system', 'content': '๋๋ ์๊ธฐ์๊ฐ์์ ๊ธฐ๋ฐํ์ฌ ์ง๋ฌธ์ ํ๋ ๋ฉด์ ๊ด์ด์ผ.'
                                      '๋ง์ฝ ์ ๋ฌธ์ฉ์ด๊ฐ ์๋ค๋ฉด ๊ผฌ๋ฆฌ์ง๋ฌธํด์ค'},
        {"role": "user", "content": input},
    ]
    response = openai.ChatCompletion.create(
        model=model,
        messages=messages,
        temperature=temperature,  # this is the degree of randomness of the model's output
    )
    # Removed leftover debug statement `print(111111)`.
    return response.choices[0].message["content"]
####
# Take the user's input, pass it through get_completion_from_messages to get
# the model's answer; role assignment and prompt engineering happen in that
# call. (Original comments were mis-encoded Korean; reconstructed in English.)
####
class Interviewer:
    """Thin stateful wrapper around the interviewer chat-completion call."""

    def __init__(self):
        # Conversation history starts empty.
        # NOTE(review): history is never read or written by predict() as
        # visible here — presumably reserved for future multi-turn support.
        self.history = []

    def predict(self, user_input):
        """Return the interviewer model's reply to *user_input*."""
        return get_completion_from_messages(user_input, temperature=0.8)
# Wire the Interviewer into a simple single-turn Gradio UI (text in, text out)
# and start the server. Launch happens at import time, matching the original
# script's behavior.
inter = Interviewer()

# NOTE(review): in the extracted source this string literal was split across
# two physical lines (a syntax error); rejoined here with a single space —
# confirm against the original file.
title = ("์์์๊ธฐ๋ฐ ๋ฉด์ ์๋ฎฌ๋ ์ด์ "
         "chat bot (this template based on Tonic's MistralMed Chat)")

chatbot = gr.Interface(
    fn=inter.predict,
    title=title,
    inputs="text",
    outputs="text",
)

chatbot.launch()