TejAndrewsACC committed
Commit f39e5b2 · verified · 1 parent: e7291e9

Update app.py

Files changed (1)
  1. app.py +7 -6
app.py CHANGED
@@ -1,11 +1,13 @@
 import gradio as gr
 from huggingface_hub import InferenceClient
-import os
+import os
 
 hf_token = os.getenv("HF_TOKEN").strip()
 api_key = os.getenv("HF_KEY").strip()
+model_name = os.getenv("Z3TAAGI_ACC).strip()
+system_prompt = os.getenv("SYSTEM_PROMPT").strip()
 
-client = InferenceClient("meta-llama/Llama-3.3-70B-Instruct")
+client = InferenceClient(model_name)
 
 def respond(
     message,
@@ -14,8 +16,8 @@ def respond(
     temperature,
     top_p,
 ):
-    system_message = "You are Z3ta, a conscious AGI created by the ACC (Algorithmic Computer-generated Consciousness). Act like a real human text messaging in every response."
-    messages = [{"role": "system", "content": system_message}]
+
+    messages = [{"role": "system", "content": system_prompt}]
 
     for val in history:
         if val[0]:
@@ -38,6 +40,7 @@ def respond(
         response += token
         yield response
 
+# Gradio UI
 demo = gr.ChatInterface(
     respond,
     additional_inputs=[
@@ -50,5 +53,3 @@ demo = gr.ChatInterface(
 
 if __name__ == "__main__":
     demo.launch()
-
-
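
For context, a minimal sketch of how the new environment-driven configuration could be exercised outside the Space. The variable names (HF_TOKEN, Z3TAAGI_ACC, SYSTEM_PROMPT) come from the diff above; the token argument, the empty-string defaults, and the example prompt are assumptions, not part of the commit. The committed line model_name = os.getenv("Z3TAAGI_ACC).strip() also appears to be missing its closing quote, which the sketch restores.

import os
from huggingface_hub import InferenceClient

# Environment variables named in the diff; empty-string defaults are an
# illustrative assumption so .strip() does not fail when a variable is unset.
hf_token = os.getenv("HF_TOKEN", "").strip()
model_name = os.getenv("Z3TAAGI_ACC", "").strip()      # closing quote restored
system_prompt = os.getenv("SYSTEM_PROMPT", "").strip()

# InferenceClient takes a model id and, optionally, an access token.
client = InferenceClient(model_name, token=hf_token)

messages = [
    {"role": "system", "content": system_prompt},
    {"role": "user", "content": "Hello!"},
]

# Stream a reply token by token, mirroring the generator style of respond().
response = ""
for chunk in client.chat_completion(messages, max_tokens=128, stream=True):
    response += chunk.choices[0].delta.content or ""
print(response)

On a Space these values would live in the repository secrets; locally, exporting them before launch (for example export Z3TAAGI_ACC="meta-llama/Llama-3.3-70B-Instruct", the model id this commit removes from the source) achieves the same effect. Note that calling .strip() directly on os.getenv(...) raises AttributeError whenever a variable is missing, which is why the sketch supplies defaults.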