kwabs22 committed
Commit 3924225 · 1 Parent(s): 6524289

Cpp to python, Testing Stable LM 2 1.6B Zephyr

Files changed (1)
app.py +3 -1
app.py CHANGED
@@ -23,6 +23,7 @@ def generate_response(user_message):
 """
 
 def generate_response(user_message):
+    print("Before request")
     cmd = [
         "/app/llama.cpp/main",  # Path to the executable
         "-m", "/app/llama.cpp/models/stablelm-2-zephyr-1_6b-Q4_0.gguf",
@@ -31,6 +32,7 @@ def generate_response(user_message):
         "-e"
     ]
     result = subprocess.run(cmd, capture_output=True, text=True)
+    print("After response")
     return result.stdout
 
 
@@ -43,4 +45,4 @@ iface = gr.Interface(
     flagging_dir="/usr/src/app/flagged",
 )
 
-iface.launch(share=True)
+iface.launch(server_name="0.0.0.0", port=7860) #share=True)
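For context, here is a minimal sketch of what the touched parts of app.py look like after this commit. The llama.cpp arguments between the two hunks and the gr.Interface wiring (fn, inputs, outputs) sit outside the diff context and are only assumed here; note also that Gradio's launch() expects server_port rather than port, so the sketch uses that name where the commit itself writes port=7860.

import subprocess

import gradio as gr


def generate_response(user_message):
    print("Before request")
    cmd = [
        "/app/llama.cpp/main",  # path to the compiled llama.cpp executable
        "-m", "/app/llama.cpp/models/stablelm-2-zephyr-1_6b-Q4_0.gguf",
        # ... the prompt and sampling arguments (including user_message) live
        # in lines elided from this diff ...
        "-e",
    ]
    # run llama.cpp as a subprocess and return whatever it printed to stdout
    result = subprocess.run(cmd, capture_output=True, text=True)
    print("After response")
    return result.stdout


# fn/inputs/outputs are assumed; only flagging_dir appears in the diff context
iface = gr.Interface(
    fn=generate_response,
    inputs="text",
    outputs="text",
    flagging_dir="/usr/src/app/flagged",
)

# bind to all interfaces on the container's port rather than opening a share link
iface.launch(server_name="0.0.0.0", server_port=7860)  # the commit writes port=7860

Dropping share=True is reasonable once the Space or container exposes port 7860 directly, and the two print calls act as coarse before/after markers around the subprocess call for debugging.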