gokaygokay committed on
Commit b05e42c · verified · 1 Parent(s): d95c7e3

Update app.py

Files changed (1):
app.py +6 -1
app.py CHANGED
@@ -14,6 +14,9 @@ def run_command(command):
     return output.decode('utf-8')

 def install_packages():
+
+    run_command("apt install nvidia-cuda-toolkit")
+
     # Clone the repository with submodules
     run_command("git clone --recurse-submodules https://github.com/abetlen/llama-cpp-python.git")

@@ -32,10 +35,12 @@ def install_packages():
     run_command("pip install -e .[all]")

     # Clear the local build cache
+    run_command("make GGML_CUDA=1")
+
     run_command("make clean")

     # Reinstall the package
-    run_command('CMAKE_ARGS="-DGGML_CUDA=on" pip install -e .')
+    run_command('pip install -e .')

     # Install llama-cpp-agent
     run_command("pip install llama-cpp-agent")