vkrishnan569 committed
Commit: 8c5f27f
Parent: ffc4e5d

Server Deployment

Files changed (2)
  1. main.py +2 -0
  2. model.py +5 -7
main.py CHANGED
@@ -1,6 +1,8 @@
 from flask import Flask, request, jsonify
 from llama_cpp import Llama
 from huggingface_hub import hf_hub_download
+from model import model_download
+model_download()
 
 # Initialize the Llama model with chat format set to "llama-2"
 llm = Llama(model_path="./llama-2-7b-chat.Q2_K.gguf", chat_format="llama-2")
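This commit wires the weight download into main.py: model_download() from model.py now runs before the Llama constructor, so the GGUF file is fetched on first start. For context, a minimal sketch of how the resulting main.py could serve chat requests, assuming a hypothetical /chat endpoint and the create_chat_completion API of llama-cpp-python; the route name and request/response shape are illustrative and not part of this commit:

from flask import Flask, request, jsonify
from llama_cpp import Llama

from model import model_download

# Fetch the GGUF weights before the model is loaded.
model_download()

# Initialize the Llama model with chat format set to "llama-2"
llm = Llama(model_path="./llama-2-7b-chat.Q2_K.gguf", chat_format="llama-2")

app = Flask(__name__)

# Hypothetical endpoint; the actual routes are not shown in this diff.
@app.route("/chat", methods=["POST"])
def chat():
    # Expect a JSON body like {"prompt": "..."} (assumed request format).
    prompt = request.get_json(force=True).get("prompt", "")
    result = llm.create_chat_completion(
        messages=[{"role": "user", "content": prompt}]
    )
    return jsonify(result)

if __name__ == "__main__":
    app.run(host="0.0.0.0", port=5000)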
model.py CHANGED
@@ -10,12 +10,10 @@ filename = 'llama-2-7b-chat.Q2_K.gguf'
 # If not provided, the default cache directory will be used
 cache_dir = './path_to_cache_directory'
 
-# Download the file
-file_path = hf_hub_download(
+def model_download():
+    # Download the file
+    file_path = hf_hub_download(
     repo_id=repo_id,
     filename=filename,
-    cache_dir=cache_dir
-)
-
-# The file_path variable now contains the local path to the downloaded file
-print(f"File downloaded to: {file_path}")
+        cache_dir=cache_dir)  # The file_path variable now contains the local path to the downloaded file
+    print(f"File downloaded to: {file_path}")
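For reference, a self-contained sketch of model.py after this change, with the lines above the hunk filled in from context. The repo_id value is not visible in this diff; the one below is a guess inferred from the filename and should be replaced with the repository actually used:

from huggingface_hub import hf_hub_download

# Repository and file to fetch. repo_id is assumed here; the real value
# is defined above the hunk shown in this commit.
repo_id = 'TheBloke/Llama-2-7B-Chat-GGUF'  # assumed, not taken from the diff
filename = 'llama-2-7b-chat.Q2_K.gguf'

# Optional cache location for the download.
# If not provided, the default cache directory will be used
cache_dir = './path_to_cache_directory'


def model_download():
    # Download the file
    file_path = hf_hub_download(
        repo_id=repo_id,
        filename=filename,
        cache_dir=cache_dir)  # file_path now holds the local path to the downloaded file
    print(f"File downloaded to: {file_path}")

Note that hf_hub_download returns a path inside the cache directory, which is not necessarily ./llama-2-7b-chat.Q2_K.gguf; having model_download() return file_path and passing that to Llama(model_path=...) in main.py would keep the two files pointing at the same location.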