None1145 committed on
Commit c904117 · verified · 1 Parent(s): 76ddc37

Update app.py

Files changed (1)
  1. app.py +22 -1
app.py CHANGED
@@ -1,10 +1,31 @@
 import gradio as gr
 from huggingface_hub import InferenceClient
+import chatglm_cpp
+
+def list_files_tree(directory, indent=""):
+    items = os.listdir(directory)
+    for i, item in enumerate(items):
+        prefix = "└── " if i == len(items) - 1 else "├── "
+        print(indent + prefix + item)
+        item_path = os.path.join(directory, item)
+        if os.path.isdir(item_path):
+            next_indent = indent + ("    " if i == len(items) - 1 else "│   ")
+            list_files_tree(item_path, next_indent)
 
 """
 For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
 """
-client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
+# client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
+
+repo_id = "None1145/ChatGLM3-6B-Theresa-GGML"
+filename = "ChatGLM3-6B-Theresa-GGML-Q4_0.bin"
+
+huggingface_hub.hf_hub_download(repo_id=repo_id, filename=filename, local_dir="./Models")
+list_files_tree("./Models")
+import time
+time.sleep(10)
+
+pipeline = chatglm_cpp.Pipeline(model, max_length=max_length)
 
 
 def respond(
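
For reference, below is a minimal, self-contained sketch of what the added code appears to intend once its loose ends are tied up. Assumptions not present in the commit: the missing `os` and `huggingface_hub` imports are added, `model` is taken to be the local path returned by `hf_hub_download`, and `max_length` is given a placeholder value because the commit references it without defining it. The `chatglm_cpp.Pipeline(model, max_length=...)` call simply mirrors the commit's own usage.

import os
import time

import chatglm_cpp
from huggingface_hub import hf_hub_download


def list_files_tree(directory, indent=""):
    # Print a tree view of `directory`, recursing into subdirectories.
    items = os.listdir(directory)
    for i, item in enumerate(items):
        prefix = "└── " if i == len(items) - 1 else "├── "
        print(indent + prefix + item)
        item_path = os.path.join(directory, item)
        if os.path.isdir(item_path):
            next_indent = indent + ("    " if i == len(items) - 1 else "│   ")
            list_files_tree(item_path, next_indent)


repo_id = "None1145/ChatGLM3-6B-Theresa-GGML"
filename = "ChatGLM3-6B-Theresa-GGML-Q4_0.bin"

# hf_hub_download returns the local path of the downloaded file; the commit
# discards this return value, so binding it to `model` here is an assumption.
model = hf_hub_download(repo_id=repo_id, filename=filename, local_dir="./Models")
list_files_tree("./Models")
time.sleep(10)

max_length = 2048  # placeholder; undefined in the commit
pipeline = chatglm_cpp.Pipeline(model, max_length=max_length)

As committed, app.py would raise NameError on `os`, `huggingface_hub`, `model`, and `max_length`; the sketch above only illustrates one way those gaps could be closed.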