BlinkDL committed on
Commit
3b63245
·
verified ·
1 Parent(s): ef244b4

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -4
app.py CHANGED
@@ -20,12 +20,12 @@ gpu_h = nvmlDeviceGetHandleByIndex(0)
20
  device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
21
 
22
  ctx_limit = 4096
23
- gen_limit = 1000
24
 
25
  ########################## text rwkv ################################################################
26
  from rwkv.utils import PIPELINE, PIPELINE_ARGS
27
 
28
- title_v6 = "RWKV-x070-World-0.4B-v2.9-20250107-ctx4096"
29
  model_path_v6 = hf_hub_download(repo_id="BlinkDL/rwkv-7-world", filename=f"{title_v6}.pth")
30
  # model_path_v6 = f'/mnt/e/RWKV-Runner/models/{title_v6}' # conda activate torch2; cd /mnt/program/git-public/RWKV-Gradio-1; python app.py
31
  model_v6 = RWKV(model=model_path_v6.replace('.pth',''), strategy='cuda fp16')
@@ -124,10 +124,10 @@ examples = [
124
 
125
  ##################################################################################################################
126
  with gr.Blocks(title=title_v6) as demo:
127
- gr.HTML(f"<div style=\"text-align: center;\">\n<h1>{title_v6} (!!! only 0.4B !!!)</h1>\n</div>")
128
 
129
  with gr.Tab("=== Base Model (Raw Generation) ==="):
130
- gr.Markdown(f"This is [RWKV-7 World v2.9](https://huggingface.co/BlinkDL/rwkv-7-world) 0.4B (L24-D1024) - a 100% attention-free RNN [RWKV-LM](https://github.com/BlinkDL/RWKV-LM). Supports 100+ world languages and code. Check [400+ Github RWKV projects](https://github.com/search?o=desc&p=1&q=rwkv&s=updated&type=Repositories). *** Can try examples (bottom of page) *** (can edit them). Demo limited to ctxlen {ctx_limit}.")
131
  with gr.Row():
132
  with gr.Column():
133
  prompt = gr.Textbox(lines=2, label="Prompt", value="Assistant: How can we craft an engaging story featuring vampires on Mars? Let's think step by step and provide an expert response:")
 
20
  device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
21
 
22
  ctx_limit = 4096
23
+ gen_limit = 700
24
 
25
  ########################## text rwkv ################################################################
26
  from rwkv.utils import PIPELINE, PIPELINE_ARGS
27
 
28
+ title_v6 = "RWKV-x070-World-1.5B-v3-20250127-ctx4096"
29
  model_path_v6 = hf_hub_download(repo_id="BlinkDL/rwkv-7-world", filename=f"{title_v6}.pth")
30
  # model_path_v6 = f'/mnt/e/RWKV-Runner/models/{title_v6}' # conda activate torch2; cd /mnt/program/git-public/RWKV-Gradio-1; python app.py
31
  model_v6 = RWKV(model=model_path_v6.replace('.pth',''), strategy='cuda fp16')
 
124
 
125
  ##################################################################################################################
126
  with gr.Blocks(title=title_v6) as demo:
127
+ gr.HTML(f"<div style=\"text-align: center;\">\n<h1>{title_v6}</h1>\n</div>")
128
 
129
  with gr.Tab("=== Base Model (Raw Generation) ==="):
130
+ gr.Markdown(f"This is [RWKV-7 World v3](https://huggingface.co/BlinkDL/rwkv-7-world) 1.5B (L24-D2048) base model - a 100% attention-free RNN [RWKV-LM](https://github.com/BlinkDL/RWKV-LM). Supports 100+ world languages and code. Check [400+ Github RWKV projects](https://github.com/search?o=desc&p=1&q=rwkv&s=updated&type=Repositories). *** Can try examples (bottom of page) *** (can edit them). Demo limited to ctxlen {ctx_limit}.")
131
  with gr.Row():
132
  with gr.Column():
133
  prompt = gr.Textbox(lines=2, label="Prompt", value="Assistant: How can we craft an engaging story featuring vampires on Mars? Let's think step by step and provide an expert response:")