Sakalti committed (verified)
Commit c836784 · Parent(s): 3ad6576

Update app.py

Files changed (1): app.py (+3, -3)
app.py CHANGED

@@ -19,7 +19,7 @@ MAX_INPUT_TOKEN_LENGTH = int(os.getenv("MAX_INPUT_TOKEN_LENGTH", "4096"))
 
 device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
 
-model_id = "Sakalti/Saba1.5-Pro"
+model_id = "Sakalti/magro-7B"
 tokenizer = AutoTokenizer.from_pretrained(model_id)
 model = AutoModelForCausalLM.from_pretrained(
     model_id,
@@ -89,14 +89,14 @@ demo = gr.ChatInterface(
             minimum=0.1,
             maximum=4.0,
             step=0.1,
-            value=0.6,
+            value=0.7,
         ),
         gr.Slider(
             label="Top-p (nucleus sampling)",
             minimum=0.05,
             maximum=1.0,
             step=0.05,
-            value=0.9,
+            value=0.95,
         ),
         gr.Slider(
             label="Top-k",