elapt1c committed
Commit 0e9df1a · verified · 1 Parent(s): 9e8e3a4

Update app.py

Files changed (1)
  1. app.py +1 -1
app.py CHANGED
@@ -63,7 +63,7 @@ def chat(message: str, state: List[Dict[str, str]]) -> Generator[Tuple[List[Tupl
     else:
         # Generate tokens one by one
         with torch.no_grad():
-            for _ in range(50):  # Limit generation to 50 tokens
+            for _ in range(100):  # Limit generation to 100 tokens
                 outputs = model(input_ids)
                 next_token_logits = outputs.logits[:, -1, :]
                 next_token_id = torch.argmax(next_token_logits, dim=-1)
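
For context, the loop touched by this commit is plain greedy decoding: one forward pass per step, then an argmax over the last position's logits. Below is a minimal, self-contained sketch of such a loop under that reading. The checkpoint name, the EOS stop condition, and the step that appends the chosen token back onto input_ids are assumptions for illustration; the full body of app.py is not shown in this diff.

# Minimal greedy-decoding sketch around the changed line.
# Assumptions (not in the diff): the checkpoint name, the EOS check,
# and appending the new token to input_ids each step.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name = "distilgpt2"  # placeholder; the actual checkpoint used in app.py is not shown
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
model.eval()

input_ids = tokenizer("Hello, how are", return_tensors="pt").input_ids

with torch.no_grad():
    for _ in range(100):  # generation cap, as raised in this commit
        outputs = model(input_ids)
        next_token_logits = outputs.logits[:, -1, :]              # logits for the last position
        next_token_id = torch.argmax(next_token_logits, dim=-1)   # greedy pick
        # Append the chosen token and stop at end-of-sequence.
        input_ids = torch.cat([input_ids, next_token_id.unsqueeze(-1)], dim=-1)
        if next_token_id.item() == tokenizer.eos_token_id:
            break

print(tokenizer.decode(input_ids[0], skip_special_tokens=True))

Raising the cap from 50 to 100 tokens allows longer replies, but as written the loop re-encodes the full prefix on every step (no KV cache is passed), so each message can cost up to twice as many forward passes as before.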