# Imports used by this snippet: the tokenizer class, the `spaces` decorator
# from the Hugging Face Spaces SDK, and the Iterator type used in the hint below.
from collections.abc import Iterator

import spaces
from transformers import AutoTokenizer

# At the top level of your script, after initializing the tokenizer
# (`tokenizer_id` is assumed to be defined earlier in your script):
tokenizer = AutoTokenizer.from_pretrained(tokenizer_id)
if tokenizer.pad_token is None:
    tokenizer.pad_token = tokenizer.eos_token
    tokenizer.pad_token_id = tokenizer.eos_token_id

@spaces.GPU
def generate(
    message: str,
    chat_history: list[tuple[str, str]],
    max_new_tokens: int = 1024,
    temperature: float = 0.6,
    top_p: float = 0.9,
    top_k: int = 50,
    repetition_penalty: float = 1.4,
) -> Iterator[str]:
    global tokenizer, model  # Add this line to access the global variables
    input_ids = tokenizer([message], return_tensors="pt").input_ids
    # ... rest of the function ...

# The rest of your code remains the same
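
For reference, below is a minimal sketch of how the body of such a streaming generate function commonly continues in chat Spaces. It is illustrative only, not taken from the original Space: it assumes a global `model` (for example an AutoModelForCausalLM loaded at the top level, like the tokenizer), uses transformers' TextIteratorStreamer, and the helper name `stream_completion` is hypothetical.

# Illustrative only: one common way to finish a streaming generate() body.
# Assumes the global `tokenizer` and `model` referenced above.
from threading import Thread

from transformers import TextIteratorStreamer


def stream_completion(input_ids, max_new_tokens, temperature, top_p, top_k,
                      repetition_penalty) -> Iterator[str]:
    # Decoded text pieces become available on the streamer as tokens are generated.
    streamer = TextIteratorStreamer(
        tokenizer, skip_prompt=True, skip_special_tokens=True
    )
    generate_kwargs = dict(
        input_ids=input_ids.to(model.device),
        streamer=streamer,
        max_new_tokens=max_new_tokens,
        do_sample=True,
        temperature=temperature,
        top_p=top_p,
        top_k=top_k,
        repetition_penalty=repetition_penalty,
        pad_token_id=tokenizer.pad_token_id,
    )
    # Run generation in a background thread so partial output can be yielded.
    Thread(target=model.generate, kwargs=generate_kwargs).start()

    outputs = []
    for text in streamer:
        outputs.append(text)
        yield "".join(outputs)

Inside generate(), the same statements would take the place of "# ... rest of the function ...", so the function yields progressively longer strings that the chat UI can display as they arrive.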