Update app.py
app.py CHANGED
@@ -132,11 +132,12 @@ Please provide a detailed analysis including:
         inputs = self.tiny_tokenizer(prompt, return_tensors="pt", truncation=True)
         outputs = self.tiny_model.generate(
             inputs["input_ids"],
-
+            max_new_tokens=1024, # Generate up to 1024 new tokens
             temperature=0.7,
             top_p=0.95,
-            do_sample=True
-        )
+            do_sample=True,
+            pad_token_id=self.tiny_tokenizer.eos_token_id,
+            repetition_penalty=1.2 )
         analysis = self.tiny_tokenizer.decode(outputs[0], skip_special_tokens=True)

         # Generate sentiment
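Note: the updated call caps output with max_new_tokens=1024, keeps temperature/top_p sampling (do_sample=True is what makes those settings take effect), sets pad_token_id to the tokenizer's EOS token to avoid the missing-pad-token warning on GPT-style models, and adds repetition_penalty=1.2 to discourage repeated text. The snippet below is a minimal standalone sketch of the same generate() settings, not the Space's actual app.py: the sshleifer/tiny-gpt2 checkpoint and the prompt string are placeholders, and max_new_tokens is reduced so the placeholder model stays within its 1,024-token context.

# Minimal sketch of the updated generation settings with a placeholder model.
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name = "sshleifer/tiny-gpt2"  # placeholder tiny checkpoint, not from this commit
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

prompt = "Please provide a detailed analysis including: ..."  # placeholder prompt
inputs = tokenizer(prompt, return_tensors="pt", truncation=True)

outputs = model.generate(
    inputs["input_ids"],
    max_new_tokens=64,                    # the commit uses 1024; kept small for the tiny GPT-2 context window
    temperature=0.7,                      # softens the sampling distribution
    top_p=0.95,                           # nucleus sampling
    do_sample=True,                       # required for temperature/top_p to have any effect
    pad_token_id=tokenizer.eos_token_id,  # GPT-style models have no pad token; reuse EOS
    repetition_penalty=1.2,               # penalizes tokens the model keeps repeating
)
analysis = tokenizer.decode(outputs[0], skip_special_tokens=True)
print(analysis)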