File size: 142 Bytes (commit 38e2bc2)
{
"do_sample": true,
"max_length": 100,
"temperature": 0.7,
"top_k": null,
"transformers_version": "4.44.2",
"use_cache": false
}
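
A minimal sketch of how these settings could be applied with the transformers library's GenerationConfig; the repository id "your-org/your-model" and the prompt are placeholders, not details taken from this file.

# Sketch: applying the same generation settings via transformers.
# "your-org/your-model" is a hypothetical repo id, used only for illustration.
from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig

gen_config = GenerationConfig(
    do_sample=True,   # sample from the token distribution instead of greedy decoding
    max_length=100,   # cap on total length (prompt + generated tokens)
    temperature=0.7,  # soften the softmax before sampling
    top_k=None,       # no top-k truncation of candidate tokens
    use_cache=False,  # do not reuse past key/value states between steps
)

tokenizer = AutoTokenizer.from_pretrained("your-org/your-model")
model = AutoModelForCausalLM.from_pretrained("your-org/your-model")

inputs = tokenizer("Hello, world!", return_tensors="pt")
outputs = model.generate(**inputs, generation_config=gen_config)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))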