{ "architectures": [ "BayesianDetectorModel" ], "base_rate": 0.5, "model_name": "google/gemma-2b-it", "torch_dtype": "float32", "transformers_version": "4.46.0.dev0", "watermarking_config": { "context_history_size": 1024, "keys": [ 654, 400, 836, 123, 340, 443, 597, 160, 57, 29, 590, 639, 13, 715, 468, 990, 966, 226, 324, 585, 118, 504, 421, 521, 129, 669, 732, 225, 90, 960 ], "ngram_len": 5, "sampling_table_seed": 0, "sampling_table_size": 65536 }, "watermarking_depth": 30 }