Update app.py

app.py CHANGED
@@ -50,35 +50,31 @@ def progress_bar_html(label: str) -> str:
 '''
 
 # TEXT MODEL - Use Napoleon 4B with a modified configuration
-model_id = "baconnier/Napoleon_4B_V0.0"
-
 # Load the configuration
 config = AutoConfig.from_pretrained(model_id)
 
-#
-if
-
-
-
-
-
-if not hasattr(config, "
-
-
-
-
+# Copy the text_config attributes up to the top-level configuration
+if hasattr(config, "text_config"):
+    for key, value in vars(config.text_config).items():
+        if not hasattr(config, key):
+            setattr(config, key, value)
+else:
+    # Add the attributes manually if text_config does not exist
+    if not hasattr(config, "vocab_size"):
+        config.vocab_size = 262208
+    if not hasattr(config, "hidden_size"):
+        config.hidden_size = 2560
+    if not hasattr(config, "num_hidden_layers"):
+        config.num_hidden_layers = 34
+    if not hasattr(config, "intermediate_size"):
+        config.intermediate_size = 10240
+    if not hasattr(config, "num_attention_heads"):
+        config.num_attention_heads = 10
+    if not hasattr(config, "sliding_window"):
+        config.sliding_window = 1024
+    if not hasattr(config, "sliding_window_pattern"):
+        config.sliding_window_pattern = 6
 
-if not hasattr(config, "num_attention_heads"):
-    config.num_attention_heads = 10  # Correct value for Gemma 3 4B
-
-if not hasattr(config, "sliding_window"):
-    config.sliding_window = 1024  # Correct value for Gemma 3
-
-if not hasattr(config, "sliding_window_pattern"):
-    config.sliding_window_pattern = 6  # Value based on your Napoleon config
-
-if not hasattr(config, "rope_scaling"):
-    config.rope_scaling = {"factor": 8.0, "rope_type": "linear"}  # Correct value for Gemma 3
 
 
 tokenizer = AutoTokenizer.from_pretrained(model_id)
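For reference, the pattern the new hunk introduces can be tried on its own: multimodal Gemma 3 checkpoints (which the old comments say Napoleon 4B is based on) nest their language-model settings under config.text_config, and promoting them to the top level gives downstream code a flat config to read. A minimal standalone sketch, assuming model_id keeps its old value and that app.py goes on to load the model with AutoModelForCausalLM (neither appears in this hunk):

from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer

# Assumption: model_id is defined elsewhere in app.py after this change;
# the old value is reused here purely for illustration.
model_id = "baconnier/Napoleon_4B_V0.0"

config = AutoConfig.from_pretrained(model_id)

# Multimodal Gemma 3 configs nest the language-model settings under
# text_config; copy them to the top level so flat attribute lookups
# (config.hidden_size, config.num_attention_heads, ...) keep working.
if hasattr(config, "text_config"):
    for key, value in vars(config.text_config).items():
        if not hasattr(config, key):
            setattr(config, key, value)

tokenizer = AutoTokenizer.from_pretrained(model_id)

# Assumption: the Space then loads the model with the patched config.
model = AutoModelForCausalLM.from_pretrained(model_id, config=config)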