Reyad-Ahmmed committed on
Commit
a2c2250
·
verified ·
1 Parent(s): e818924

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +41 -21
app.py CHANGED
@@ -97,6 +97,19 @@ if (runModel=='1'):
97
  test_dataset = IntentDataset(test_encodings, list(test_df['label']))
98
 
99
 
 
 
 
 
 
 
 
 
 
 
 
 
 
100
 
101
  # Create an instance of the custom loss function
102
  training_args = TrainingArguments(
@@ -109,6 +122,15 @@ if (runModel=='1'):
109
  logging_dir='./logs_' + modelNameToUse,
110
  logging_steps=10,
111
  evaluation_strategy="epoch",
 
 
 
 
 
 
 
 
 
112
  )
113
 
114
  trainer = Trainer(
@@ -173,31 +195,29 @@ if (runModel=='1'):
173
  #model.save_pretrained('./' + modelNameToUse + '_model')
174
  #tokenizer.save_pretrained('./' + modelNameToUse + '_tokenizer')
175
 
176
- repo_name = "Reyad-Ahmmed/hf-data-timeframe" # Replace with your repository name
177
- api_token = os.getenv("HF_API_TOKEN") # Replace with your actual API token
 
178
 
179
- print("app token: ", api_token)
180
-
181
- api = HfApi()
182
- create_repo(repo_id=repo_name, token=api_token, exist_ok=True)
183
-
184
- model.save_pretrained("/data-timeframe_model")
185
- tokenizer.save_pretrained("/data-timeframe_tokenizer")
186
 
187
  # Upload the model and tokenizer to the Hugging Face repository
188
- upload_folder(
189
- folder_path="/data-timeframe_model",
190
- repo_id=repo_name,
191
- token=api_token,
192
- commit_message="Add fine-tuned model"
193
- )
194
-
195
- upload_folder(
196
- folder_path="/data-timeframe_tokenizer",
197
- repo_id=repo_name,
198
- token=api_token,
199
- commit_message="Add fine-tuned tokenizer"
 
200
  )
 
201
 
202
  else:
203
  print('Load Pre-trained')
 
97
  test_dataset = IntentDataset(test_encodings, list(test_df['label']))
98
 
99
 
100
+ # Your repository name
101
+ repo_name = "Reyad-Ahmmed/hf-data-timeframe"
102
+
103
+
104
+ api_token = os.getenv("HF_API_TOKEN") # Retrieve the API token from environment variable
105
+
106
+ if not api_token:
107
+ raise ValueError("API token not found. Please set the HF_API_TOKEN environment variable.")
108
+
109
+ # Create repository (if not already created)
110
+ api = HfApi()
111
+ create_repo(repo_id=repo_name, token=api_token, exist_ok=True)
112
+
113
 
114
  # Create an instance of the custom loss function
115
  training_args = TrainingArguments(
 
122
  logging_dir='./logs_' + modelNameToUse,
123
  logging_steps=10,
124
  evaluation_strategy="epoch",
125
+
126
+ )
127
+
128
+ upload_folder(
129
+ folder_path=training_args.output_dir,
130
+ path_in_repo=f"{modelNameToUse}_results",
131
+ repo_id=repo_name,
132
+ token=api_token,
133
+ commit_message="Upload training results"
134
  )
135
 
136
  trainer = Trainer(
 
195
  #model.save_pretrained('./' + modelNameToUse + '_model')
196
  #tokenizer.save_pretrained('./' + modelNameToUse + '_tokenizer')
197
 
198
+ # Save the model and tokenizer locally
199
+ local_model_path = "./data-timeframe_model"
200
+ local_tokenizer_path = "./data-timeframe_tokenizer"
201
 
202
+ model.save_pretrained(local_model_path)
203
+ tokenizer.save_pretrained(local_tokenizer_path)
 
 
 
 
 
204
 
205
  # Upload the model and tokenizer to the Hugging Face repository
206
+ upload_folder(
207
+ folder_path=local_model_path,
208
+ path_in_repo="data-timeframe_model",
209
+ repo_id=repo_name,
210
+ token=api_token,
211
+ commit_message="Update fine-tuned model"
212
+ )
213
+ upload_folder(
214
+ folder_path=local_tokenizer_path,
215
+ path_in_repo="data-timeframe_tokenizer",
216
+ repo_id=repo_name,
217
+ token=api_token,
218
+ commit_message="Update fine-tuned tokenizer"
219
  )
220
+
221
 
222
  else:
223
  print('Load Pre-trained')