Update app.py
app.py CHANGED
@@ -232,6 +232,12 @@ if (runModel=='1'):
         commit_message="Update fine-tuned model for test",
         #overwrite=True # Force overwrite existing files
     )
+
+    api.delete_file(repo_name, path_or_id="data-timeframe_tokenizer")
+    tokenizer_files = os.listdir(tokenizer_path)
+    tokenizer_files = [file for file in tokenizer_files]
+    print("Specific files in tokenizer directory After Delete:", tokenizer_files)
+
     upload_folder(
         folder_path='./data-timeframe_tokenizer',
         path_in_repo="data-timeframe_tokenizer",
@@ -241,8 +247,10 @@ if (runModel=='1'):
         #overwrite=True # Force overwrite existing files
     )
 
-
-
+    tokenizer_files = os.listdir(tokenizer_path)
+    tokenizer_files = [file for file in tokenizer_files]
+    print("Specific files in tokenizer directory After Commit:", tokenizer_files)
+
 else:
     print('Load Pre-trained')
 
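For context, the added lines delete the previously uploaded data-timeframe_tokenizer folder from the Hub repo, print the local tokenizer files, re-upload the folder, and print the listing again after the commit. Below is a minimal sketch of that flow, assuming a recent huggingface_hub client; repo_name, tokenizer_path, the folder name, and the commit message come from the diff, while the placeholder repo id and the use of HfApi.delete_folder and list_repo_files (rather than the diff's api.delete_file(repo_name, path_or_id=...)) are my assumptions, not the Space's actual code.

# Sketch only: reproduces the delete-then-reupload flow from the diff using
# huggingface_hub's HfApi. The repo id below is a hypothetical placeholder.
import os
from huggingface_hub import HfApi

repo_name = "your-username/data-timeframe"     # hypothetical; the real repo id is not shown in the diff
tokenizer_path = "./data-timeframe_tokenizer"  # local tokenizer folder used in the diff

api = HfApi()

# Remove the previously committed tokenizer folder from the Hub repo.
# (The diff calls api.delete_file(repo_name, path_or_id=...); delete_folder with
# path_in_repo/repo_id is the keyword form I would expect for removing a folder.)
api.delete_folder(path_in_repo="data-timeframe_tokenizer", repo_id=repo_name)

# Mirror the debug print: list the local files that are about to be uploaded.
tokenizer_files = os.listdir(tokenizer_path)
print("Specific files in tokenizer directory After Delete:", tokenizer_files)

# Re-upload the local folder under the same path in the repo.
api.upload_folder(
    repo_id=repo_name,
    folder_path=tokenizer_path,
    path_in_repo="data-timeframe_tokenizer",
    commit_message="Update fine-tuned model for test",
)

# Verify what actually landed in the repo after the commit.
print("Repo files after commit:", api.list_repo_files(repo_id=repo_name))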
|