SushantGautam commited on
Commit
425bbbe
·
1 Parent(s): 56f0aa0

Refactor task 2 script: enhance imports, streamline submission process, and improve error handling

Browse files
medvqa/competitions/gi-2025/task_2.py CHANGED
@@ -1,19 +1,116 @@
1
- import sys
 
2
  import argparse
 
 
 
 
 
 
 
3
 
 
 
4
 
5
- def main(repo, task_name, verbose=False):
6
- print(f"Running {task_name} with repository: {repo}")
7
- if verbose:
8
- print("Verbose mode is enabled")
 
 
 
9
 
 
 
 
10
 
11
- if __name__ == '__main__':
12
- parser = argparse.ArgumentParser(description='Run GI-2025 Task 1 (VQA)')
13
- parser.add_argument('repo2', type=str, help='Repository path')
14
- parser.add_argument('task_name2', type=str, help='Name of the task')
15
- parser.add_argument('--verbose2', action='store_true',
16
- help='Enable verbose mode')
17
 
18
- args = parser.parse_args()
19
- main(args.repo2, args.task_name2, args.verbose2)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from gradio_client import Client, handle_file
2
+ from huggingface_hub import snapshot_download, login, whoami
3
  import argparse
4
+ import os
5
+ import subprocess as sp
6
+ import time
7
+ from datetime import datetime, timezone
8
+ import shutil # Add this import
9
+ import json
10
+ from huggingface_hub import HfApi, grant_access
11
 
12
+ HF_GATE_ACESSLIST = ["SushantGautam",
13
+ "stevenah", "vlbthambawita"]
14
 
15
+ MEDVQA_SUBMIT = True if os.environ.get(
16
+ '_MEDVQA_SUBMIT_FLAG_', 'FALSE') == 'TRUE' else False
17
+ parser = argparse.ArgumentParser(
18
+ description='Run GI-1015 Task 2 (Image Generation)')
19
+ parser.add_argument('--repo_id', type=str, required=True,
20
+ help='Path to the HF submission repository')
21
+ args, _ = parser.parse_known_args()
22
 
23
+ os.environ["HF_HUB_ENABLE_HF_TRANSFER"] = "1"
24
+ submission_file = "submission_task1.py"
25
+ file_from_validation = "predictions_2.json"
26
 
27
+ min_library = ["datasets>=3.4.1", "transformers", "evaluate", "scipy", "scikit-learn"
28
+ "rouge_score", 'tqdm', "gradio_client>=1.8.0", "medvqa"]
 
 
 
 
29
 
30
+ print("🌟 ImageCLEFmed-MEDVQA-GI-2025 🌟",
31
+ "https://github.com/simula/ImageCLEFmed-MEDVQA-GI-2025")
32
+ print("πŸ” Subtask 2: Creation of High-Fidelity Synthetic GI Images")
33
+ print(f"πŸ‘€ Analyzing submission repository: {args.repo_id} πŸ‘€")
34
+
35
+ try:
36
+ print(f"Logged in to HuggingFace as: {whoami()['name']}")
37
+ except Exception:
38
+ print("⚠️⚠️ Not logged in to HuggingFace! Please get your login token from https://huggingface.co/settings/tokens 🌐")
39
+ login()
40
+
41
+ client = Client("SimulaMet/medvqa")
42
+ print("πŸ’“ Communicating with the Submission Server: Ping!")
43
+ result = client.predict(
44
+ api_name="/refresh_page"
45
+ )
46
+ print(result)
47
+
48
+
49
+ hf_username = whoami()['name']
50
+ assert len(hf_username) > 0, "🚫 HuggingFace login failed for some reason"
51
+ current_timestamp = int(time.time())
52
+
53
+ snap_dir = snapshot_download(
54
+ repo_id=args.repo_id, allow_patterns=[submission_file, "requirements.txt"])
55
+
56
+ if not os.path.isfile(os.path.join(snap_dir, submission_file)):
57
+ raise FileNotFoundError(
58
+ f"Submission file '{submission_file}' not found in the repository!")
59
+
60
+ if os.path.isfile(os.path.join(snap_dir, file_from_validation)):
61
+ os.remove(os.path.join(snap_dir, file_from_validation))
62
+
63
+ print("πŸ“¦ Making sure of the minimum requirements to run the script πŸ“¦")
64
+ sp.run(["python", "-m", "pip", "install", "-q"] + min_library, check=True)
65
+
66
+ if os.path.isfile(os.path.join(snap_dir, "requirements.txt")):
67
+ print(
68
+ f"πŸ“¦ Installing requirements from the submission repo: {args.repo_id}/requirements.txt")
69
+ sp.run(["python", "-m", "pip", "install", "-q", "-r",
70
+ f"{snap_dir}/requirements.txt"], cwd=snap_dir, check=True)
71
+
72
+ sp.run(["python", f"{snap_dir}/{submission_file}"],
73
+ cwd=snap_dir, check=True)
74
+ print(
75
+ f"πŸŽ‰ The submission script ran successfully, the intermediate files are at {snap_dir}")
76
+
77
+ if not MEDVQA_SUBMIT:
78
+ print("\n You can now run medvqa validate_and_submit .... command to submit the task.")
79
+ else:
80
+ print("πŸš€ Preparing for submission πŸš€")
81
+ file_path_to_upload = os.path.join(
82
+ snap_dir, f"{hf_username}-_-_-{current_timestamp}-_-_-task1.json")
83
+ shutil.copy(os.path.join(snap_dir, file_from_validation),
84
+ file_path_to_upload) # Use shutil.copy here
85
+ # add repo_id to the submission file
86
+ with open(file_path_to_upload, 'r', encoding='utf-8') as f:
87
+ data = json.load(f)
88
+ data['repo_id'] = args.repo_id
89
+ with open(file_path_to_upload, 'w', encoding='utf-8') as f:
90
+ json.dump(data, f, ensure_ascii=False)
91
+ api = HfApi()
92
+ api.update_repo_visibility(args.repo_id, private=False) # Make public
93
+ api.update_repo_settings(
94
+ args.repo_id, gated='manual') # Enable gated access
95
+ for user in HF_GATE_ACESSLIST:
96
+ try:
97
+ grant_access(args.repo_id, user) # Grant access
98
+ except Exception as e:
99
+ print(user, ":", e)
100
+ print(
101
+ f'''βœ… {args.repo_id} model is now made public, but gated, and is shared with organizers.
102
+ You should not make the model private or remove/update it until the competition results are announced.
103
+ Feel feel to re-submit the task if you change the model on the repository.
104
+ We will notify you if there are any issues with the submission.
105
+ ''')
106
+
107
+ result = client.predict(
108
+ file=handle_file(file_path_to_upload),
109
+ api_name="/add_submission"
110
+ )
111
+ print({"User": hf_username, "Task": "task1",
112
+ "Submitted_time": str(datetime.fromtimestamp(int(current_timestamp), tz=timezone.utc)) + " UTC"
113
+ })
114
+ print(result)
115
+ print("Visit this URL to see the entry: πŸ‘‡")
116
+ Client("SimulaMet/medvqa")
medvqa/submission_samples/gi-2025/submission_task2.py CHANGED
@@ -105,21 +105,7 @@ total_time, final_mem = round(
105
  time.time() - start_time, 4), round(get_mem() - post_model_mem, 2)
106
  model_mem_used = round(post_model_mem - initial_mem, 2)
107
 
108
- # start calculating metrics
109
  DEVICE = torch.device("cuda" if torch.cuda.is_available() else "cpu")
110
-
111
- # weights = Inception_V3_Weights.DEFAULT
112
- # inception = inception_v3(weights=weights).to(DEVICE)
113
- # inception.eval()
114
-
115
- # # --- Preprocessing ---
116
- # IMG_SIZE = 299
117
- # preprocess = transforms.Compose([
118
- # transforms.Resize((IMG_SIZE, IMG_SIZE)),
119
- # transforms.ToTensor(),
120
- # transforms.Normalize([0.5]*3, [0.5]*3),
121
- # ])
122
-
123
  modelx = AutoModel.from_pretrained(
124
  "ikim-uk-essen/BiomedCLIP_ViT_patch16_224", trust_remote_code=True).to(DEVICE)
125
  processor = AutoProcessor.from_pretrained(
 
105
  time.time() - start_time, 4), round(get_mem() - post_model_mem, 2)
106
  model_mem_used = round(post_model_mem - initial_mem, 2)
107
 
 
108
  DEVICE = torch.device("cuda" if torch.cuda.is_available() else "cpu")
 
 
 
 
 
 
 
 
 
 
 
 
 
109
  modelx = AutoModel.from_pretrained(
110
  "ikim-uk-essen/BiomedCLIP_ViT_patch16_224", trust_remote_code=True).to(DEVICE)
111
  processor = AutoProcessor.from_pretrained(