- from monai.bundle import get_all_bundles_list, get_bundle_info, get_bundle_versions, download
+ from monai.bundle import download
+ # imports needed by the new _get_all_bundles_info helper below
+ import json
+ import re
+ import requests
+ from typing import Dict
from monai.bundle.config_parser import ConfigParser
from jinja2 import Environment, FileSystemLoader
from bs4 import BeautifulSoup
import markdown
import tempfile
import os
- def get_bundle_web_data(bundle_name, download_dir):
+ def _get_all_bundles_info(repo: str = "Project-MONAI/model-zoo", tag: str = "hosting_storage_v1"):
+     request_url = f"https://api.github.com/repos/{repo}/releases"
+     resp = requests.get(request_url)
+     resp.raise_for_status()
+     releases_list = json.loads(resp.text)
+     bundle_name_pattern = re.compile(r"_v\d*.")
+     bundles_info: Dict = {}
+
+     for release in releases_list:
+         if release["tag_name"] == tag:
+             for asset in release["assets"]:
+                 asset_name = bundle_name_pattern.split(asset["name"])[0]
+                 if asset_name not in bundles_info:
+                     bundles_info[asset_name] = {}
+                 asset_version = asset["name"].split(f"{asset_name}_v")[-1].replace(".zip", "")
+                 bundles_info[asset_name][asset_version] = {
+                     "id": asset["id"],
+                     "name": asset["name"],
+                     "size": asset["size"],
+                     "download_count": asset["download_count"],
+                     "browser_download_url": asset["browser_download_url"],
+                     "created_at": asset["created_at"],
+                     "updated_at": asset["updated_at"],
+                 }
+
+     return bundles_info
+
+ def get_bundle_web_data(bundle_name, download_dir, all_bundles_info):
+     current_bundle = all_bundles_info[bundle_name]
+
    web_data = {}
    web_data["bundle_name"] = bundle_name
-     versions_info = get_bundle_versions(bundle_name=bundle_name)
-     # calculate download count
+
    download_count = 0
-     for version in versions_info["all_versions"]:
-         download_count += get_bundle_info(bundle_name=bundle_name, version=version)["download_count"]
+
+     for key, item in current_bundle.items():
+         download_count += item["download_count"]
    web_data["downloads"] = download_count
-     # get latest update time, after this PR: https://github.com/Project-MONAI/MONAI/pull/5141 is merged,
-     # the data can be achieved.
-     latest_info = get_bundle_info(bundle_name=bundle_name, version=versions_info["latest_version"])
-     if "updated_at" in latest_info:
-         web_data["latest_update"] = latest_info["updated_at"]
+
+     latest_version = sorted(current_bundle.keys())[-1]
+
+     if "updated_at" in current_bundle[latest_version]:
+         web_data["latest_update"] = current_bundle[latest_version]["updated_at"]
+
    # download zip file and get readme, license, metadata path
-     download(name=bundle_name, version=versions_info["latest_version"], bundle_dir=download_dir)
+     download(name=bundle_name, version=latest_version, bundle_dir=download_dir)

    readme_path = os.path.join(download_dir, bundle_name, "docs", "README.md")
    if os.path.exists(readme_path):
@@ -45,96 +75,96 @@ def get_bundle_web_data(bundle_name, download_dir):
    metadata_path = os.path.join(download_dir, bundle_name, "configs", "metadata.json")
    if os.path.exists(metadata_path):
        web_data["metadata"] = ConfigParser.load_config_file(metadata_path)
-
-     web_data["download_url"] = latest_info["browser_download_url"]
-     web_data["size"] = latest_info["size"]
-     web_data["image_path"] = "https://raw.githubusercontent.com/Project-MONAI/model-zoo/dev/models/" + bundle_name + "/docs/"
+
+     web_data["download_url"] = current_bundle[latest_version]["browser_download_url"]
+     web_data["size"] = current_bundle[latest_version]["size"]
+     web_data["image_path"] = "https://raw.githubusercontent.com/Project-MONAI/model-zoo/dev/models/" + bundle_name + "/docs/"

    return web_data

def main():
-     all_bundle_list = get_all_bundles_list()
-     temp_dir = tempfile.mkdtemp()
+     all_bundle_list = _get_all_bundles_info()
+     temp_dir = tempfile.mkdtemp()
+
+     all_models = {}
+     for item in all_bundle_list.items():
+         bundle_name = item[0]
+         bundle_web_data = get_bundle_web_data(bundle_name, temp_dir, all_bundle_list)
+         all_models[bundle_name] = bundle_web_data

-     all_models = {}
-     for item in all_bundle_list:
-         bundle_name = item[0]
-         bundle_web_data = get_bundle_web_data(bundle_name, temp_dir)
-         all_models[bundle_name] = bundle_web_data

-     environment = Environment(loader=FileSystemLoader("./templates/"))
-     template = environment.get_template("model-template.html")
-     template_string = ""
-     final_string = ""
+     environment = Environment(loader=FileSystemLoader("./templates/"))
+     template = environment.get_template("model-template.html")
+     template_string = ""
+     final_string = ""
-     for model in all_models:
-         model_values = {}
-         try:
-             model_values["model_name"] = model.replace("_", " ").capitalize()
-         except KeyError:
-             model_values["model_name"] = ""
-
-         try:
-             model_values["description"] = all_models[model]["metadata"]["description"]
-         except KeyError:
-             model_values["description"] = ""
-
-         try:
-             model_values["authors"] = all_models[model]["metadata"]["authors"]
-         except KeyError:
-             model_values["authors"] = ""
-
-         try:
-             model_values["papers"] = all_models[model]["metadata"]["references"]
-         except KeyError:
-             model_values["papers"] = []
-
-         try:
-             model_values["download_url"] = all_models[model]["download_url"]
-         except KeyError:
-             model_values["download_url"] = ""
-
-         try:
-             model_values["downloads"] = all_models[model]["downloads"]
-         except KeyError:
-             model_values["downloads"] = 0
-
-         try:
-             model_values["last_updated"] = all_models[model]["last_updated"]
-         except KeyError:
-             model_values["last_updated"] = ""
-
-         try:
-             model_values["readme"] = all_models[model]["readme_html"]
-         except KeyError:
-             model_values["readme"] = ""
-
-         try:
-             file_size = all_models[model]["size"]
-             model_values["size"] = str(round(file_size / 1048576, 1)) + "MB"
-         except KeyError:
-             model_values["size"] = ""
-
-         content = template.render(model_values)
-         template_string += str(content)
+     for model in all_models:
+         model_values = {}
+         try:
+             model_values["model_name"] = model.replace("_", " ").capitalize()
+         except KeyError:
+             model_values["model_name"] = ""
+
+         try:
+             model_values["description"] = all_models[model]["metadata"]["description"]
+         except KeyError:
+             model_values["description"] = ""
+
+         try:
+             model_values["authors"] = all_models[model]["metadata"]["authors"]
+         except KeyError:
+             model_values["authors"] = ""
+
+         try:
+             model_values["papers"] = all_models[model]["metadata"]["references"]
+         except KeyError:
+             model_values["papers"] = []
+
+         try:
+             model_values["download_url"] = all_models[model]["download_url"]
+         except KeyError:
+             model_values["download_url"] = ""
+
+         try:
+             model_values["downloads"] = all_models[model]["downloads"]
+         except KeyError:
+             model_values["downloads"] = 0
+
+         try:
+             # web_data stores this timestamp under "latest_update" (set in get_bundle_web_data)
+             model_values["last_updated"] = all_models[model]["latest_update"]
+         except KeyError:
+             model_values["last_updated"] = ""
+
+         try:
+             model_values["readme"] = all_models[model]["readme_html"]
+         except KeyError:
+             model_values["readme"] = ""
+
+         try:
+             file_size = all_models[model]["size"]
+             model_values["size"] = str(round(file_size / 1048576, 1)) + "MB"
+         except KeyError:
+             model_values["size"] = ""
+
+         content = template.render(model_values)
+         template_string += str(content)
-     with open("model-zoo.html", "r", encoding="utf-8") as f:
-         contents = f.read()
-
-     model_zoo_soup = BeautifulSoup(contents, "html.parser")
-     generated_model_soup = BeautifulSoup(template_string, "html.parser")
-
-     model_entry = model_zoo_soup.find(id="all_models")
-     model_entry.clear()
-     model_entry.append(generated_model_soup)
-     final_string = model_zoo_soup.prettify()
-     f.close()
+     with open("model-zoo.html", "r", encoding="utf-8") as f:
+         contents = f.read()
+
+     model_zoo_soup = BeautifulSoup(contents, "html.parser")
+     generated_model_soup = BeautifulSoup(template_string, "html.parser")
+
+     model_entry = model_zoo_soup.find(id="all_models")
+     model_entry.clear()
+     model_entry.append(generated_model_soup)
+     final_string = model_zoo_soup.prettify()
+     f.close()
+
+     with open("model-zoo.html", "w", encoding="utf-8") as f:
+         f.write(final_string)
+     f.close()

-     with open("model-zoo.html", "w", encoding="utf-8") as f:
-         f.write(final_string)
-     f.close()
-
-

if __name__ == '__main__':
    main()
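For reference, a minimal sketch (not part of the commit) of how the new _get_all_bundles_info helper turns a GitHub release asset name into a bundle name and version; the asset file name used here is only an illustrative example of the model-zoo naming scheme:

import re

bundle_name_pattern = re.compile(r"_v\d*.")  # same pattern as in the script
asset_file = "spleen_ct_segmentation_v0.1.0.zip"  # hypothetical asset name

# everything before the first "_v<digits>" is taken as the bundle name
bundle_name = bundle_name_pattern.split(asset_file)[0]  # "spleen_ct_segmentation"
# the remainder after "<bundle_name>_v", minus ".zip", is taken as the version
version = asset_file.split(f"{bundle_name}_v")[-1].replace(".zip", "")  # "0.1.0"
print(bundle_name, version)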