Skip to content

Commit fc3e1a9

Browse files
committed
Fix 403 error when listing releases via the GitHub API. Fix label centering. Fix code pre-wrap issue in the README markdown rendering.
1 parent 632432d commit fc3e1a9

File tree

7 files changed

+368
-500
lines changed

7 files changed

+368
-500
lines changed

.github/workflows/fetch.py

+123-93
Original file line numberDiff line numberDiff line change
@@ -1,27 +1,57 @@
import json
import os
import re
import tempfile

import markdown
import requests
from bs4 import BeautifulSoup
from jinja2 import Environment, FileSystemLoader

from monai.bundle import download
from monai.bundle.config_parser import ConfigParser

def _get_all_bundles_info(repo: str = "Project-MONAI/model-zoo", tag: str = "hosting_storage_v1") -> dict:
    """Collect per-version asset metadata for every bundle hosted in a release.

    Queries the GitHub Releases API for *repo* (one unauthenticated request,
    instead of one request per bundle/version), finds the release whose tag
    equals *tag*, and groups that release's assets by bundle name.

    Args:
        repo: GitHub ``owner/name`` repository to query.
        tag: Release tag that hosts the bundle ``.zip`` assets.

    Returns:
        ``{bundle_name: {version: asset_info}}`` where ``asset_info`` holds the
        asset's id, name, size, download_count, browser_download_url,
        created_at and updated_at fields as returned by the API.

    Raises:
        requests.HTTPError: If the Releases API request fails.
    """
    request_url = f"https://api.github.com/repos/{repo}/releases"
    resp = requests.get(request_url)
    resp.raise_for_status()
    releases_list = json.loads(resp.text)
    # Asset names look like "<bundle>_v<version>.zip"; split on the "_v<digits>."
    # marker to recover the bundle name.  The digits are required and the dot is
    # escaped so a bundle name that merely contains "_v<letter>" (e.g. "_ve...")
    # is not split at the wrong place (the previous pattern r"_v\d*." allowed
    # zero digits and treated "." as "any character").
    bundle_name_pattern = re.compile(r"_v\d+\.")
    bundles_info: dict = {}

    for release in releases_list:
        if release["tag_name"] != tag:
            continue
        for asset in release["assets"]:
            asset_name = bundle_name_pattern.split(asset["name"])[0]
            asset_version = asset["name"].split(f"{asset_name}_v")[-1].replace(".zip", "")
            bundles_info.setdefault(asset_name, {})[asset_version] = {
                "id": asset["id"],
                "name": asset["name"],
                "size": asset["size"],
                "download_count": asset["download_count"],
                "browser_download_url": asset["browser_download_url"],
                "created_at": asset["created_at"],
                "updated_at": asset["updated_at"],
            }

    return bundles_info
36+
def get_bundle_web_data(bundle_name, download_dir, all_bundles_info):
37+
current_bundle = all_bundles_info[bundle_name]
38+
1039
web_data = {}
1140
web_data["bundle_name"] = bundle_name
12-
versions_info = get_bundle_versions(bundle_name=bundle_name)
13-
# calculate download count
41+
1442
download_count = 0
15-
for version in versions_info["all_versions"]:
16-
download_count += get_bundle_info(bundle_name=bundle_name, version=version)["download_count"]
43+
44+
for key, item in current_bundle.items():
45+
download_count += item["download_count"]
1746
web_data["downloads"] = download_count
18-
# get latest update time, after this PR: https://github.com/Project-MONAI/MONAI/pull/5141 is merged,
19-
# the data can be achieved.
20-
latest_info = get_bundle_info(bundle_name=bundle_name, version=versions_info["latest_version"])
21-
if "updated_at" in latest_info:
22-
web_data["latest_update"] = latest_info["updated_at"]
47+
48+
latest_version = sorted(current_bundle.keys())[-1]
49+
50+
if "updated_at" in current_bundle[latest_version]:
51+
web_data["latest_update"] = current_bundle[latest_version]["updated_at"]
52+
2353
# download zip file and get readme, license, metadata path
24-
download(name=bundle_name, version=versions_info["latest_version"], bundle_dir=download_dir)
54+
download(name=bundle_name, version=latest_version, bundle_dir=download_dir)
2555

2656
readme_path = os.path.join(download_dir, bundle_name, "docs", "README.md")
2757
if os.path.exists(readme_path):
@@ -45,96 +75,96 @@ def get_bundle_web_data(bundle_name, download_dir):
4575
metadata_path = os.path.join(download_dir, bundle_name, "configs", "metadata.json")
4676
if os.path.exists(metadata_path):
4777
web_data["metadata"] = ConfigParser.load_config_file(metadata_path)
48-
49-
web_data["download_url"] = latest_info["browser_download_url"]
50-
web_data["size"] = latest_info["size"]
51-
web_data["image_path"] = "https://raw.githubusercontent.com/Project-MONAI/model-zoo/dev/models/" + bundle_name + "/docs/"
78+
79+
web_data["download_url"] = current_bundle[latest_version]["browser_download_url"]
80+
web_data["size"] = current_bundle[latest_version]["size"]
81+
web_data["image_path"] = "https://raw.githubusercontent.com/Project-MONAI/model-zoo/dev/models/" + bundle_name + "/docs/"
5282

5383
return web_data
5484

def main():
    """Regenerate ``model-zoo.html``.

    Fetches all bundle metadata in one Releases-API call, renders one model
    card per bundle from ``templates/model-template.html``, and splices the
    generated markup into the page element with id ``all_models``.
    """
    all_bundles_info = _get_all_bundles_info()
    temp_dir = tempfile.mkdtemp()

    # _get_all_bundles_info returns {bundle_name: {version: asset_info}};
    # iterate the bundle names directly instead of unpacking items() by index.
    all_models = {}
    for bundle_name in all_bundles_info:
        all_models[bundle_name] = get_bundle_web_data(bundle_name, temp_dir, all_bundles_info)

    environment = Environment(loader=FileSystemLoader("./templates/"))
    template = environment.get_template("model-template.html")
    template_string = ""

    for model, web_data in all_models.items():
        # Optional fields fall back to empty defaults; the original code spelled
        # this out as nine separate try/except KeyError ladders.
        metadata = web_data.get("metadata", {})
        model_values = {
            "model_name": model.replace("_", " ").capitalize(),
            "description": metadata.get("description", ""),
            "authors": metadata.get("authors", ""),
            "papers": metadata.get("references", []),
            "download_url": web_data.get("download_url", ""),
            "downloads": web_data.get("downloads", 0),
            # BUG FIX: get_bundle_web_data stores the timestamp under
            # "latest_update"; the previous lookup of "last_updated" always
            # raised KeyError and silently blanked the date on every card.
            "last_updated": web_data.get("latest_update", ""),
            "readme": web_data.get("readme_html", ""),
        }
        if "size" in web_data:
            # Asset size arrives in bytes; show it in binary megabytes.
            model_values["size"] = str(round(web_data["size"] / 1048576, 1)) + "MB"
        else:
            model_values["size"] = ""

        model_values_rendered = template.render(model_values)
        template_string += str(model_values_rendered)

    # The with-statement closes the file; the old explicit f.close() calls
    # inside the blocks were redundant.
    with open("model-zoo.html", "r", encoding="utf-8") as f:
        contents = f.read()

    model_zoo_soup = BeautifulSoup(contents, "html.parser")
    generated_model_soup = BeautifulSoup(template_string, "html.parser")

    model_entry = model_zoo_soup.find(id="all_models")
    model_entry.clear()
    model_entry.append(generated_model_soup)
    final_string = model_zoo_soup.prettify()

    with open("model-zoo.html", "w", encoding="utf-8") as f:
        f.write(final_string)


if __name__ == '__main__':
    main()

.github/workflows/fetch.yml

+4
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,10 @@ on:
88
schedule:
99
- cron: "0 0 * * 0"
1010

11+
push:
12+
branches:
13+
- master
14+
1115
# Allows you to run this workflow manually from the Actions tab
1216
workflow_dispatch:
1317

0 commit comments

Comments
 (0)