Skip to content

Commit 6b567f3

Browse files
authored
Update emit_static_api.py
1 parent 3b97975 commit 6b567f3

File tree

1 file changed

+44
-38
lines changed

1 file changed

+44
-38
lines changed

scripts/API/emit_static_api.py

Lines changed: 44 additions & 38 deletions
Original file line numberDiff line numberDiff line change
@@ -171,26 +171,29 @@ def sha256_file(path: pathlib.Path) -> str:
171171
h.update(chunk)
172172
return h.hexdigest()
173173

174+
def _parse_iso_date(val: Any) -> datetime | None:
175+
"""
176+
Accepts 'YYYY-MM-DD' or ISO datetime; returns datetime or None.
177+
"""
178+
if not val:
179+
return None
180+
s = str(val).strip()
181+
try:
182+
return datetime.fromisoformat(s)
183+
except ValueError:
184+
# Support "2024-10-23T12:34:56Z" style if it ever shows up
185+
if s.endswith("Z"):
186+
try:
187+
return datetime.fromisoformat(s[:-1] + "+00:00")
188+
except ValueError:
189+
return None
190+
return None
191+
174192
# ========================= main =========================
175193

176194
def main():
177195
ensure_src()
178196

179-
# Load previous index (for new/recent detection) BEFORE we overwrite it
180-
prev_full: Dict[str, Any] = {}
181-
prev_by_slug: Dict[str, Dict[str, Any]] = {}
182-
prev_index_path = API / "index.json"
183-
if prev_index_path.exists():
184-
try:
185-
prev_full = json.loads(prev_index_path.read_text())
186-
except Exception as exc:
187-
sys.stderr.write(f"[emit_static_api] warning: failed to parse previous index.json: {exc}\n")
188-
prev_full = {}
189-
if prev_full:
190-
for prev_item in get_items(prev_full):
191-
prev_slug = item_slug(prev_item)
192-
prev_by_slug[prev_slug] = prev_item
193-
194197
# 1) Full dataset passthrough
195198
dst_index = copy_index()
196199
data_full = json.loads(dst_index.read_text())
@@ -204,17 +207,6 @@ def main():
204207
# 3) Aggregate stats (using normalized, expanded versions)
205208
items = get_items(data_full)
206209

207-
# Determine change state for each item vs previous index
208-
change_map: Dict[str, str] = {}
209-
if prev_by_slug:
210-
for it in items:
211-
slug = item_slug(it)
212-
prev_it = prev_by_slug.get(slug)
213-
if prev_it is None:
214-
change_map[slug] = "new"
215-
elif prev_it != it:
216-
change_map[slug] = "updated"
217-
218210
byType = {
219211
"plugin": len(plugins),
220212
"theme": len(themes),
@@ -230,15 +222,14 @@ def main():
230222
versions = item_versions(it) # normalized to canonical list
231223
creators = item_creators(it)
232224
slug = item_slug(it)
233-
change_state = change_map.get(slug, "")
234225

235226
for v in versions:
236227
versions_present.add(v)
237228
byVersion[v] = byVersion.get(v, 0) + 1
238229
for c in creators:
239230
creators_count[c] = creators_count.get(c, 0) + 1
240231

241-
enriched.append({**it, "_slug": slug, "_versions": versions, "_creators": creators, "_change": change_state})
232+
enriched.append({**it, "_slug": slug, "_versions": versions, "_creators": creators})
242233

243234
# 4) stats.json
244235
stats = {
@@ -262,8 +253,7 @@ def main():
262253

263254
# 7) Per-item docs + expose normalized fields to make consumption easier
264255
search_index: List[Dict[str, Any]] = []
265-
new_items_docs: List[Dict[str, Any]] = []
266-
recent_items_docs: List[Dict[str, Any]] = []
256+
all_docs: List[Dict[str, Any]] = []
267257

268258
for it in enriched:
269259
slug = it["_slug"]
@@ -280,11 +270,7 @@ def main():
280270
if it["_creators"]:
281271
doc["creator_slug"] = it["_creators"][0].lower()
282272

283-
change_state = it.get("_change") or ""
284-
if change_state == "new":
285-
new_items_docs.append(doc)
286-
elif change_state == "updated":
287-
recent_items_docs.append(doc)
273+
all_docs.append(doc)
288274

289275
# write per-item
290276
item_path = API / "items" / f"{slugify(slug)}.json"
@@ -326,9 +312,29 @@ def main():
326312
# 10) search-index.json (compact for client-side search)
327313
write_json(API / "search-index.json", search_index)
328314

329-
# 11) new/recent activity endpoints
330-
write_json(API / "new.json", new_items_docs)
331-
write_json(API / "recent.json", recent_items_docs)
315+
# 11) new.json / recent.json based on added_at / updated_at
316+
new_candidates: List[Tuple[datetime, Dict[str, Any]]] = []
317+
recent_candidates: List[Tuple[datetime, Dict[str, Any]]] = []
318+
319+
for doc in all_docs:
320+
added_dt = _parse_iso_date(doc.get("added_at"))
321+
updated_dt = _parse_iso_date(doc.get("updated_at"))
322+
323+
if added_dt is not None:
324+
new_candidates.append((added_dt, doc))
325+
326+
key_dt = updated_dt or added_dt
327+
if key_dt is not None:
328+
recent_candidates.append((key_dt, doc))
329+
330+
new_candidates.sort(key=lambda pair: pair[0], reverse=True)
331+
recent_candidates.sort(key=lambda pair: pair[0], reverse=True)
332+
333+
new_docs = [doc for _, doc in new_candidates]
334+
recent_docs = [doc for _, doc in recent_candidates]
335+
336+
write_json(API / "new.json", new_docs)
337+
write_json(API / "recent.json", recent_docs)
332338

333339
# 12) manifest.json (sizes + sha256)
334340
manifest_entries = []

0 commit comments

Comments (0)