|
| 1 | +import os |
| 2 | +import requests |
| 3 | +from rich.progress import track |
| 4 | +from hashlib import sha256 |
| 5 | +from ..env import HOME_PATH, console |
| 6 | +from .base import RemoteFile |
| 7 | + |
# Registry of downloadable datasets: maps a dataset key to the list of
# remote files (URL, local filename, expected SHA-256 hex digest) that
# make up that dataset.
REMOTE_URLS = {
    "locomo": [
        RemoteFile(
            url="https://github.com/snap-research/locomo/raw/refs/heads/main/data/locomo10.json",
            name="locomo10.json",
            # SHA-256 of the file content; checked both when probing for an
            # existing local copy and after downloading.
            hash="79fa87e90f04081343b8c8debecb80a9a6842b76a7aa537dc9fdf651ea698ff4",
        )
    ]
}
| 17 | + |
| 18 | + |
def exist_or_download(dataset: str):
    """Ensure *dataset* is present locally, downloading it when it is not."""
    if not check_local_dataset_exist(dataset):
        download_from_github(dataset)
        return
    console.log(f"Dataset {dataset} already exists")
| 24 | + |
| 25 | + |
def check_local_dataset_exist(dataset: str) -> bool:
    """Return True iff every file of *dataset* exists locally and its SHA-256 matches.

    Args:
        dataset: Key into ``REMOTE_URLS`` identifying the dataset.

    Returns:
        True when all expected files are present under
        ``HOME_PATH/datasets/<dataset>`` with matching hashes, else False.

    Raises:
        ValueError: if *dataset* is not a known dataset key.
    """
    # Raise a real exception instead of `assert`, which is stripped under
    # `python -O` and would let an unknown key fall through.
    if dataset not in REMOTE_URLS:
        raise ValueError(
            f"Dataset {dataset} not found in {list(REMOTE_URLS.keys())}"
        )

    local_path = os.path.join(HOME_PATH, "datasets", dataset)
    # If the dataset dir is missing, so is anything under it; a single
    # existence check covers the old two-step parent/child probing.
    if not os.path.exists(local_path):
        return False

    for df in REMOTE_URLS[dataset]:
        local_file = os.path.join(local_path, df.name)
        if not os.path.exists(local_file):
            return False
        # Hash in 1 MiB chunks rather than f.read() so very large dataset
        # files do not have to fit in memory at once.
        digest = sha256()
        with open(local_file, "rb") as f:
            while chunk := f.read(1 << 20):
                digest.update(chunk)
        if digest.hexdigest() != df.hash:
            return False
    return True
| 48 | + |
| 49 | + |
def download_from_github(dataset: str):
    """Download every file of *dataset* into ``HOME_PATH/datasets/<dataset>``.

    Each file is fetched over HTTP, verified against its expected SHA-256
    digest, and only then written to disk.

    Args:
        dataset: Key into ``REMOTE_URLS`` identifying the dataset.

    Raises:
        ValueError: if *dataset* is unknown, or a downloaded file fails its
            SHA-256 integrity check.
        requests.HTTPError: if any download returns a non-2xx status.
    """
    # Raise instead of `assert` so the validation survives `python -O`.
    if dataset not in REMOTE_URLS:
        raise ValueError(
            f"Dataset {dataset} not found in {list(REMOTE_URLS.keys())}"
        )

    local_path = os.path.join(HOME_PATH, "datasets", dataset)
    # exist_ok avoids the check-then-create race and creates the parent
    # "datasets" directory in the same call.
    os.makedirs(local_path, exist_ok=True)

    console.log(f"Downloading {dataset} from GitHub")
    for df in track(
        REMOTE_URLS[dataset],
        description=f"Downloading {len(REMOTE_URLS[dataset])} files...",
    ):
        # Without a timeout a stalled connection would hang the run forever.
        response = requests.get(df.url, timeout=60)
        response.raise_for_status()

        # Verify integrity BEFORE writing, and via a real exception: the old
        # `assert` would be stripped under `python -O`, silently persisting a
        # corrupted file.
        file_hash = sha256(response.content).hexdigest()
        if file_hash != df.hash:
            raise ValueError(
                f"Hash of {df.name}, {dataset} does not match expected hash"
            )

        local_file = os.path.join(local_path, df.name)
        with open(local_file, "wb") as f:
            f.write(response.content)
0 commit comments