-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy path retrieval_colbert_postprocess.py
73 lines (60 loc) · 2.15 KB
/
retrieval_colbert_postprocess.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
import csv
import json
import logging
from collections import defaultdict
from dataclasses import dataclass, field
from logging.config import fileConfig
from pathlib import Path
from typing import Literal, Optional

import simple_parsing
from tqdm.auto import tqdm
from tqdm.contrib.logging import logging_redirect_tqdm

from peerqa.utils import url_save_hash
# Configure logging from the repo-level INI file; this runs at import time,
# so the file must exist in the working directory.
fileConfig("logging.ini")
logger = logging.getLogger(__name__)
@dataclass
class Args:
    """Command-line arguments for ColBERT retrieval post-processing."""

    # Directory containing the ColBERT experiment outputs; the aggregated
    # run file is written here as well.
    output_dir: Path = field(default=Path("out"))
    # Granularity at which documents were indexed/retrieved.
    granularity: Literal["sentences", "paragraphs"] = "sentences"
    # Optional query template; when given, its hash is appended to the
    # experiment subdir and the output filename.
    template: Optional[str] = None
    # When True (default), overwrite an existing output file.
    override: bool = True
def main(args):
    """Aggregate per-experiment ColBERT ranking TSVs into one JSON run file.

    Globs every ``paper.nbits=2.ranking.tsv`` under the experiment directory
    for the configured granularity (and template hash, if any), collects
    ``{qid: {doc_id: score}}`` MaxSim scores, and dumps them as JSON to
    ``run-<granularity>-colbert-maxsim[-<hash>].json`` in ``output_dir``.

    Args:
        args: Parsed ``Args`` with ``output_dir``, ``granularity``,
            ``template`` and ``override`` attributes.
    """
    # Hash the template once; it parameterizes both the output filename
    # and the experiment subdirectory.
    if args.template is not None:
        template_hash = url_save_hash(args.template)
        out_path_suffix = f"-{template_hash}"
    else:
        template_hash = None
        out_path_suffix = ""
    out_path = (
        args.output_dir / f"run-{args.granularity}-colbert-maxsim{out_path_suffix}.json"
    )
    if (not args.override) and out_path.exists():
        logger.info(f"Skipping since {out_path=} already exists.")
        return
    logger.info(f"Will write results to {out_path=}.")

    subdir = f"colbert-{args.granularity}"
    if template_hash is not None:
        logger.info(f"Adding template hash {template_hash} to subdir.")
        subdir += f"-{template_hash}"
    experiment_dir = args.output_dir / subdir

    # Recursively find every ranking TSV produced by the ColBERT search runs.
    ranking_files = list(experiment_dir.glob("**/paper.nbits=2.ranking.tsv"))
    run = defaultdict(dict)
    for ranking_file in tqdm(ranking_files, ncols=80):
        with open(ranking_file, "r") as fh:
            # Each TSV row is (qid, doc_id, rank, score); rank is unused
            # because the scores themselves order the documents.
            for qid, doc_id, rank, score in csv.reader(fh, delimiter="\t"):
                run[qid][doc_id] = float(score)
    with open(out_path, "w") as f:
        json.dump(run, f, indent=2)
if __name__ == "__main__":
    # parse_known_args tolerates extra CLI flags (e.g. from wrapper scripts).
    args, _ = simple_parsing.parse_known_args(Args)
    # Route log records through tqdm so progress bars are not broken by logging.
    with logging_redirect_tqdm():
        logger.info(args)
        main(args)