diff --git a/.github/actions/config/action.yml b/.github/actions/config/action.yml
index 87161eec..00cbcd4a 100644
--- a/.github/actions/config/action.yml
+++ b/.github/actions/config/action.yml
@@ -32,8 +32,9 @@ runs:
            --trigger-default-branch='${{ github.event.repository.default_branch }}' \
            --trigger-branch='${{ github.head_ref || github.ref_name }}' --trigger-commit='${{ github.sha }}' \
            --github-token='${{ inputs.FM_CI_TOKEN }}'" \
+           --log-level='WARN' \
          | tee -a "$GITHUB_OUTPUT"
-        echo ci-script="uv --project dev/ci/gen/ run dev/ci/gen/ci.py" \
+        echo ci-script="uv -q --project dev/ci/gen/ run dev/ci/gen/ci.py" \
          | tee -a "$GITHUB_OUTPUT"
    - id: checkout
      name: "Initial Checkout"
@@ -41,7 +42,14 @@ runs:
      working-directory: ${{ env.WORKDIR }}
      run: |
        # export GIT_PYTHON_TRACE=1
-        ${{ steps.parse.outputs.ci-script }} checkout_workspace ${{ steps.parse.outputs.args }}
+        ${{ steps.parse.outputs.ci-script }} checkout_workspace ${{ steps.parse.outputs.args }} \
+          2> >(tee ${{ env.SCRATCHDIR }}/checkout.log >&2)
+        if [[ -s ${{ env.SCRATCHDIR }}/checkout.log ]]; then
+          echo -e '# `checkout_workspace`\n' >> $GITHUB_STEP_SUMMARY
+          echo '```' >> $GITHUB_STEP_SUMMARY
+          cat ${{ env.SCRATCHDIR }}/checkout.log >> $GITHUB_STEP_SUMMARY
+          echo -e '```\n' >> $GITHUB_STEP_SUMMARY
+        fi
    - id: shallow_clones
      name: "Shallow Clones"
      shell: bash
@@ -49,7 +57,7 @@ runs:
      run: |
        make lightweight-clone -j
    - id: config
-      name: "Commit Files"
+      name: "Build commit files"
      shell: bash
      working-directory: ${{ env.WORKDIR }}
      run: |
@@ -57,9 +65,29 @@ runs:
        ${{ steps.parse.outputs.ci-script }} config ${{ steps.parse.outputs.args }} \
          --output-file-job="${{ env.COMMITS_JOB }}" \
          --output-file-base="${{ env.COMMITS_BASE }}" \
-          --output-file-github="$GITHUB_OUTPUT"
+          --output-file-github="$GITHUB_OUTPUT" \
+          2> >(tee ${{ env.SCRATCHDIR }}/config.log >&2)
+        if [[ -s ${{ env.SCRATCHDIR }}/config.log ]]; then
+          echo -e '# `config`\n' >> $GITHUB_STEP_SUMMARY
+          echo '```' >> $GITHUB_STEP_SUMMARY
+          cat ${{ env.SCRATCHDIR }}/config.log >> $GITHUB_STEP_SUMMARY
+          echo -e '```\n' >> $GITHUB_STEP_SUMMARY
+        fi
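Both run blocks above use the same stderr-capture idiom. A minimal sketch of it, with `some_command` standing in for the generated CI script and a placeholder log path:

````bash
#!/usr/bin/env bash
LOG=/tmp/step.log

# Process substitution: tee duplicates stderr into $LOG while forwarding it
# to the real stderr, so warnings stay visible in the live job log.
some_command 2> >(tee "$LOG" >&2)

# [[ -s ]] is true only for an existing, non-empty file, so clean runs add
# nothing to the step summary.
if [[ -s "$LOG" ]]; then
  { echo '```'; cat "$LOG"; echo '```'; } >> "$GITHUB_STEP_SUMMARY"
fi
````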
+    - name: "Output commit files"
+      shell: bash
+      working-directory: ${{ env.WORKDIR }}
+      run: |
        echo "==== JOB COMMITS ===="
-        cat ${{ env.COMMITS_JOB }}
+        echo -e '\n# Commits\n\n' >> $GITHUB_STEP_SUMMARY
+        echo -e '\n<details>\n<summary>Job Commits</summary>\n\n```' >> $GITHUB_STEP_SUMMARY
+        cat ${{ env.COMMITS_JOB }} | tee -a $GITHUB_STEP_SUMMARY
+        echo -e '```\n\n</details>' >> $GITHUB_STEP_SUMMARY
+        if [[ "${{ steps.config.outputs.compare }}" = "1" ]]; then
+          echo "==== BASE COMMITS ===="
+          echo -e '\n<details>\n<summary>Base Commits</summary>\n\n```' >> $GITHUB_STEP_SUMMARY
+          cat ${{ env.COMMITS_BASE }} | tee -a $GITHUB_STEP_SUMMARY
+          echo -e '```\n\n</details>' >> $GITHUB_STEP_SUMMARY
+        fi
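GitHub renders raw HTML `<details>` blocks inside the step summary, which is what the markup above relies on. A hypothetical helper capturing the same pattern:

````bash
# Hypothetical helper: wrap a file in a collapsed, fenced section of the
# step summary.
details_section() {  # $1: section title, $2: file to embed
  echo -e "\n<details>\n<summary>$1</summary>\n\n\`\`\`"
  cat "$2"
  echo -e "\`\`\`\n\n</details>"
}

details_section "Job Commits" commits_job.txt >> "$GITHUB_STEP_SUMMARY"
````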
    - id: artifact
      uses: actions/upload-artifact@v4
      name: "Upload Commit Files"
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 3e9871af..1115cba1 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -55,6 +55,7 @@ jobs:
        with:
          FM_CI_TOKEN: ${{ secrets.FM_CI_TOKEN }}
      - id: config
+        name: "Generate job config"
        uses: ./workspace_checkout/.github/actions/config
        with:
          FM_CI_TOKEN: ${{ secrets.FM_CI_TOKEN }}
@@ -263,6 +264,10 @@ jobs:
          source ${{ env.BOILERPLATE }}
          ulimit -S -s

+      - name: "Build dune-report tool"
+        run: |
+          opam install -y fmdeps/skylabs-fm/ocaml-dune-report/
+
      - id: build-job-asts
        name: "Build Job ASTs"
        run: |
@@ -288,6 +293,7 @@ jobs:
          needs.gen-job.outputs.compare == '1'
        run: |
          source ${{ env.BOILERPLATE }}
+          dune-report --json > ${{ env.SCRATCHDIR }}/dune-log-job.json
          # Print information on the size of the _build directory.
          du -hs _build
          du -hc $(find _build -type f -name "*.v") | tail -n 1
@@ -296,10 +302,11 @@ jobs:
          # Extract data.
          find _build/ -name '*.vo'| sort | xargs md5sum > ${{ env.SCRATCHDIR }}/md5sums.txt
          dune exec -- globfs.extract-all ${NJOBS} _build/default
-          # echo -e "\e[0Ksection_start:`date +%s`:section_9[collapsed=true]\r\e[0KGenerate code quality report"
-          (cd _build/default; dune exec -- coqc-perf.report .) | tee -a coq_codeq.log
-          cat coq_codeq.log | dune exec -- coqc-perf.code-quality-report > ${{ env.SCRATCHDIR }}/gl-code-quality-report.json || true
-          # echo -e "\e[0Ksection_end:`date +%s`:section_9\r\e[0K"
+          # Logs from glob files
+          mkdir ${{ env.SCRATCHDIR }}/globs-job/
+          find _build/ -type f -! -empty -name '*.glob.std*' > files_to_rsync
+          rsync -a --prune-empty-dirs --files-from=files_to_rsync ./ ${{ env.SCRATCHDIR }}/globs-job/
+          #
          dune exec -- coqc-perf.extract-all _build/default perf-data
          dune exec -- hint-data.extract-all ${NJOBS} perf-data
          du -hs _build
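The glob-log collection added above is a two-step shape: `find` lists the wanted files (`-! -empty` keeps only non-empty ones), and `rsync --files-from` copies exactly that list while preserving the relative layout. A reduced sketch with placeholder paths, not the job's real directories:

```bash
# Select the non-empty coqc stdout/stderr captures next to the .glob files.
find _build/ -type f -! -empty -name '*.glob.std*' > files_to_rsync

# Copy just those files, keeping their relative paths under the destination;
# --prune-empty-dirs skips directories that would end up containing nothing.
rsync -a --prune-empty-dirs --files-from=files_to_rsync ./ /tmp/globs-job/
```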
@@ -337,6 +344,7 @@ jobs:
          needs.gen-job.outputs.compare == '1'
        run: |
          source ${{ env.BOILERPLATE }}
+          dune-report --json > ${{ env.SCRATCHDIR }}/dune-log-base.json
          # Print information on the size of the _build directory.
          du -hs _build
          du -hc $(find _build -type f -name "*.v") | tail -n 1
@@ -345,6 +353,11 @@ jobs:
          # Extract data.
          find _build/ -name '*.vo'| sort | xargs md5sum > ${{ env.SCRATCHDIR }}/md5sums_ref.txt
          dune exec -- globfs.extract-all ${NJOBS} _build/default
+          # Logs from glob files
+          mkdir ${{ env.SCRATCHDIR }}/globs-base/
+          find _build/ -type f -! -empty -name '*.glob.std*' > files_to_rsync
+          rsync -a --prune-empty-dirs --files-from=files_to_rsync ./ ${{ env.SCRATCHDIR }}/globs-base/
+          #
          dune exec -- coqc-perf.extract-all _build/default perf-data
          dune exec -- hint-data.extract-all ${NJOBS} perf-data
          du -hs _build
@@ -374,9 +387,12 @@ jobs:

          dune exec -- coqc-perf.summary-diff --no-colors --instr-threshold 1 --csv perf-data_ref/perf_summary.csv perf-data/perf_summary.csv > ${{ env.SCRATCHDIR }}/perf_analysis.csv
          dune exec -- coqc-perf.summary-diff --no-colors --instr-threshold 1 --gitlab --diff-base-url "https://skylabs_ai.gitlab.io/-/FM/fm-ci/-/jobs/${CI_JOB_ID}/artifacts/perf-report" perf-data_ref/perf_summary.csv perf-data/perf_summary.csv > ${{ env.SCRATCHDIR }}/perf_analysis_gitlab.md
+          echo "# Performance Report" >> $GITHUB_STEP_SUMMARY
+          echo "Performance summary for ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}" > ${{ env.SCRATCHDIR }}/perf_analysis_comment.md
          echo "" >> ${{ env.SCRATCHDIR }}/perf_analysis_comment.md
          dune exec -- coqc-perf.summary-diff --no-colors --instr-threshold 1 --github perf-data_ref/perf_summary.csv perf-data/perf_summary.csv | tee | tee -a ${{ env.SCRATCHDIR }}/perf_analysis_comment.md >> $GITHUB_STEP_SUMMARY
+          echo -e "\n" >> $GITHUB_STEP_SUMMARY
          dune exec -- coqc-perf.html-diff-all perf-data_ref perf-data ${{ env.SCRATCHDIR }}/perf-report

          # Adding hint data diff
@@ -393,6 +409,20 @@ jobs:
          echo -e '\n\n' >> ${{ env.SCRATCHDIR }}/perf_analysis_gitlab.md
          # python3 support/fm-perf/post_fm_perf.py --access-token ${PROOF_PERF_TOKEN} --project-id 74911021 --mr-id 3913 -f ${{ env.SCRATCHDIR }}/perf_analysis_gitlab.md --pipe-url "https://gitlab.com/skylabs_ai/FM/auto/-/pipelines/2116778517"

+          # Code Quality diff
+          mkdir -p ${{ env.SCRATCHDIR }}/globs-job/_build/default
+          find ${{ env.SCRATCHDIR }}/globs-job -type f -! -empty -name '*.glob.std*' > ${{ env.SCRATCHDIR }}/globs-job/_build/default/files
+          mkdir -p ${{ env.SCRATCHDIR }}/globs-base/_build/default
+          find ${{ env.SCRATCHDIR }}/globs-base -type f -! -empty -name '*.glob.std*' > ${{ env.SCRATCHDIR }}/globs-base/_build/default/files
+          (dune exec -- coqc-perf.code-quality-diff \
+            --before-globs-from-file ${{ env.SCRATCHDIR }}/globs-base/_build/default/files \
+            --after-globs-from-file ${{ env.SCRATCHDIR }}/globs-job/_build/default/files \
+            --before-dune ${{ env.SCRATCHDIR }}/dune-log-base.json \
+            --after-dune ${{ env.SCRATCHDIR }}/dune-log-job.json \
+            >> $GITHUB_STEP_SUMMARY) \
+            || true
+          echo -e "\n" >> $GITHUB_STEP_SUMMARY
+
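The report generation above is deliberately best-effort: the subshell groups the command with its redirection, and `|| true` keeps a failure of the code-quality diff from failing the whole job. The bare pattern, with `generate_report` as a hypothetical stand-in:

```bash
# Append the report when it can be produced; otherwise leave the job green.
(generate_report >> "$GITHUB_STEP_SUMMARY") || true
```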
      - name: "Post Performance Analysis Comment"
        if: |
          !cancelled() &&
@@ -413,13 +443,13 @@ jobs:
          -d @${{ env.SCRATCHDIR }}/perf_analysis_comment.json

-      - name: "Upload Artifacts"
+      - name: "Upload Summary Artifacts"
        if: |
          !cancelled() &&
          github.event_name != 'push'
        uses: actions/upload-artifact@v4
        with:
-          name: "reports"
+          name: "summary-reports"
          if-no-files-found: ignore
          path: |
            ${{ env.SCRATCHDIR }}/commits.txt
@@ -437,3 +467,15 @@ jobs:
            ${{ env.SCRATCHDIR }}/hint_data_diff.html
            ${{ env.SCRATCHDIR }}/hint-data_ref.csv
            ${{ env.SCRATCHDIR }}/perf_summary_ref.csv
+            ${{ env.SCRATCHDIR }}/gl-code-quality-report.json
+
+      # - name: "Upload Detailed Performance Artifacts"
+      #   if: |
+      #     !cancelled() &&
+      #     github.event_name != 'push'
+      #   uses: actions/upload-artifact@v4
+      #   with:
+      #     name: "performance-reports"
+      #     if-no-files-found: ignore
+      #     path: |
+      #       ${{ env.SCRATCHDIR }}/perf-report
diff --git a/dev/ci/gen/ci.py b/dev/ci/gen/ci.py
index f4d12088..93d9a686 100644
--- a/dev/ci/gen/ci.py
+++ b/dev/ci/gen/ci.py
@@ -25,10 +25,7 @@


 def reconfigure_logging(level):
-    logging.basicConfig(level=logging.INFO, format=log_format, force=True)
-
-
-reconfigure_logging(logging.INFO)
+    logging.basicConfig(level=level, format=log_format, force=True)


 GITHUB_ORGA = "SkylabsAI"
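With the module-level `reconfigure_logging(logging.INFO)` call removed, the log level is now chosen by the caller (ultimately the new `--log-level` flag; see `add_common_args` below). A runnable sketch of the behavior; the format string is a placeholder, and the name-to-level mapping is an assumption about the `log_level` helper not shown in this diff:

```python
import logging

def log_level(level_str):
    # Assumed mapping: "WARN" -> logging.WARN; unknown names raise AttributeError.
    return getattr(logging, level_str)

def reconfigure_logging(level):
    # force=True removes handlers installed by any earlier basicConfig call.
    logging.basicConfig(level=level, format="%(levelname)s %(message)s", force=True)

reconfigure_logging(log_level("WARN"))
logging.warning("shown: at or above WARN")
logging.info("suppressed: below WARN")
```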
@@ -126,6 +123,8 @@
     def git_repo(self):
         return git.Repo(os.path.join(os.getcwd(), self.dir_path))

     def ensure_fetched(self, obj, depth=None):
+        if self.has_commit(obj):
+            return
         if isinstance(obj, str) and obj.startswith("origin/"):
             obj = obj.removeprefix("origin/")
         try:
@@ -311,7 +310,7 @@
 class PRData:
     number: int
     head_ref: str
-    base_ref: str
+    base_commit: str
     mergeable: bool | None  # can be null in github's API
     merge_commit: str | None
     labels: Labels
@@ -323,7 +322,7 @@ def of_api_response(cls, response):
         return cls(
             number=response["number"],
             head_ref=response["head"]["ref"],
-            base_ref=response["base"]["ref"],
+            base_commit=response["base"]["ref"],
             mergeable=mergeable,
             merge_commit=response["merge_commit_sha"] if mergeable else None,
             labels=Labels.of_set(map(lambda x: x["name"], response["labels"])),
@@ -337,10 +336,10 @@
 class RepoData:
     job_branch: (
         str | None
     )  # this is always a branch name but not necessarily an unambiguous reference; does not include [origin/] prefix
-    job_ref: (
+    job_commit: (
         str | None
     )  # this can be a commit in the case of the trigger and also whenever we run pull_request pipelines where the actual code might be a merge commit of job_branch and the target
-    base_ref: str | None
+    base_commit: str | None

     @classmethod
     def empty(cls):
@@ -351,6 +350,7 @@
 class ReposData:
     def __init__(self):
         self.data = {}
+        self.missing_prs_warned = set([])

     def __getitem__(self, repo):
         key = (repo.url, repo.dir_path)
@@ -362,15 +362,15 @@
     def print(self):
         for r in self.data:
             print(f"{r}: {self.data[r]}")

-    async def workspace_job_ref(self, trigger):
+    async def workspace_job_commit(self, trigger):
         ws = Workspace().repo
         data = self[ws]
-        if data.job_ref is not None:
-            return data.job_ref
+        if data.job_commit is not None:
+            return data.job_commit
         if trigger.repo == ws:
             logger.info(f"Pipeline triggered on {WORKSPACE_REPO}")
             logger.info(f"Using trigger commit {trigger.commit}")
-            data.job_ref = trigger.commit
+            data.job_commit = trigger.commit
         elif trigger.labels.same_branch:
             logger.info(f"Pipeline trigger from outside of {WORKSPACE_REPO}")
             job_choices = await self.job_choices(trigger, ws)
@@ -379,15 +379,15 @@
             )
             choice = git_first_existing_choice(ws, job_choices)
             logger.info(f"Using first existing choice for {WORKSPACE_REPO}: {choice}")
-            data.job_ref = choice
+            data.job_commit = choice
         else:
             logger.info(
                 f"Pipeline trigger from outside of {WORKSPACE_REPO} with {Label.NO_SAME_BRANCH} "
             )
             default = ws.default_branch
             logger.info(f"Using default branch {default}")
-            data.job_ref = default
-        return data.job_ref
+            data.job_commit = default
+        return data.job_commit

     # compute the branch names/hashes to try based on the trigger configuration
     async def job_choices(self, trigger, repo):
@@ -404,7 +404,7 @@
         # without same branch we just pick the default branch of the current repo
         return [repo.default_branch]

-    async def compute_job_refs(self, trigger, repos):
+    async def compute_job_commits(self, trigger, repos):
         choices = await asyncio.gather(
             *map(lambda r: self.job_choices(trigger, r), repos)
         )
@@ -412,11 +412,12 @@
             data = self[repo]
             branch = git_first_existing_choice(repo, cs)
             data.job_branch = branch
+            result = None
             if trigger.is_trigger_repo(repo):
                 # for the triggering repo, the trigger commit is the most precise ref we have
                 # it will also already be a merge commit for pull_request triggers
                 assert trigger.branch in cs
-                data.job_ref = trigger.commit
+                result = trigger.commit
             elif (
                 trigger.event_type == EventType.PULL_REQUEST
                 and branch == trigger.branch
@@ -426,7 +427,7 @@
                     repo.git_repo.remotes.origin.fetch(
                         pr.merge_commit
                     )  # we don't get merge commits automatically
-                    data.job_ref = pr.merge_commit
+                    result = pr.merge_commit
                 elif pr is not None:
                     status = (
                         "the mergeability of the PR has not yet been computed by GitHub"
@@ -436,35 +437,38 @@
                     logger.warning(
                         f"Repo {repo.github_path} has a PR for {data.job_branch} but {status}. Falling back to {data.job_branch}."
                     )
-                    data.job_ref = f"origin/{data.job_branch}"
+                    result = f"origin/{data.job_branch}"
                 else:
                     logger.warning(
                         f"Repo {repo.github_path} has a branch {trigger.branch} but no PR exists for it. Cannot determine merge commit. Falling back to {data.job_branch}."
                     )
-                    data.job_ref = f"origin/{data.job_branch}"
+                    result = f"origin/{data.job_branch}"
             else:
-                data.job_ref = f"origin/{data.job_branch}"
+                result = f"origin/{data.job_branch}"
+            assert result is not None
+            repo.ensure_fetched(result)
+            data.job_commit = repo.git_repo.commit(result).hexsha

-    def default_base_ref(self, repo) -> str:
+    def default_base_commit(self, repo) -> str:
         return f"origin/{repo.default_branch}"

     # base ref of pr or default branch
-    async def pr_base_ref(self, repo) -> str:
+    async def pr_base_commit(self, repo) -> str:
         branch = self[repo].job_branch
         pr = await self.pr(repo, branch=branch)
         if pr is not None:
-            return f"origin/{pr.base_ref}"
+            return f"origin/{pr.base_commit}"
         else:
-            return self.default_base_ref(repo)
+            return self.default_base_commit(repo)

-    # needs to run after [compute_job_refs]
-    async def base_ref(self, trigger, repo):
+    # needs to run after [compute_job_commits]
+    async def base_commit(self, trigger, repo):
         data = self[repo]
-        job_ref = data.job_ref
+        job_commit = data.job_commit
         job_branch = data.job_branch
-        if data.base_ref is not None:
-            return data.base_ref
-        has_job = job_ref is not None and job_branch is not None
+        if data.base_commit is not None:
+            return data.base_commit
+        has_job = job_commit is not None and job_branch is not None
         if not has_job:
             logger.info(f"{repo.github_path}: Repo has been deleted.")
         nondefault_pr_base = trigger.non_default_trigger_pr_base()
@@ -474,55 +478,55 @@
             and repo.mode != RepoMode.OWNED
             and job_branch == trigger.branch
         ):
-            base_ref = await self.pr_base_ref(repo)
-            assert base_ref == self.default_base_ref(
+            base_commit = await self.pr_base_commit(repo)
+            assert base_commit == self.default_base_commit(
                 repo
             )  # TODO: support for non-default target branches
-            data.base_ref = base_ref
+            data.base_commit = base_commit
         elif trigger.repo == repo:
             assert has_job  # we should not trigger CI from repos that are no longer part of the workspace
-            base_ref = await self.pr_base_ref(repo)
-            merge_base = repo.uniq_merge_base(base_ref, job_ref)
+            base_commit = await self.pr_base_commit(repo)
+            merge_base = repo.uniq_merge_base(base_commit, job_commit)
             logger.info(
-                f"{repo.github_path}: Using merge base of {base_ref} and job ref {job_ref}: {merge_base}"
+                f"{repo.github_path}: Using merge base of {base_commit} and job ref {job_commit}: {merge_base}"
             )
-            data.base_ref = merge_base
+            data.base_commit = merge_base
         elif trigger.labels.same_branch and job_branch == trigger.branch:
             if has_job:
-                base_ref = await self.pr_base_ref(repo)
-                merge_base = repo.uniq_merge_base(base_ref, job_ref)
+                base_commit = await self.pr_base_commit(repo)
+                merge_base = repo.uniq_merge_base(base_commit, job_commit)
                 logger.info(
-                    f"{repo.github_path}: Using merge base of {base_ref} and job ref {job_ref}: {merge_base}"
+                    f"{repo.github_path}: Using merge base of {base_commit} and job ref {job_commit}: {merge_base}"
                 )
-                data.base_ref = merge_base
+                data.base_commit = merge_base
             else:
-                data.base_ref = await self.pr_base_ref(repo)
-                logger.info(f"{repo.github_path}: Using: {data.base_ref}")
-                repo.ensure_fetched(data.base_ref, depth=None)
+                data.base_commit = await self.pr_base_commit(repo)
+                logger.info(f"{repo.github_path}: Using: {data.base_commit}")
+                repo.ensure_fetched(data.base_commit, depth=None)
         elif trigger.labels.same_branch and job_branch == nondefault_pr_base:
             if has_job:
-                data.base_ref = f"origin/{nondefault_pr_base}"
+                data.base_commit = f"origin/{nondefault_pr_base}"
                 logger.info(
-                    f"{repo.github_path}: Using non-default target branch {nondefault_pr_base} of triggering PR: {data.base_ref}"
+                    f"{repo.github_path}: Using non-default target branch {nondefault_pr_base} of triggering PR: {data.base_commit}"
                 )
             else:
-                data.base_ref = f"origin/{repo.default_branch}"
-                repo.ensure_fetched(data.base_ref, depth=None)
+                data.base_commit = f"origin/{repo.default_branch}"
+                repo.ensure_fetched(data.base_commit, depth=None)
                 logger.info(
-                    f"{repo.github_path}: Using default branch: {data.base_ref}"
+                    f"{repo.github_path}: Using default branch: {data.base_commit}"
                 )
         else:
             # whatever we used for the main job, we'll use it for the comparison base
             assert has_job
-            data.base_ref = job_ref
-        return data.base_ref
+            data.base_commit = job_commit
+        return data.base_commit

-    async def workspace_base_ref(self, trigger):
-        return await self.base_ref(trigger, Workspace().repo)
+    async def workspace_base_commit(self, trigger):
+        return await self.base_commit(trigger, Workspace().repo)

-    async def compute_base_refs(self, trigger, repos):
+    async def compute_base_commits(self, trigger, repos):
         for repo in repos:
-            await self.base_ref(trigger, repo)
+            await self.base_commit(trigger, repo)

     async def check_invariants(self, trigger, repos):
         fail = False
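`uniq_merge_base` itself is not part of this diff; assuming it wraps GitPython's `merge_base` and insists on a unique answer, it would look roughly like this:

```python
import git

def uniq_merge_base(repo: git.Repo, a: str, b: str) -> str:
    # merge_base returns a list: criss-cross histories can have several bases.
    bases = repo.merge_base(repo.commit(a), repo.commit(b))
    assert len(bases) == 1, f"ambiguous merge base between {a} and {b}"
    return bases[0].hexsha
```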
@@ -539,7 +543,10 @@
                 trigger_pr = await self.pr(
                     trigger.repo
                 )  # guaranteed != None but the typechecker does not know
-                if trigger_pr is not None and pr.base_ref != trigger_pr.base_ref:
+                if (
+                    trigger_pr is not None
+                    and pr.base_commit != trigger_pr.base_commit
+                ):
                     wrong_targets[repo] = pr

         for repo in missing_prs:
@@ -549,7 +556,7 @@
             fail = True
         for repo, pr in wrong_targets.items():
             logger.error(
-                f"Repo {repo.github_path} has a branch {trigger.branch} and PR {pr.number} but the PR's target branch is {pr.base_ref}, not {trigger.branch}. {generic_msg}"
+                f"Repo {repo.github_path} has a branch {trigger.branch} and PR {pr.number} but the PR's target branch is {pr.base_commit}, not {trigger.branch}. {generic_msg}"
             )
             fail = True

@@ -564,7 +571,7 @@
             generic_msg = "All repos participating in a \"pull_request\" pipeline must each have either mergeable PRs, or the triggering PR must target the default branch and every participating repo's branch must be fully rebased on the repo's own default branch."
             if pr is None and not repo.git_repo.is_ancestor(
                 repo.git_repo.commit(f"origin/{repo.default_branch}"),
-                repo.git_repo.commit(self[repo].job_ref),
+                repo.git_repo.commit(self[repo].job_commit),
             ):
                 missing_prs_and_not_rebased.append(repo)
             elif pr is not None and not pr.mergeable:
@@ -589,6 +596,15 @@
         if fail:
             raise Exception("Invariants violated. See error messages above.")

+    def warn_missing_pr(self, repo, branch):
+        key = (repo, branch)
+        if key in self.missing_prs_warned:
+            return
+        logger.warning(
+            f"Repo {repo.github_path}: Unable to find PR for branch {branch}"
+        )
+        self.missing_prs_warned.add(key)
+
     async def pr(
         self,
         repo: Repo,
@@ -611,9 +627,7 @@
             data.pr = PRData.of_api_response(gh_pr)
             initial_pr_number = data.pr.number
         else:
-            logger.warning(
-                f"Repo {repo.github_path}: Unable to find PR for branch {branch}"
-            )
+            self.warn_missing_pr(repo, branch)
             return None
         assert initial_pr_number is not None
         gh_pr = await GH.pr(repo.github_path, initial_pr_number)
@@ -626,18 +640,18 @@
             exit(1)
         return data.pr

-    def output_job_refs(self, fname, repos):
+    def output_job_commits(self, fname, repos):
         with open(fname, "w") as f:
             for r in repos:
-                f.write(f"{r.dir_path}: {r.commit_of(self[r].job_ref)}\n")
+                f.write(f"{r.dir_path}: {r.commit_of(self[r].job_commit)}\n")

-    def output_base_refs(self, fname, repos):
+    def output_base_commits(self, fname, repos):
         with open(fname, "w") as f:
             for r in repos:
-                if self[r].base_ref is None:
+                if self[r].base_commit is None:
                     logger.warning(f"Repo {r.github_path} does not have a base commit.")
                     continue
-                f.write(f"{r.dir_path}: {r.commit_of(self[r].base_ref)}\n")
+                f.write(f"{r.dir_path}: {r.commit_of(self[r].base_commit)}\n")


 DATA = ReposData()
@@ -707,8 +721,8 @@ async def backfill_labels(self):
     def non_default_trigger_pr_base(self):
         if self.pr is None:
             return None
-        if self.pr.base_ref and self.repo.default_branch != self.pr.base_ref:
-            return self.pr.base_ref
+        if self.pr.base_commit and self.repo.default_branch != self.pr.base_commit:
+            return self.pr.base_commit
         return None
@@ -724,7 +738,7 @@ def log_level(level_str):

 def add_common_args(parser):
     parser.add_argument(
-        "--debug-level",
+        "--log-level",
         choices=["DEBUG", "INFO", "WARN", "ERROR"],
         type=log_level,
         default="INFO",
@@ -804,11 +818,11 @@ def git_first_existing_choice(git_repo, choices):

 # checkout workspace for job build (not the reference)
 async def checkout_workspace_job(trigger):
-    job_ref = await DATA.workspace_job_ref(trigger)
+    job_commit = await DATA.workspace_job_commit(trigger)
     workspace_git = git.Repo(Workspace().repo.dir_path)
-    logger.info(f"Checking out {Workspace().repo}")
-    Workspace().repo.ensure_fetched(job_ref, depth=None)
-    workspace_git.git.checkout(job_ref)
+    logger.info(f"Checking out {Workspace().repo.github_path}")
+    Workspace().repo.ensure_fetched(job_commit, depth=None)
+    workspace_git.git.checkout(job_commit)


 async def make_checkout_workspace(parser, context, args):
@@ -858,15 +872,17 @@
     args = parser.parse_args(args)
     repos = Repos.make()
     trigger = await Trigger.of_args(repos.find_github_path, args)
-    await DATA.workspace_job_ref(trigger)  # just to initialize everything
-    await DATA.compute_job_refs(trigger, repos.repos)
+    await DATA.workspace_job_commit(trigger)  # just to initialize everything
+    await DATA.compute_job_commits(trigger, repos.repos)
     DATA.print()
-    DATA.output_job_refs(args.output_file_job, repos.repos)
+    DATA.output_job_commits(args.output_file_job, repos.repos)

     await DATA.check_invariants(trigger, repos.repos)

     github_output = (
-        open(args.output_file_github, "a") if args.output_file_github is not None else None
+        open(args.output_file_github, "a")
+        if args.output_file_github is not None
+        else None
     )

     trigger_pr = trigger.pr
@@ -874,15 +890,15 @@
github_output.write(f"pr={trigger_pr.number}\n") if trigger.labels.compare: - base = await DATA.workspace_base_ref(trigger) + base = await DATA.workspace_base_commit(trigger) workspace_git = git.Repo(Workspace().repo.dir_path) workspace_git.git.checkout(base) pexpect.run("make -j nuke CONFIRM=yes") pexpect.run("make -j lightweight-clone") repos = Repos.make() - await DATA.compute_base_refs(trigger, repos.repos) + await DATA.compute_base_commits(trigger, repos.repos) DATA.print() - DATA.output_base_refs(args.output_file_base, repos.repos) + DATA.output_base_commits(args.output_file_base, repos.repos) if github_output is not None: github_output.write("compare=1\n")