diff --git a/CHANGELOG.md b/CHANGELOG.md
index 7afaa520..ee870134 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -13,6 +13,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Added option to follow symbolic links in the `POST /utilities/compress` and `POST /storage/xfer-internal/compress` endpoints
 - Added new "general" section to status/parameters describing `FIRECREST_VERSION` and `FIRECREST_BUILD` timestamp
 - Environment variable `F7T_HOME_ENABLED` to set `False` if `$HOME` is not mounted on systems executing FirecREST commands
+- Add support in the `GET /compute/jobs` endpoint to poll for jobs of any user
 
 ### Changed
 
diff --git a/doc/openapi/firecrest-api.yaml b/doc/openapi/firecrest-api.yaml
index 8669ac58..7af222a9 100644
--- a/doc/openapi/firecrest-api.yaml
+++ b/doc/openapi/firecrest-api.yaml
@@ -1654,6 +1654,12 @@ paths:
               type: array
               items:
                 type: string
+        - name: userJobs
+          in: query
+          description: When True the output will include only the user's jobs. Information about the job's files is not available when False.
+          schema:
+            type: boolean
+            default: true
         - $ref: '#/components/parameters/pageSize'
         - $ref: '#/components/parameters/pageNumber'
       responses:
diff --git a/doc/openapi/firecrest-developers-api.yaml b/doc/openapi/firecrest-developers-api.yaml
index 8ecc9460..d2319f88 100644
--- a/doc/openapi/firecrest-developers-api.yaml
+++ b/doc/openapi/firecrest-developers-api.yaml
@@ -1642,6 +1642,12 @@ paths:
               type: array
               items:
                 type: string
+        - name: userJobs
+          in: query
+          description: When True the output will include only the user's jobs. Information about the job's files is not available when False.
+          schema:
+            type: boolean
+            default: true
         - $ref: '#/components/parameters/pageSize'
         - $ref: '#/components/parameters/pageNumber'
       responses:
diff --git a/src/common/schedulers/slurm.py b/src/common/schedulers/slurm.py
index 7c955fb0..44216591 100644
--- a/src/common/schedulers/slurm.py
+++ b/src/common/schedulers/slurm.py
@@ -128,7 +128,10 @@ def parse_job_info(self, output):
 
     def poll(self, user, jobids=None):
         # In Slurm we implement this with the squeue command
-        cmd = ["squeue", f"--user={user}"]
+        cmd = ["squeue"]
+        if user:
+            cmd.append(f"--user={user}")
+
         if jobids:
             cmd.append(f"--jobs='{','.join(jobids)}'")
 
diff --git a/src/compute/compute.py b/src/compute/compute.py
index 84f4dedc..c71b6ea6 100644
--- a/src/compute/compute.py
+++ b/src/compute/compute.py
@@ -571,19 +571,23 @@ def list_jobs():
             return jsonify(description="Failed to retrieve jobs information"), 404, header
         return jsonify(description="Failed to retrieve jobs information"), 400, header
 
-    is_username_ok = get_username(headers[AUTH_HEADER_NAME])
-
-    if not is_username_ok["result"]:
-        return jsonify(description=is_username_ok["reason"],error="Failed to retrieve jobs information"), 401
-
-    username = is_username_ok["username"]
-
-    app.logger.info(f"Getting information of jobs from {system_name} ({system_addr})")
-
     # job list comma separated:
     jobs = request.args.get("jobs", None)
     pageSize = request.args.get("pageSize", None)
    pageNumber = request.args.get("pageNumber", None)
+    userJobs = get_boolean_var(request.args.get("userJobs", True))
+
+    if userJobs:
+        is_username_ok = get_username(headers[AUTH_HEADER_NAME])
+
+        if not is_username_ok["result"]:
+            return jsonify(description=is_username_ok["reason"],error="Failed to retrieve jobs information"), 401
+
+        username = is_username_ok["username"]
+    else:
+        username = None
+
+    app.logger.info(f"Getting information of jobs from {system_name} ({system_addr})")
 
     if pageSize is not None or pageNumber is not None:
         if pageSize is not None:
@@ -638,7 +642,7 @@ def list_jobs():
 
     # asynchronous task creation
     aTask = threading.Thread(target=list_job_task, name=ID,
-                             args=(headers, system_name, system_addr, action, task_id, pageSize, pageNumber))
+                             args=(headers, system_name, system_addr, action, task_id, pageSize, pageNumber, userJobs))
 
     aTask.start()
 
@@ -653,7 +657,7 @@ def list_jobs():
 
 
 
-def list_job_task(headers,system_name, system_addr,action,task_id,pageSize,pageNumber):
+def list_job_task(headers,system_name, system_addr,action,task_id,pageSize,pageNumber,userJobs=True):
     # exec command
     resp = exec_remote_command(headers, system_name, system_addr, action)
 
@@ -710,7 +714,15 @@ def list_job_task(headers,system_name, system_addr,action,task_id,pageSize,pageNumber):
         jobs = {}
         for job_index, jobinfo in enumerate(jobList):
             # now looking for log and err files location
-            jobinfo = get_job_files(headers, system_name, system_addr, jobinfo, output=True)
+            if userJobs:
+                jobinfo = get_job_files(headers, system_name, system_addr, jobinfo, output=True)
+            else:
+                jobinfo["job_file_out"] = ""
+                jobinfo["job_file_err"] = ""
+                jobinfo["job_file"] = ""
+                jobinfo["job_data_out"] = ""
+                jobinfo["job_data_err"] = ""
+                jobinfo["job_info_extra"] = "Job file information is not available when `userJobs` is False"
             # add jobinfo to the array
             jobs[str(job_index)]=jobinfo