Skip to content
This repository was archived by the owner on Feb 3, 2021. It is now read-only.

Commit 0f99c24

Browse files
committed
Add get-all command to get all job info in one go
1 parent 0fc5c76 commit 0f99c24

File tree

3 files changed

+76
-13
lines changed

3 files changed

+76
-13
lines changed

cli/spark/endpoints/job/get_all.py

Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,21 @@
1+
import argparse
2+
import typing
3+
import time
4+
import aztk.spark
5+
from cli import config
6+
from cli import utils
7+
8+
def setup_parser(parser: argparse.ArgumentParser):
    """Register the command-line arguments of the ``job get-all`` sub-command.

    Adds:
      --id    (required) stored as ``job_id`` — which AZTK job to describe.
      --logs  (flag, defaults to False) — also print completed-application logs.
    """
    # --id is mandatory: it selects the job to fetch.
    parser.add_argument(
        '--id', dest='job_id', required=True,
        help='The unique id of your AZTK job')
    # --logs is an opt-in boolean flag.
    parser.add_argument(
        '--logs', action='store_true',
        help='Also print logs for completed applications')
16+
17+
18+
def execute(args: typing.NamedTuple):
    """Fetch the job identified by ``args.job_id`` and print its full details,
    including per-application logs when ``args.logs`` is set.
    """
    # NOTE(review): 'load_aztk_screts' looks like a typo for 'secrets', but it
    # matches the helper's project-wide name — renaming it here alone would break.
    client = aztk.spark.Client(config.load_aztk_screts())
    job = client.get_job(args.job_id)
    utils.print_job_verbose(client, job, args.logs)

cli/spark/endpoints/job/job.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@
44
from . import get_app_logs
55
from . import get_app
66
from . import get
7+
from . import get_all
78
from . import list
89
from . import list_apps
910
from . import stop_app
@@ -16,6 +17,7 @@ class ClusterAction:
1617
get_app = "get-app"
1718
delete = "delete"
1819
get = "get"
20+
get_all = "get-all"
1921
list = "list"
2022
list_apps = "list-apps"
2123
stop_app = "stop-app"
@@ -36,6 +38,8 @@ def setup_parser(parser: argparse.ArgumentParser):
3638
ClusterAction.delete, help="Delete a Job")
3739
get_parser = subparsers.add_parser(
3840
ClusterAction.get, help="Get information about a Job")
41+
get_all_parser = subparsers.add_parser(
42+
ClusterAction.get_all, help="Get all information about a Job including applications")
3943
list_parser = subparsers.add_parser(
4044
ClusterAction.list, help="List Jobs in your account")
4145
list_apps_parser = subparsers.add_parser(
@@ -52,6 +56,7 @@ def setup_parser(parser: argparse.ArgumentParser):
5256
get_app.setup_parser(get_app_parser)
5357
delete.setup_parser(delete_parser)
5458
get.setup_parser(get_parser)
59+
get_all.setup_parser(get_all_parser)
5560
list.setup_parser(list_parser)
5661
list_apps.setup_parser(list_apps_parser)
5762
stop_app.setup_parser(stop_app_parser)
@@ -66,6 +71,7 @@ def execute(args: typing.NamedTuple):
6671
actions[ClusterAction.get_app] = get_app.execute
6772
actions[ClusterAction.delete] = delete.execute
6873
actions[ClusterAction.get] = get.execute
74+
actions[ClusterAction.get_all] = get_all.execute
6975
actions[ClusterAction.list] = list.execute
7076
actions[ClusterAction.list_apps] = list_apps.execute
7177
actions[ClusterAction.stop_app] = stop_app.execute

cli/utils.py

Lines changed: 49 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44
import threading
55
import time
66
from subprocess import call
7-
from typing import List
7+
from typing import Dict, List
88
import azure.batch.models as batch_models
99
import aztk.spark
1010
from aztk import error
@@ -247,6 +247,13 @@ def print_jobs(jobs: List[aztk.spark.models.Job]):
247247
)
248248

249249

250+
def get_applications(client, job: aztk.spark.models.Job):
    """Return the job's applications as a dict keyed by application name.

    Prefers the applications already attached to *job*; when the job object
    carries none, falls back to asking the service via
    ``client.list_applications``.
    """
    attached = job.applications
    if not attached:
        return client.list_applications(job.id)
    return {app.name: app for app in attached}
255+
256+
250257
def print_job(client, job: aztk.spark.models.Job):
251258
print_format = '{:<36}| {:<15}'
252259

@@ -267,11 +274,41 @@ def print_job(client, job: aztk.spark.models.Job):
267274
log.info(print_format.format("Cluster", "Provisioning"))
268275
log.info("")
269276

270-
if job.applications:
271-
application_summary(job.applications)
277+
application_summary(get_applications(client, job))
278+
log.info("")
279+
280+
281+
def print_job_verbose(client, job: aztk.spark.models.Job, print_logs: bool):
    """Print full details for a job: header, cluster status, application
    summary and table, and — when *print_logs* is set — the log of every
    application that has finished.
    """
    row_format = '{:<36}| {:<15}'

    log.info("")
    log.info("Job %s", job.id)
    log.info("------------------------------------------")
    log.info("State: %s", job.state)
    log.info("Transition Time: %s", utc_to_local(job.state_transition_time))
    log.info("")

    if job.cluster:
        print_cluster_summary(job.cluster)
    elif job.state == 'completed':
        # a completed job no longer has a live cluster
        log.info("Cluster %s", "Job completed, cluster deallocated.")
        log.info("")
    else:
        log.info(row_format.format("Cluster", "Provisioning"))
        log.info("")

    applications = get_applications(client, job)
    application_summary(applications)
    log.info("")
    print_applications(applications)
    log.info("")

    if print_logs:
        # exit_code is only set once an application has finished, so this
        # skips still-running or still-scheduling (None) entries
        for app_name, app in applications.items():
            if app and app.exit_code is not None:
                log.info("Logs for %s:", app_name)
                print(client.get_job_application_log(job.id, app_name).log)
275312

276313

277314
def node_state_count(cluster: aztk.spark.models.Cluster):
@@ -296,22 +333,22 @@ def print_cluster_summary(cluster: aztk.spark.models.Cluster):
296333
log.info("")
297334

298335

299-
def application_summary(applications):
336+
def application_summary(applications: Dict[str, aztk.spark.models.Application]):
300337
states = {"scheduling": 0}
301338
for state in batch_models.TaskState:
302339
states[state.name] = 0
303340

304341
warn_scheduling = False
305342

306-
for application in applications:
307-
if type(application) == str:
343+
for name, application in applications.items():
344+
if application is None:
308345
states["scheduling"] += 1
309346
warn_scheduling = True
310347
else:
311348
states[application.state] += 1
312349

313350
print_format = '{:<17} {:<14}'
314-
log.info("Applications")
351+
log.info("Application States")
315352
log.info("-"*42)
316353
for state in states:
317354
if states[state] > 0:
@@ -320,15 +357,15 @@ def application_summary(applications):
320357
if warn_scheduling:
321358
log.warning("\nNo Spark applications will be scheduled until the master is selected.")
322359

323-
def print_applications(applications):
360+
def print_applications(applications: Dict[str, aztk.spark.models.Application]):
324361
print_format = '{:<36}| {:<15}| {:<16} | {:^9} |'
325362
print_format_underline = '{:-<36}|{:-<16}|{:-<18}|{:-<11}|'
326363
log.info(print_format.format("Applications", "State", "Transition Time", "Exit Code"))
327364
log.info(print_format_underline.format('', '', '', ''))
328365

329366
warn_scheduling = False
330-
for name in applications:
331-
if applications[name] is None:
367+
for name, application in applications.items():
368+
if application is None:
332369
log.info(
333370
print_format.format(
334371
name,
@@ -339,10 +376,9 @@ def print_applications(applications):
339376
)
340377
warn_scheduling = True
341378
else:
342-
application = applications[name]
343379
log.info(
344380
print_format.format(
345-
application.name,
381+
name,
346382
application.state,
347383
utc_to_local(application.state_transition_time),
348384
application.exit_code if application.exit_code is not None else "-"

0 commit comments

Comments
 (0)