@@ -31,6 +31,26 @@ import yaml
from lavacli.utils import loader, print_u, exc2str


def configure_log_options(parser):
    parser.add_argument(
        "--filters",
        default=None,
        type=str,
        help="comma separated list of levels to show",
    )
    parser.add_argument(
        "--polling",
        default=5,
        type=int,
        help="polling interval in seconds [default=5s]",
    )
    parser.add_argument(
        "--raw", default=False, action="store_true", help="print raw logs"
    )
    parser.add_argument("--start", default=0, type=int, help="start at the given line")
    parser.add_argument("--end", default=None, type=int, help="end at the given line")

def configure_parser(parser, version):
    sub = parser.add_subparsers(dest="sub_sub_command", help="Sub commands")
    sub.required = True
@@ -121,27 +141,7 @@ def configure_parser(parser, version):
action="store_true",
help="do not keep polling until the end of the job",
)
jobs_logs.add_argument(
"--filters",
default=None,
type=str,
help="comma seperated list of levels to show",
)
jobs_logs.add_argument(
"--polling",
default=5,
type=int,
help="polling interval in seconds [default=5s]",
)
jobs_logs.add_argument(
"--raw", default=False, action="store_true", help="print raw logs"
)
jobs_logs.add_argument(
"--start", default=0, type=int, help="start at the given line"
)
jobs_logs.add_argument(
"--end", default=None, type=int, help="end at the given line"
)
configure_log_options(jobs_logs)
# "queue"
if version >= (2019, 1):
@@ -183,12 +183,6 @@ def configure_parser(parser, version):
        default=False,
        help="Print the full url",
    )
    jobs_resubmit.add_argument(
        "--filters",
        default=None,
        type=str,
        help="comma separated list of levels to show",
    )
    jobs_resubmit.add_argument(
        "--follow",
        default=True,
@@ -196,21 +190,7 @@ def configure_parser(parser, version):
action="store_false",
help="resubmit and poll for the logs",
)
jobs_resubmit.add_argument(
"--polling",
default=5,
type=int,
help="polling interval in seconds [default=5s]",
)
jobs_resubmit.add_argument(
"--raw", default=False, action="store_true", help="print raw logs"
)
jobs_resubmit.add_argument(
"--start", default=0, type=int, help="start at the given line"
)
jobs_resubmit.add_argument(
"--end", default=None, type=int, help="end at the given line"
)
configure_log_options(jobs_resubmit)
# "run"
jobs_run = sub.add_parser("run", help="run the job")
@@ -276,6 +256,14 @@ def configure_parser(parser, version):
        default=False,
        help="Print the full url",
    )
    jobs_submit.add_argument(
        "--follow",
        default=True,
        dest="no_follow",
        action="store_false",
        help="submit and poll for the logs",
    )
    configure_log_options(jobs_submit)

    if version >= (2019, 3):
        # "validate"
@@ -615,29 +603,33 @@ def handle_resubmit(proxy, options, config):
        options.filters,
    )
    # Add the job_id to options for handle_logs
    # For multinode, print something and loop on all jobs
    if isinstance(job_id, list):
        for job in job_id:
            print_logs(
                [
                    {
                        "dt": datetime.datetime.utcnow().isoformat(),
                        "lvl": "info",
                        "msg": "[lavacli] Seeing %s logs" % job,
                    }
                ],
                options.raw,
                options.filters,
            )
            options.job_id = job
            handle_logs(proxy, options, config)
    else:
        options.job_id = str(job_id)
        handle_logs(proxy, options, config)
    follow_logs(job_id, proxy, options, config)
    return 0


def follow_logs(job_id, proxy, options, config):
    # Add the job_id to options for handle_logs
    # For multinode, print something and loop on all jobs
    if isinstance(job_id, list):
        for job in job_id:
            print_logs(
                [
                    {
                        "dt": datetime.datetime.utcnow().isoformat(),
                        "lvl": "info",
                        "msg": "[lavacli] Seeing %s logs" % job,
                    }
                ],
                options.raw,
                options.filters,
            )
            options.job_id = job
            handle_logs(proxy, options, config)
    else:
        options.job_id = str(job_id)
        handle_logs(proxy, options, config)


def handle_run(proxy, options, config):
    job_id = proxy.scheduler.jobs.submit(options.definition.read())
    print_logs(
@@ -711,7 +703,7 @@ def handle_show(proxy, options, config):
    return 0


def handle_submit(proxy, options, _):
def handle_submit(proxy, options, config):
    prefix = ""
    if options.print_url:
        parsed = urlparse(options.uri)
@@ -721,14 +713,18 @@ def handle_submit(proxy, options, _):
    for definition in options.definition:
        try:
            job_id = proxy.scheduler.jobs.submit(definition.read())
            if isinstance(job_id, list):
                for job in job_id:
                    print(prefix + str(job))
            else:
                print(prefix + str(job_id))
            if options.no_follow:
                if isinstance(job_id, list):
                    for job in job_id:
                        print(prefix + str(job))
                else:
                    print(prefix + str(job_id))
        except xmlrpc.client.Error as exc:
            print("Unable to submit %s: %s" % (definition.name, exc2str(exc)))
    if not options.no_follow:
        follow_logs(job_id, proxy, options, config)
    return 0
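With the new flag, "lavacli jobs submit --follow job.yaml" (exercised by the test below) streams the job logs instead of printing the job id. A minimal sketch of the same code path driven directly with a fake proxy; the import path and the fake objects are assumptions, not part of the patch:

# Illustrative sketch only: drive handle_submit without the CLI.
import io
from types import SimpleNamespace

from lavacli.commands.jobs import handle_submit  # assumed module path

class FakeJobs:
    def submit(self, definition):
        return 5689  # a multinode job would return a list of ids

proxy = SimpleNamespace(scheduler=SimpleNamespace(jobs=FakeJobs()))
options = SimpleNamespace(
    print_url=False,
    uri="https://lava.example.com/RPC2",          # assumed URI
    definition=[io.StringIO("job definition as yaml")],
    no_follow=True,                               # default: just print the job id
)
handle_submit(proxy, options, config=None)        # prints "5689"
# With no_follow=False the same call would hand off to follow_logs(), which
# streams the logs through handle_logs() against a real scheduler.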
@@ -1914,3 +1914,56 @@ def test_jobs_wait(setup, monkeypatch, capsys):
    assert main() == 0  # nosec
    assert capsys.readouterr()[0] == "Submitted\nRunning.\n"  # nosec


def test_jobs_submit_follow(setup, monkeypatch, capsys, tmpdir):
    version = "2018.4"
    with (tmpdir / "job.yaml").open("w") as f_out:
        f_out.write("job definition as yaml")
    monkeypatch.setattr(
        sys, "argv", ["lavacli", "jobs", "submit", "--follow", str(tmpdir / "job.yaml")]
    )
    now = xmlrpc.client.DateTime("20180128T01:01:01")
    monkeypatch.setattr(
        xmlrpc.client.ServerProxy,
        "data",
        [
            {"request": "system.version", "args": (), "ret": version},
            {
                "request": "scheduler.jobs.submit",
                "args": ("job definition as yaml",),
                "ret": 5689,
            },
            {
                "request": "scheduler.jobs.logs",
                "args": ("5689", 0),
                "ret": (
                    True,
                    '- {"dt": "2018-04-23T12:07:02.569264", "lvl": "info", "msg": "lava-dispatcher, installed at version: 2018.4-1"}',
                ),
            },
            {
                "request": "scheduler.jobs.show",
                "args": ("5689",),
                "ret": {
                    "id": "5689",
                    "description": "desc",
                    "device": "qemu01",
                    "device_type": "qemu",
                    "health_check": False,
                    "pipeline": True,
                    "health": "Complete",
                    "state": "Finished",
                    "submitter": "lava-admin",
                    "submit_time": now,
                    "start_time": now,
                    "end_time": now,
                    "tags": [],
                    "visibility": "Publicly visible",
                    "failure_comment": None,
                },
            },
        ],
    )
    assert main() == 0  # nosec
    assert "lava-dispatcher, installed at version: 2018.4" in capsys.readouterr()[0]