job: update list last run to match the UI (#959)
@@ -21,7 +21,17 @@ logger = util.get_logger(__name__)
 emitter = emitting.FlatEmitter()

 DEFAULT_TIMEOUT = 180

 METRONOME_EMBEDDED = '?embed=activeRuns&embed=schedules&embed=history'

+# single job, not a lot of data
+EMBEDS_FOR_JOB_HISTORY = [
+    metronome.EMBED_ACTIVE_RUNS,
+    metronome.EMBED_SCHEDULES,
+    metronome.EMBED_HISTORY]
+
+# unknown number of jobs, using history summary
+EMBEDS_FOR_JOBS_HISTORY = [
+    metronome.EMBED_ACTIVE_RUNS,
+    metronome.EMBED_HISTORY_SUMMARY]

 def main(argv):
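The new embed lists spell out, per call site, what the hard-coded METRONOME_EMBEDDED query string bundled into one constant: a single-job request still asks Metronome for active runs, schedules, and full history, while the jobs list asks only for the lighter history summary. A minimal sketch (plain Python, reusing the one-line `_get_embed_query_string` helper this commit adds to dcos/metronome.py) of the query strings the lists expand to:

    # Sketch: how the embed lists expand into Metronome query strings.
    def _get_embed_query_string(embed_list):
        return '?{}'.format('&'.join('embed=%s' % (item) for item in embed_list))

    # single job: full history (same string as METRONOME_EMBEDDED)
    print(_get_embed_query_string(['activeRuns', 'schedules', 'history']))
    # -> ?embed=activeRuns&embed=schedules&embed=history

    # jobs list: history summary only
    print(_get_embed_query_string(['activeRuns', 'historySummary']))
    # -> ?embed=activeRuns&embed=historySummary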
@@ -239,16 +249,12 @@ def _kill(job_id, run_id, all=False):


 def _list(json_flag=False):
-    """
+    """ Provides a list of jobs along with their active runs and history summary
     :returns: process return code
     :rtype: int
     """
-
-    try:
-        client = metronome.create_client()
-        json_list = client.get_jobs()
-    except DCOSException as e:
-        raise DCOSException(e)
+    client = metronome.create_client()
+    json_list = client.get_jobs(EMBEDS_FOR_JOBS_HISTORY)

     if json_flag:
         emitter.publish(json_list)
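With the historySummary embed, each element returned by client.get_jobs() now carries a historySummary block alongside the job definition. A minimal sketch of the shape _list hands to the table renderer, with values taken from the pikachu fixture added later in this diff:

    # Sketch: one element of the list returned by
    # get_jobs(EMBEDS_FOR_JOBS_HISTORY).
    job = {
        'id': 'pikachu',
        'historySummary': {
            'successCount': 1,
            'failureCount': 0,
            'lastSuccessAt': '2017-03-31T14:22:01.541+0000',
            'lastFailureAt': None,
        },
    }
    print(job['historySummary']['lastSuccessAt'])  # feeds the new Last Run column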
@@ -266,44 +272,32 @@ def _history(job_id, json_flag=False, show_failures=False):
     :returns: process return code
     :rtype: int
     """
-    response = None
-    url = urllib.parse.urljoin(_get_api_url('v1/jobs/'),
-                               job_id + METRONOME_EMBEDDED)
-    try:
-        response = _do_request(url, 'GET')
-    except DCOSHTTPException as e:
-        raise DCOSException("Job ID does NOT exist.")
-    except DCOSException as e:
-        raise DCOSException(e)
-
-    if response.status_code is not 200:
-        raise DCOSException("Job ID does NOT exist.")
-
-    json_history = _read_http_response_body(response)
+    client = metronome.create_client()
+    json_history = client.get_job(job_id, EMBEDS_FOR_JOB_HISTORY)

     if 'history' not in json_history:
         return 0

     if json_flag:
         emitter.publish(json_history)
     else:
         emitter.publish(_get_history_message(json_history, job_id))
         table = tables.job_history_table(
             json_history['history']['successfulFinishedRuns'])
         output = six.text_type(table)
         if output:
             emitter.publish(output)

         if show_failures:
             emitter.publish(_get_history_message(
                 json_history, job_id, False))
             table = tables.job_history_table(
                 json_history['history']['failedFinishedRuns'])
             output = six.text_type(table)
             if output:
                 emitter.publish(output)

     return 0
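Note that the removed status check compared the integer status code with `is not`, which tests object identity rather than equality; it only appeared to work because CPython caches small integers. A short demonstration of the distinction the old code glossed over:

    # `is` compares identity, not value; it happens to hold for small cached
    # ints like 200 in CPython, but it is an implementation detail, not a
    # correctness guarantee.
    status = 200
    print(status is not 200)  # False here, by accident of int caching
    print(status != 200)      # the comparison the old code actually meant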
@@ -275,21 +275,13 @@ def job_table(job_list):

     fields = OrderedDict([
         ('id', lambda s: s['id']),
-        ('Description', lambda s:
-            s['description'] if 'description' in s else ''),
         ('Status', lambda s: _job_status(s)),
-        ('Last Succesful Run', lambda s: s['history']['lastSuccessAt']
-            if 'history' in s else 'N/A'),
+        ('Last Run', lambda s: _last_run_status(s)),
     ])

-    limits = {
-        "Description": 35
-    }
-
-    tb = truncate_table(fields, job_list, limits, sortby="ID")
+    tb = truncate_table(fields, job_list, None, sortby="ID")
     tb.align['ID'] = 'l'
-    tb.align["DESCRIPTION"] = 'l'
     tb.align["STATUS"] = 'l'
+    tb.align["LAST RUN"] = 'l'

     return tb
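The jobs table now shows ID, STATUS, and LAST RUN to match the UI; the Description and Last Succesful Run columns are gone. A minimal sketch, assuming the prettytable package that dcoscli's table helpers wrap, of the layout recorded in job_list.txt below:

    # Sketch: the new job list layout rendered directly with PrettyTable;
    # truncate_table in dcoscli builds on the same API.
    from prettytable import PrettyTable

    tb = PrettyTable(['ID', 'STATUS', 'LAST RUN'])
    tb.add_row(['pikachu', 'Unscheduled', 'Success'])
    tb.add_row(['snorlax', 'Unscheduled', 'N/A'])
    tb.align['ID'] = 'l'
    tb.align['STATUS'] = 'l'
    tb.align['LAST RUN'] = 'l'
    print(tb)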
@@ -308,7 +300,8 @@ def job_history_table(schedule_list):
         ('finished', lambda s: s['finishedAt']),
     ])
     tb = table(fields, schedule_list, sortby="STARTED")
     tb.align['ID'] = 'l'
+    tb.align["STARTED"] = 'l'
     tb.align["FINISHED"] = 'l'

     return tb
@@ -326,12 +319,14 @@ def schedule_table(schedule_list):
         ('id', lambda s: s['id']),
         ('cron', lambda s: s['cron']),
         ('enabled', lambda s: s['enabled']),
-        ('next run', lambda s: s['nextRunAt']),
         ('concurrency policy', lambda s: s['concurrencyPolicy']),
+        ('next run', lambda s: s['nextRunAt']),
     ])
     tb = table(fields, schedule_list)
     tb.align['ID'] = 'l'
     tb.align['CRON'] = 'l'
     tb.align['ENABLED'] = 'l'
+    tb.align['NEXT RUN'] = 'l'
+    tb.align['CONCURRENCY POLICY'] = 'l'

     return tb
@@ -345,17 +340,52 @@ def job_runs_table(runs_list):
     :rtype: PrettyTable
     """
     fields = OrderedDict([
-        ('job id', lambda s: s['jobId']),
         ('id', lambda s: s['id']),
+        ('job id', lambda s: s['jobId']),
         ('started at', lambda s: s['createdAt']),
     ])
     tb = table(fields, runs_list)
     tb.align['ID'] = 'l'
     tb.align['JOB ID'] = 'l'
     tb.align['STARTED AT'] = 'l'

     return tb


+def _str_to_datetime(datetime_str):
+    """ Takes a JSON date of `2017-03-30T15:50:16.187+0000` format and
+    returns a datetime.
+
+    :param datetime_str: JSON date
+    :type datetime_str: str
+    :rtype: datetime
+    """
+    if not datetime_str:
+        return None
+    datetime_str = datetime_str.split('+')[0]
+    return datetime.datetime.strptime(datetime_str, "%Y-%m-%dT%H:%M:%S.%f")
+
+
+def _last_run_status(job):
+    """ Provided a job with embedded history, returns a status based on the
+    following rules:
+    0 runs = 'N/A'
+    last success > last failure = 'Success', otherwise 'Failed'
+
+    :param job: JSON job with embedded history
+    :type job: dict
+    :rtype: str
+    """
+    last_success = _str_to_datetime(job['historySummary']['lastSuccessAt'])
+    last_failure = _str_to_datetime(job['historySummary']['lastFailureAt'])
+    if not last_success and not last_failure:
+        return 'N/A'
+    elif ((last_success and not last_failure) or
+          (last_success and last_success > last_failure)):
+        return 'Success'
+    else:
+        return 'Failed'
+
+
 def _job_status(job):
     """Utility function that returns the status of a job
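The new Last Run column is computed by the two helpers above. A self-contained sketch exercising the rules with the historySummary values from the fixtures added below:

    import datetime

    def _str_to_datetime(datetime_str):
        if not datetime_str:
            return None
        return datetime.datetime.strptime(
            datetime_str.split('+')[0], "%Y-%m-%dT%H:%M:%S.%f")

    def _last_run_status(job):
        last_success = _str_to_datetime(job['historySummary']['lastSuccessAt'])
        last_failure = _str_to_datetime(job['historySummary']['lastFailureAt'])
        if not last_success and not last_failure:
            return 'N/A'
        elif ((last_success and not last_failure) or
              (last_success and last_success > last_failure)):
            return 'Success'
        else:
            return 'Failed'

    # snorlax: no runs recorded yet
    print(_last_run_status(
        {'historySummary': {'lastSuccessAt': None, 'lastFailureAt': None}}))
    # -> N/A
    # pikachu: one success, no failures
    print(_last_run_status(
        {'historySummary': {'lastSuccessAt': '2017-03-31T14:22:01.541+0000',
                            'lastFailureAt': None}}))
    # -> Success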
cli/tests/fixtures/metronome.py (new file, 136 lines):
@@ -0,0 +1,136 @@
+def job_list_fixture():
+    """Job list fixture
+
+    :rtype: dict
+    """
+
+    return [
+        {
+            "description": "sleeping is what is do",
+            "historySummary": {
+                "failureCount": 0,
+                "lastFailureAt": None,
+                "lastSuccessAt": None,
+                "successCount": 0
+            },
+            "id": "snorlax",
+            "labels": {},
+            "run": {
+                "artifacts": [],
+                "cmd": "sleep 10",
+                "cpus": 0.01,
+                "disk": 0,
+                "env": {},
+                "maxLaunchDelay": 3600,
+                "mem": 32,
+                "placement": {
+                    "constraints": []
+                },
+                "restart": {
+                    "policy": "NEVER"
+                },
+                "volumes": []
+            }
+        },
+        {
+            "description": "electrifying rodent",
+            "historySummary": {
+                "failureCount": 0,
+                "lastFailureAt": None,
+                "lastSuccessAt": "2017-03-31T14:22:01.541+0000",
+                "successCount": 1
+            },
+            "id": "pikachu",
+            "labels": {},
+            "run": {
+                "artifacts": [],
+                "cmd": "sleep 10",
+                "cpus": 0.01,
+                "disk": 0,
+                "env": {},
+                "maxLaunchDelay": 3600,
+                "mem": 32,
+                "placement": {
+                    "constraints": []
+                },
+                "restart": {
+                    "policy": "NEVER"
+                },
+                "volumes": []
+            }
+        }]
+
+
+def job_run_fixture():
+    """Job run fixture
+
+    :rtype: dict
+    """
+
+    return [
+        {
+            "completedAt": None,
+            "createdAt": "2017-03-31T21:05:30.613+0000",
+            "id": "20170331210530QHpRU",
+            "jobId": "pikachu",
+            "status": "ACTIVE",
+            "tasks": [
+                {
+                    "id": "pikachu_20170331210530QHpRU.c5e4b1e7-1655-11e7-8bd5-6ef119b8e20f",  # NOQA
+                    "startedAt": "2017-03-31T21:05:31.499+0000",
+                    "status": "TASK_RUNNING"
+                }
+            ]
+        },
+        {
+            "completedAt": None,
+            "createdAt": "2017-03-31T21:05:32.422+0000",
+            "id": "20170331210532uxgVF",
+            "jobId": "pikachu",
+            "status": "ACTIVE",
+            "tasks": [
+                {
+                    "id": "pikachu_20170331210532uxgVF.c8e324d8-1655-11e7-8bd5-6ef119b8e20f",  # NOQA
+                    "startedAt": "2017-03-31T21:05:36.417+0000",
+                    "status": "TASK_RUNNING"
+                }
+            ]
+        }]
+
+
+def job_history_fixture():
+    """Job history fixture
+
+    :rtype: dict
+    """
+
+    return [
+        {
+            "createdAt": "2017-03-31T21:05:32.422+0000",
+            "finishedAt": "2017-03-31T21:05:46.805+0000",
+            "id": "20170331210532uxgVF"
+        },
+        {
+            "createdAt": "2017-03-31T21:05:30.613+0000",
+            "finishedAt": "2017-03-31T21:05:41.740+0000",
+            "id": "20170331210530QHpRU"
+        }]
+
+
+def job_schedule_fixture():
+    """Job schedule fixture
+
+    :rtype: dict
+    """
+
+    return [
+        {
+            "concurrencyPolicy": "ALLOW",
+            "cron": "20 0 * * *",
+            "enabled": True,
+            "id": "nightly",
+            "nextRunAt": "2017-04-01T00:20:00.000+0000",
+            "startingDeadlineSeconds": 900,
+            "timezone": "UTC"
+        }]
cli/tests/unit/data/job_history.txt (new file, 3 lines):
@@ -0,0 +1,3 @@
+ID STARTED FINISHED
+20170331210530QHpRU 2017-03-31T21:05:30.613+0000 2017-03-31T21:05:41.740+0000
+20170331210532uxgVF 2017-03-31T21:05:32.422+0000 2017-03-31T21:05:46.805+0000
cli/tests/unit/data/job_list.txt (new file, 3 lines):
@@ -0,0 +1,3 @@
+ID STATUS LAST RUN
+pikachu Unscheduled Success
+snorlax Unscheduled N/A
cli/tests/unit/data/job_runs.txt (new file, 3 lines):
@@ -0,0 +1,3 @@
+ID JOB ID STARTED AT
+20170331210530QHpRU pikachu 2017-03-31T21:05:30.613+0000
+20170331210532uxgVF pikachu 2017-03-31T21:05:32.422+0000
cli/tests/unit/data/job_schedule.txt (new file, 2 lines):
@@ -0,0 +1,2 @@
+ID CRON ENABLED CONCURRENCY POLICY NEXT RUN
+nightly 20 0 * * * True ALLOW 2017-04-01T00:20:00.000+0000
@@ -17,6 +17,8 @@ from ..fixtures.marathon import (app_fixture, app_task_fixture,
 from ..fixtures.metrics import (agent_metrics_node_details_fixture,
                                 agent_metrics_node_summary_fixture,
                                 agent_metrics_task_details_fixture)
+from ..fixtures.metronome import (job_history_fixture, job_list_fixture,
+                                  job_run_fixture, job_schedule_fixture)
 from ..fixtures.node import slave_fixture
 from ..fixtures.package import package_fixture, search_result_fixture
 from ..fixtures.service import framework_fixture
@@ -83,6 +85,30 @@ def test_auth_providers_table():
                 'tests/unit/data/auth_provider.txt')


+def test_job_list_table():
+    _test_table(tables.job_table,
+                job_list_fixture(),
+                'tests/unit/data/job_list.txt')
+
+
+def test_job_runs_table():
+    _test_table(tables.job_runs_table,
+                job_run_fixture(),
+                'tests/unit/data/job_runs.txt')
+
+
+def test_job_history_table():
+    _test_table(tables.job_history_table,
+                job_history_fixture(),
+                'tests/unit/data/job_history.txt')
+
+
+def test_job_schedule_table():
+    _test_table(tables.schedule_table,
+                job_schedule_fixture(),
+                'tests/unit/data/job_schedule.txt')
+
+
 def test_pod_table():
     _test_table(tables.pod_table,
                 pod_list_fixture(),
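The _test_table helper already exists in this test module and is not part of the diff; presumably it renders the table for the given fixture and compares it against the recorded data file. A hypothetical sketch of such a helper:

    import six

    def _test_table(table_fn, fixture, path):
        # Hypothetical: render the PrettyTable and compare it line by line
        # with the expected output recorded in the data file.
        rendered = six.text_type(table_fn(fixture)).splitlines()
        with open(path) as f:
            expected = f.read().splitlines()
        assert ([l.rstrip() for l in rendered] ==
                [l.rstrip() for l in expected])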
@@ -70,6 +70,19 @@ class DCOSAuthorizationException(DCOSHTTPException):
         return "You are not authorized to perform this operation"


+class DCOSConnectionError(DCOSException):
+    """An Error object for when a connection attempt fails.
+
+    :param url: URL for the Request
+    :type url: str
+    """
+    def __init__(self, url):
+        self.url = url
+
+    def __str__(self):
+        return 'URL [{0}] is unreachable.'.format(self.url)
+
+
 class DCOSBadRequest(DCOSHTTPException):
     """A wrapper around Response objects for HTTP Bad Request (400).
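A small sketch of what the new exception type buys callers; the stub base class stands in for dcos.errors.DCOSException:

    class DCOSException(Exception):
        pass  # stub for dcos.errors.DCOSException

    class DCOSConnectionError(DCOSException):
        def __init__(self, url):
            self.url = url

        def __str__(self):
            return 'URL [{0}] is unreachable.'.format(self.url)

    # Connection failures are now a distinct type, so callers (see the
    # PackageManager hunk below) can re-raise them instead of swallowing
    # them along with every other Exception.
    try:
        raise DCOSConnectionError('http://master.mesos')
    except DCOSConnectionError as e:
        print(str(e))  # URL [http://master.mesos] is unreachable.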
@@ -1,4 +1,5 @@
 import requests
+
 from requests.auth import AuthBase

 from six.moves.urllib.parse import urlparse
@@ -6,7 +7,7 @@ from six.moves.urllib.parse import urlparse
 from dcos import config, util
 from dcos.errors import (DCOSAuthenticationException,
                          DCOSAuthorizationException, DCOSBadRequest,
-                         DCOSException, DCOSHTTPException,
+                         DCOSConnectionError, DCOSException, DCOSHTTPException,
                          DCOSUnprocessableException)
@@ -107,7 +108,7 @@ def _request(method,
         raise DCOSException(msg)
     except requests.exceptions.ConnectionError as e:
         logger.exception("HTTP Connection Error")
-        raise DCOSException('URL [{0}] is unreachable: {1}'.format(url, e))
+        raise DCOSConnectionError(url)
     except requests.exceptions.Timeout as e:
         logger.exception("HTTP Timeout")
         raise DCOSException('Request to URL [{0}] timed out.'.format(url))
@@ -7,6 +7,11 @@ from dcos.errors import DCOSException

 logger = util.get_logger(__name__)

+EMBED_ACTIVE_RUNS = 'activeRuns'
+EMBED_SCHEDULES = 'schedules'
+EMBED_HISTORY = 'history'
+EMBED_HISTORY_SUMMARY = 'historySummary'
+

 def create_client(toml_config=None):
     """Creates a Metronome client with the supplied configuration.
@@ -28,6 +33,10 @@ def create_client(toml_config=None):
     return Client(rpc_client)


+def _get_embed_query_string(embed_list):
+    return '?{}'.format('&'.join('embed=%s' % (item) for item in embed_list))
+
+
 def _get_metronome_url(toml_config=None):
     """
     :param toml_config: configuration dictionary
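Both client methods below compose their request path from the optional embed list. A quick sketch of the resulting URLs, reusing the helper above:

    def _get_embed_query_string(embed_list):
        return '?{}'.format('&'.join('embed=%s' % (item) for item in embed_list))

    # get_jobs(EMBEDS_FOR_JOBS_HISTORY)
    embed_with = ['activeRuns', 'historySummary']
    embeds = _get_embed_query_string(embed_with) if embed_with else None
    print('v1/jobs{}'.format(embeds) if embeds else 'v1/jobs')
    # -> v1/jobs?embed=activeRuns&embed=historySummary

    # get_job('/pikachu', EMBEDS_FOR_JOB_HISTORY)
    job_id = '/pikachu'
    embeds = _get_embed_query_string(['activeRuns', 'schedules', 'history'])
    print('v1/jobs{}{}'.format(job_id, embeds))
    # -> v1/jobs/pikachu?embed=activeRuns&embed=schedules&embed=history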
@@ -71,32 +80,35 @@ class Client(object):

         return response.json()

-    def get_job(self, job_id):
-        """Returns a representation of the requested application version. If
-        version is None the return the latest version.
+    def get_job(self, job_id, embed_with=None):
+        """Returns a representation of the requested job.

-        :param app_id: the ID of the application
-        :type app_id: str
-        :param version: application version as a ISO8601 datetime
-        :type version: str
+        :param job_id: the ID of the job
+        :type job_id: str
+        :param embed_with: list of strings to ?embed=str&embed=str2...
+        :type embed_with: [str]
         :returns: the requested Metronome job
         :rtype: dict
         """
         # refactor util name it isn't marathon specific
         job_id = util.normalize_marathon_id_path(job_id)
-        path = 'v1/jobs{}'.format(job_id)
-
-        response = self._rpc.http_req(http.get, path)
+        embeds = _get_embed_query_string(embed_with) if embed_with else None
+        url = ('v1/jobs{}{}'.format(job_id, embeds)
+               if embeds else 'v1/jobs{}'.format(job_id))
+        response = self._rpc.http_req(http.get, url)
         return response.json()

-    def get_jobs(self):
+    def get_jobs(self, embed_with=None):
         """Get a list of known jobs.

+        :param embed_with: list of strings to ?embed=str&embed=str2...
+        :type embed_with: [str]
         :returns: list of known jobs
         :rtype: [dict]
         """
-
-        response = self._rpc.http_req(http.get, 'v1/jobs')
+        embeds = _get_embed_query_string(embed_with) if embed_with else None
+        url = 'v1/jobs{}'.format(embeds) if embeds else 'v1/jobs'
+        response = self._rpc.http_req(http.get, url)
         return response.json()

     def add_job(self, job_resource):
@@ -8,7 +8,8 @@ from six.moves import urllib

 from dcos import cosmos, emitting, util
 from dcos.errors import (DCOSAuthenticationException,
                          DCOSAuthorizationException, DCOSBadRequest,
-                         DCOSException, DCOSHTTPException, DefaultError)
+                         DCOSConnectionError, DCOSException, DCOSHTTPException,
+                         DefaultError)

 logger = util.get_logger(__name__)
 emitter = emitting.FlatEmitter()
@@ -71,6 +72,8 @@ class PackageManager:
             raise
         except DCOSAuthorizationException:
             raise
+        except DCOSConnectionError:
+            raise
         except Exception as e:
             logger.exception(e)
             return False