tests: move basic functionality tests to "test_sanity" (#919)
This commit is contained in:
4
cli/tests/fixtures/marathon.py
vendored
4
cli/tests/fixtures/marathon.py
vendored
@@ -1,6 +1,6 @@
|
||||
import os
|
||||
|
||||
from ..integrations.common import file_json_ast
|
||||
from ..integrations.helpers.common import file_json_ast
|
||||
|
||||
PODS_FILE_PATH_BASE = 'tests/data/marathon/pods'
|
||||
|
||||
@@ -10,8 +10,6 @@ UNGOOD_POD_FILE_PATH = \
|
||||
os.path.join(PODS_FILE_PATH_BASE, 'ungood.json')
|
||||
UPDATED_GOOD_POD_FILE_PATH = \
|
||||
os.path.join(PODS_FILE_PATH_BASE, 'updated_good.json')
|
||||
GOOD_POD_STATUS_FILE_PATH = \
|
||||
os.path.join(PODS_FILE_PATH_BASE, 'good_status.json')
|
||||
|
||||
DOUBLE_POD_ID = 'double-pod'
|
||||
DOUBLE_POD_FILE_PATH = os.path.join(PODS_FILE_PATH_BASE, 'double.json')
|
||||
|
||||
@@ -1,872 +0,0 @@
|
||||
import base64
|
||||
import collections
|
||||
import contextlib
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import time
|
||||
import zipfile
|
||||
|
||||
import six
|
||||
from six.moves import urllib
|
||||
|
||||
from dcos import config, http
|
||||
|
||||
|
||||
def exec_command(cmd, env=None, stdin=None):
    """Run a CLI command and capture its output.

    :param cmd: Program and arguments
    :type cmd: [str]
    :param env: Environment variables
    :type env: dict | None
    :param stdin: File to use for stdin
    :type stdin: file
    :returns: A tuple with the returncode, stdout and stderr
    :rtype: (int, bytes, bytes)
    """

    print('CMD: {!r}'.format(cmd))

    proc = subprocess.Popen(
        cmd,
        stdin=stdin,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        env=env)

    raw_out, raw_err = proc.communicate()
    # Drop '\r' so Windows line endings compare equal to Unix ones.
    stdout = raw_out.replace(b'\r', b'')
    stderr = raw_err.replace(b'\r', b'')

    # Always echo both streams to aid debugging of failed assertions.
    print('STDOUT: {}'.format(_truncate(stdout.decode('utf-8'))))
    print('STDERR: {}'.format(_truncate(stderr.decode('utf-8'))))

    return (proc.returncode, stdout, stderr)
|
||||
|
||||
|
||||
def _truncate(s, length=8000):
|
||||
if len(s) > length:
|
||||
return s[:length-3] + '...'
|
||||
else:
|
||||
return s
|
||||
|
||||
|
||||
def assert_command(
        cmd,
        returncode=0,
        stdout=b'',
        stderr=b'',
        env=None,
        stdin=None):
    """Run a CLI command and assert its return code and output match.

    :param cmd: Program and arguments
    :type cmd: list of str
    :param returncode: Expected return code
    :type returncode: int
    :param stdout: Expected stdout
    :type stdout: bytes
    :param stderr: Expected stderr
    :type stderr: bytes
    :param env: Environment variables
    :type env: dict of str to str
    :param stdin: File to use for stdin
    :type stdin: file
    :rtype: None
    """

    actual_rc, actual_out, actual_err = exec_command(cmd, env, stdin)

    # Include both actual and expected values in the assertion message.
    assert actual_rc == returncode, (actual_rc, returncode)
    assert actual_out == stdout, (actual_out, stdout)
    assert actual_err == stderr, (actual_err, stderr)
|
||||
|
||||
|
||||
def watch_deployment(deployment_id, count):
|
||||
"""Wait for a deployment to complete.
|
||||
|
||||
:param deployment_id: deployment id
|
||||
:type deployment_id: str
|
||||
:param count: max number of seconds to wait
|
||||
:type count: int
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
returncode, stdout, stderr = exec_command(
|
||||
['dcos', 'marathon', 'deployment', 'watch',
|
||||
'--max-count={}'.format(count), deployment_id])
|
||||
|
||||
assert returncode == 0
|
||||
assert stderr == b''
|
||||
|
||||
|
||||
def watch_job_deployments(count=300):
|
||||
"""Wait for all deployments to complete.
|
||||
|
||||
:param count: max number of seconds to wait
|
||||
:type count: int
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
deps = list_job_deployments()
|
||||
for dep in deps:
|
||||
watch_deployment(dep['id'], count)
|
||||
|
||||
|
||||
def watch_all_deployments(count=300):
|
||||
"""Wait for all deployments to complete.
|
||||
|
||||
:param count: max number of seconds to wait
|
||||
:type count: int
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
deps = list_deployments()
|
||||
for dep in deps:
|
||||
watch_deployment(dep['id'], count)
|
||||
|
||||
|
||||
def wait_for_service(service_name, number_of_services=1, max_count=300):
    """Wait for a service to register with Mesos.

    :param service_name: name of service
    :type service_name: str
    :param number_of_services: number of services with that name
    :type number_of_services: int
    :param max_count: max number of seconds to wait
    :type max_count: int
    :rtype: None
    """

    count = 0
    while count < max_count:
        services = get_services()

        if (len([service for service in services
                 if service['name'] == service_name]) >= number_of_services):
            return

        # Bug fix: the loop previously spun without pausing, so
        # ``max_count`` did not actually bound the wait in seconds and
        # the loop hammered the service endpoint.
        time.sleep(1)
        count += 1
|
||||
|
||||
|
||||
def add_job(job_path):
|
||||
""" Add a job, and wait for it to deploy
|
||||
|
||||
:param job_path: path to job's json definition
|
||||
:type job_path: str
|
||||
:param wait: whether to wait for the deploy
|
||||
:type wait: bool
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
assert_command(['dcos', 'job', 'add', job_path])
|
||||
|
||||
|
||||
def add_app(app_path, wait=True):
    """ Add an app, and wait for it to deploy

    :param app_path: path to app's json definition
    :type app_path: str
    :param wait: whether to wait for the deploy
    :type wait: bool
    :rtype: None
    """

    cmd = ['dcos', 'marathon', 'app', 'add', app_path]
    returncode, stdout, stderr = exec_command(cmd)
    assert returncode == 0
    # Bug fix: the pattern must be a raw string -- '\S' in a plain
    # literal is an invalid escape sequence (SyntaxWarning, and an
    # error on newer Python versions).
    assert re.fullmatch(r'Created deployment \S+\n', stdout.decode('utf-8'))
    assert stderr == b''

    if wait:
        watch_all_deployments()
|
||||
|
||||
|
||||
def remove_group(group_id):
|
||||
assert_command(['dcos', 'marathon', 'group', 'remove', group_id])
|
||||
|
||||
# Let's make sure that we don't return until the deployment has finished
|
||||
watch_all_deployments()
|
||||
|
||||
|
||||
def remove_app(app_id):
|
||||
""" Remove an app
|
||||
|
||||
:param app_id: id of app to remove
|
||||
:type app_id: str
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
assert_command(['dcos', 'marathon', 'app', 'remove', '--force', app_id])
|
||||
|
||||
|
||||
def remove_pod(pod_id, force=True):
|
||||
""" Remove a pod
|
||||
|
||||
:param pod_id: id of app to remove
|
||||
:type pod_id: str
|
||||
:param force: whether to force a remove
|
||||
:type force: bool
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
cmd = ['dcos', 'marathon', 'pod', 'remove', pod_id]
|
||||
if force:
|
||||
cmd += ['--force']
|
||||
assert_command(cmd)
|
||||
|
||||
|
||||
def remove_job(job_id):
|
||||
""" Remove a job
|
||||
|
||||
:param job_id: id of job to remove
|
||||
:type job_id: str
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
assert_command(['dcos', 'job', 'remove',
|
||||
'--stop-current-job-runs', job_id])
|
||||
|
||||
|
||||
def package_install(package, deploy=False, args=None):
    """ Calls `dcos package install`

    :param package: name of the package to install
    :type package: str
    :param deploy: whether or not to wait for the deploy
    :type deploy: bool
    :param args: extra CLI args
    :type args: [str] | None
    :rtype: None
    """

    # ``args=None`` replaces the mutable default ``args=[]`` --
    # a shared-list default is a classic Python pitfall.
    returncode, stdout, stderr = exec_command(
        ['dcos', 'package', 'install', '--yes', package] + (args or []))

    assert returncode == 0
    assert stderr == b''

    if deploy:
        watch_all_deployments()
|
||||
|
||||
|
||||
def package_uninstall(package_name, args=None, stderr=b''):
    """ Calls `dcos package uninstall`

    :param package_name: name of the package to uninstall
    :type package_name: str
    :param args: extra CLI args
    :type args: [str] | None
    :param stderr: expected string in stderr for package uninstall
    :type stderr: bytes
    :rtype: None
    """

    # ``args=None`` replaces the mutable default ``args=[]``.
    assert_command(
        ['dcos', 'package', 'uninstall', package_name] + (args or []),
        stderr=stderr)
|
||||
|
||||
|
||||
def get_services(expected_count=None, args=None):
    """Get services

    :param expected_count: assert exactly this number of services are
        running
    :type expected_count: int | None
    :param args: cli arguments
    :type args: [str] | None
    :returns: services
    :rtype: [dict]
    """

    # The Sequence ABC lives in collections.abc; the collections-level
    # alias was removed in Python 3.10, so import the submodule here.
    import collections.abc

    # ``args=None`` replaces the mutable default ``args=[]``.
    returncode, stdout, stderr = exec_command(
        ['dcos', 'service', '--json'] + (args or []))

    assert returncode == 0
    assert stderr == b''

    services = json.loads(stdout.decode('utf-8'))
    assert isinstance(services, collections.abc.Sequence)
    if expected_count is not None:
        assert len(services) == expected_count

    return services
|
||||
|
||||
|
||||
def list_deployments(expected_count=None, app_id=None):
|
||||
"""Get all active deployments.
|
||||
|
||||
:param expected_count: assert that number of active deployments
|
||||
equals `expected_count`
|
||||
:type expected_count: int
|
||||
:param app_id: only get deployments for this app
|
||||
:type app_id: str
|
||||
:returns: active deployments
|
||||
:rtype: [dict]
|
||||
"""
|
||||
|
||||
cmd = ['dcos', 'marathon', 'deployment', 'list', '--json']
|
||||
if app_id is not None:
|
||||
cmd.append(app_id)
|
||||
|
||||
returncode, stdout, stderr = exec_command(cmd)
|
||||
|
||||
result = json.loads(stdout.decode('utf-8'))
|
||||
|
||||
assert returncode == 0
|
||||
if expected_count is not None:
|
||||
assert len(result) == expected_count
|
||||
assert stderr == b''
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def list_job_deployments(expected_count=None, app_id=None):
|
||||
"""Get all active deployments.
|
||||
|
||||
:param expected_count: assert that number of active deployments
|
||||
equals `expected_count`
|
||||
:type expected_count: int
|
||||
:param app_id: only get deployments for this app
|
||||
:type app_id: str
|
||||
:returns: active deployments
|
||||
:rtype: [dict]
|
||||
"""
|
||||
|
||||
cmd = ['dcos', 'job', 'list', '--json']
|
||||
if app_id is not None:
|
||||
cmd.append(app_id)
|
||||
|
||||
returncode, stdout, stderr = exec_command(cmd)
|
||||
|
||||
result = json.loads(stdout.decode('utf-8'))
|
||||
|
||||
assert returncode == 0
|
||||
if expected_count is not None:
|
||||
assert len(result) == expected_count
|
||||
assert stderr == b''
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def show_app(app_id, version=None):
|
||||
"""Show details of a Marathon application.
|
||||
|
||||
:param app_id: The id for the application
|
||||
:type app_id: str
|
||||
:param version: The version, either absolute (date-time) or relative
|
||||
:type version: str
|
||||
:returns: The requested Marathon application
|
||||
:rtype: dict
|
||||
"""
|
||||
|
||||
if version is None:
|
||||
cmd = ['dcos', 'marathon', 'app', 'show', app_id]
|
||||
else:
|
||||
cmd = ['dcos', 'marathon', 'app', 'show',
|
||||
'--app-version={}'.format(version), app_id]
|
||||
|
||||
returncode, stdout, stderr = exec_command(cmd)
|
||||
|
||||
assert returncode == 0
|
||||
assert stderr == b''
|
||||
|
||||
result = json.loads(stdout.decode('utf-8'))
|
||||
assert isinstance(result, dict)
|
||||
assert result['id'] == '/' + app_id
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def show_job(app_id):
|
||||
"""Show details of a Metronome job.
|
||||
|
||||
:param app_id: The id for the application
|
||||
:type app_id: str
|
||||
:returns: The requested Metronome job.
|
||||
:rtype: dict
|
||||
"""
|
||||
|
||||
cmd = ['dcos', 'job', 'show', app_id]
|
||||
|
||||
returncode, stdout, stderr = exec_command(cmd)
|
||||
|
||||
assert returncode == 0
|
||||
assert stderr == b''
|
||||
|
||||
result = json.loads(stdout.decode('utf-8'))
|
||||
assert isinstance(result, dict)
|
||||
assert result['id'] == app_id
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def show_job_schedule(app_id, schedule_id):
|
||||
"""Show details of a Metronome schedule.
|
||||
|
||||
:param app_id: The id for the job
|
||||
:type app_id: str
|
||||
:param schedule_id: The id for the schedule
|
||||
:type schedule_id: str
|
||||
:returns: The requested Metronome job.
|
||||
:rtype: dict
|
||||
"""
|
||||
|
||||
cmd = ['dcos', 'job', 'schedule', 'show', app_id, '--json']
|
||||
|
||||
returncode, stdout, stderr = exec_command(cmd)
|
||||
|
||||
assert returncode == 0
|
||||
assert stderr == b''
|
||||
|
||||
result = json.loads(stdout.decode('utf-8'))
|
||||
assert isinstance(result[0], dict)
|
||||
assert result[0]['id'] == schedule_id
|
||||
|
||||
return result[0]
|
||||
|
||||
|
||||
def service_shutdown(service_id):
|
||||
"""Shuts down a service using the command line program
|
||||
|
||||
:param service_id: the id of the service
|
||||
:type: service_id: str
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
assert_command(['dcos', 'service', 'shutdown', service_id])
|
||||
|
||||
|
||||
def delete_zk_nodes():
|
||||
"""Delete Zookeeper nodes that were created during the tests
|
||||
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
for znode in ['universe', 'cassandra-mesos', 'chronos']:
|
||||
delete_zk_node(znode)
|
||||
|
||||
|
||||
def delete_zk_node(znode):
|
||||
"""Delete Zookeeper node
|
||||
|
||||
:param znode: znode to delete
|
||||
:type znode: str
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
dcos_url = config.get_config_val('core.dcos_url')
|
||||
znode_url = urllib.parse.urljoin(
|
||||
dcos_url,
|
||||
'/exhibitor/exhibitor/v1/explorer/znode/{}'.format(znode))
|
||||
http.delete(znode_url)
|
||||
|
||||
|
||||
def assert_lines(cmd, num_lines, greater_than=False):
    """ Assert stdout contains the expected number of lines

    :param cmd: program and arguments
    :type cmd: [str]
    :param num_lines: expected number of lines for stdout
    :type num_lines: int
    :param greater_than: if True assert that there are at least num_lines
    :type greater_than: bool
    :rtype: None
    """

    returncode, stdout, stderr = exec_command(cmd)

    assert returncode == 0
    assert stderr == b''

    # Counting newline characters is equivalent to
    # ``len(text.split('\n')) - 1``.
    line_count = stdout.decode('utf-8').count('\n')
    if greater_than:
        assert line_count >= num_lines
    else:
        assert line_count == num_lines
|
||||
|
||||
|
||||
def fetch_valid_json(cmd):
    """Assert stdout contains valid JSON

    :param cmd: program and arguments
    :type cmd: [str]
    :returns: parsed JSON AST
    """
    returncode, stdout, stderr = exec_command(cmd)

    assert returncode == 0
    assert stderr == b''
    try:
        return json.loads(stdout.decode('utf-8'))
    except json.JSONDecodeError as decode_error:
        error_text = 'Command "{}" returned invalid JSON'.format(' '.join(cmd))
        # Chain the original decode error so the failing position in the
        # output is preserved in the traceback instead of being swallowed.
        raise Exception(error_text) from decode_error
|
||||
|
||||
|
||||
def file_json_ast(path):
    """Return the JSON AST parsed from the file at *path*.

    :param path: path to file
    :type path: str
    :returns: parsed JSON AST
    """
    with open(path) as json_file:
        return json.load(json_file)
|
||||
|
||||
|
||||
def json_ast_format(ast):
    """Return the given JSON AST formatted as bytes

    :param ast: JSON AST
    :returns: formatted JSON
    :rtype: bytes
    """
    # Stable formatting: sorted keys, two-space indent, trailing newline.
    formatted = json.dumps(ast,
                           sort_keys=True,
                           indent=2,
                           separators=(',', ': '))
    return six.b(formatted) + b'\n'
|
||||
|
||||
|
||||
def file_json(path):
|
||||
""" Returns formatted json from file
|
||||
|
||||
:param path: path to file
|
||||
:type path: str
|
||||
:returns: formatted json
|
||||
:rtype: bytes
|
||||
"""
|
||||
return json_ast_format(file_json_ast(path))
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def app(path, app_id, wait=True):
|
||||
"""Context manager that deploys an app on entrance, and removes it on
|
||||
exit.
|
||||
|
||||
:param path: path to app's json definition:
|
||||
:type path: str
|
||||
:param app_id: app id
|
||||
:type app_id: str
|
||||
:param wait: whether to wait for the deploy
|
||||
:type wait: bool
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
add_app(path, wait)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
remove_app(app_id)
|
||||
watch_all_deployments()
|
||||
|
||||
|
||||
def add_pod(pod_path, wait=True):
    """Add a pod, and wait for it to deploy

    :param pod_path: path to pod's json definition
    :type pod_path: str
    :param wait: whether to wait for the deploy
    :type wait: bool
    :rtype: None
    """

    cmd = ['dcos', 'marathon', 'pod', 'add', pod_path]
    returncode, stdout, stderr = exec_command(cmd)
    assert returncode == 0
    # Bug fix: raw string -- '\S' in a plain literal is an invalid
    # escape sequence (SyntaxWarning, and an error on newer Python).
    assert re.fullmatch(r'Created deployment \S+\n', stdout.decode('utf-8'))
    assert stderr == b''

    if wait:
        watch_all_deployments()
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def pod(path, pod_id, wait=True):
|
||||
"""Context manager that deploys an pod on entrance, and removes it on exit
|
||||
|
||||
:param path: path to pod's json definition:
|
||||
:type path: str
|
||||
:param pod_id: pod id
|
||||
:type pod_id: str
|
||||
:param wait: whether to wait for the deploy
|
||||
:type wait: bool
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
add_pod(path, wait)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
remove_pod(pod_id)
|
||||
watch_all_deployments()
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def pods(pods):
|
||||
"""Context manager that deploys pods on entrance, and removes
|
||||
them on exit.
|
||||
|
||||
:param pods: dict of path/to/pod/json -> pod id
|
||||
:type pods: {}
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
for pod_path in pods:
|
||||
add_pod(pod_path, wait=False)
|
||||
watch_all_deployments()
|
||||
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
for pod_id in list(pods.values()):
|
||||
remove_pod(pod_id)
|
||||
watch_all_deployments()
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def job(path, job_id):
|
||||
"""Context manager that deploys a job on entrance, and removes it on
|
||||
exit.
|
||||
|
||||
:param path: path to job's json definition:
|
||||
:type path: str
|
||||
:param job_id: job id
|
||||
:type job_id: str
|
||||
:param wait: whether to wait for the deploy
|
||||
:type wait: bool
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
add_job(path)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
remove_job(job_id)
|
||||
|
||||
|
||||
@contextlib.contextmanager
def update_config(name, value, env=None):
    """ Context manager for altering config for tests

    Saves the current value of config param ``name``, applies ``value``
    (or unsets the param when ``value`` is None), yields, and restores
    the previous state on exit.

    :param name: config param name
    :type name: str
    :param value: new value, or None to unset
    :type value: str | None
    :param env: env vars
    :type env: dict
    :rtype: None
    """

    returncode, stdout, _ = exec_command(
        ['dcos', 'config', 'show', name], env)

    # when we change the dcos_url we remove the acs_token
    # we need to also restore the token if this occurs
    token = None
    if name == "core.dcos_url":
        # Bug fix: use a separate variable for the token lookup's return
        # code. The original reused ``returncode``, so a missing acs
        # token made the "config param already exists" check below fail
        # and the old dcos_url was never restored.
        token_returncode, token_val, _ = exec_command(
            ['dcos', 'config', 'show', "core.dcos_acs_token"], env)
        if token_returncode == 0:
            token = token_val.decode('utf-8').strip()

    result = None
    # config param already exists
    if returncode == 0:
        result = json.loads('"' + stdout.decode('utf-8').strip() + '"')

    # if we are setting a value
    if value is not None:
        config_set(name, value, env)
    # only unset if the config param already exists
    elif result is not None:
        config_unset(name, env)

    try:
        yield
    finally:
        # return config to previous state
        if result is not None:
            config_set(name, result, env)
        else:
            exec_command(['dcos', 'config', 'unset', name], env)

        if token:
            config_set("core.dcos_acs_token", token, env)
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def package(package_name, deploy=False, args=[]):
|
||||
"""Context manager that deploys an app on entrance, and removes it on
|
||||
exit.
|
||||
|
||||
:param package_name: package name
|
||||
:type package_name: str
|
||||
:param deploy: If True, block on the deploy
|
||||
:type deploy: bool
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
package_install(package_name, deploy, args)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
command = ['dcos', 'package', 'uninstall', package_name]
|
||||
returncode, _, _ = exec_command(command)
|
||||
assert returncode == 0
|
||||
watch_all_deployments()
|
||||
|
||||
|
||||
def popen_tty(cmd):
    """Open a process with stdin connected to a pseudo-tty. Returns a

    :param cmd: command to run
    :type cmd: str
    :returns: (Popen, master) tuple, where master is the master side
        of the of the tty-pair. It is the responsibility of the caller
        to close the master fd, and to perform any cleanup (including
        waiting for completion) of the Popen object.
    :rtype: (Popen, int)

    """

    # pty is POSIX-only; importing it here (not at module level) keeps
    # the module importable on platforms without pty as long as this
    # helper is never called.
    import pty
    master, slave = pty.openpty()
    # The child receives the slave end as stdin; preexec_fn=os.setsid
    # puts it in a fresh session so the whole process group can be
    # signalled later (see ssh_output's os.killpg).
    proc = subprocess.Popen(cmd,
                            stdin=slave,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            preexec_fn=os.setsid,
                            close_fds=True,
                            shell=True)
    # The parent no longer needs the slave end once the child holds it.
    os.close(slave)

    return (proc, master)
|
||||
|
||||
|
||||
def ssh_output(cmd):
    """ Runs an SSH command and returns the stdout/stderr/returncode.

    :param cmd: command to run
    :type cmd: str
    :rtype: (bytes, bytes, int)
    """

    print('SSH COMMAND: {}'.format(cmd))

    # ssh must run with stdin attached to a tty
    proc, master = popen_tty(cmd)

    # wait for the ssh connection
    # NOTE(review): a fixed sleep is inherently racy; presumably 5s is
    # enough for the test cluster -- confirm if this proves flaky.
    time.sleep(5)

    # poll() updates returncode without blocking; it stays None if the
    # process is still running at this point.
    proc.poll()
    returncode = proc.returncode

    # kill the whole process group
    try:
        os.killpg(os.getpgid(proc.pid), 15)
    except OSError:
        # Process may have already exited; nothing to kill.
        pass

    os.close(master)
    stdout, stderr = proc.communicate()

    print('SSH STDOUT: {}'.format(stdout.decode('utf-8')))
    print('SSH STDERR: {}'.format(stderr.decode('utf-8')))

    return stdout, stderr, returncode
|
||||
|
||||
|
||||
def config_set(key, value, env=None):
|
||||
""" dcos config set <key> <value>
|
||||
|
||||
:param key: <key>
|
||||
:type key: str
|
||||
:param value: <value>
|
||||
:type value: str
|
||||
;param env: env vars
|
||||
:type env: dict
|
||||
:rtype: None
|
||||
"""
|
||||
returncode, stdout, _ = exec_command(
|
||||
['dcos', 'config', 'set', key, value],
|
||||
env=env)
|
||||
|
||||
assert returncode == 0
|
||||
assert stdout == b''
|
||||
|
||||
|
||||
def config_unset(key, env=None):
|
||||
""" dcos config unset <key> --index=<index>
|
||||
|
||||
:param key: <key>
|
||||
:type key: str
|
||||
:param env: env vars
|
||||
:type env: dict
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
cmd = ['dcos', 'config', 'unset', key]
|
||||
|
||||
returncode, stdout, stderr = exec_command(cmd, env=env)
|
||||
|
||||
assert returncode == 0
|
||||
assert stdout == b''
|
||||
|
||||
|
||||
def base64_to_dict(byte_string):
    """Decode a base64-encoded JSON document into a dictionary.

    :param byte_string: base64 encoded string
    :type byte_string: str
    :return: python dictionary decoding of byte_string
    :rtype dict
    """
    decoded = base64.b64decode(byte_string)
    return json.loads(decoded.decode('utf-8'))
|
||||
|
||||
|
||||
UNIVERSE_REPO = "https://universe.mesosphere.com/repo"
|
||||
UNIVERSE_TEST_REPO = "http://universe.marathon.mesos:8085/repo"
|
||||
|
||||
|
||||
def setup_universe_server():
|
||||
# add universe-server with static packages
|
||||
add_app('tests/data/universe-v3-stub.json', True)
|
||||
|
||||
assert_command(
|
||||
['dcos', 'package', 'repo', 'remove', 'Universe'])
|
||||
|
||||
assert_command(
|
||||
['dcos', 'package', 'repo', 'add', 'test-universe', UNIVERSE_TEST_REPO]
|
||||
)
|
||||
|
||||
# Give the test universe some time to become available
|
||||
describe_command = ['dcos', 'package', 'describe', 'helloworld']
|
||||
for _ in range(30):
|
||||
returncode, _, _ = exec_command(describe_command)
|
||||
if returncode == 0:
|
||||
break
|
||||
time.sleep(1)
|
||||
else:
|
||||
# Explicitly clean up in this case; pytest will not automatically
|
||||
# perform teardowns if setup fails. See the remarks at the end of
|
||||
# http://doc.pytest.org/en/latest/xunit_setup.html for more info.
|
||||
teardown_universe_server()
|
||||
assert False, 'test-universe failed to come up'
|
||||
|
||||
|
||||
def teardown_universe_server():
|
||||
assert_command(
|
||||
['dcos', 'package', 'repo', 'remove', 'test-universe'])
|
||||
assert_command(
|
||||
['dcos', 'package', 'repo', 'add', 'Universe', UNIVERSE_REPO])
|
||||
assert_command(
|
||||
['dcos', 'marathon', 'app', 'remove', '/universe', '--force'])
|
||||
|
||||
|
||||
def zip_contents_as_json(path, inner_file):
    """Read *inner_file* from the zip archive at *path* and parse it as JSON.

    :param path: path to the zip archive
    :param inner_file: name of the member file inside the archive
    :returns: parsed JSON content of the member file
    """
    with zipfile.ZipFile(path) as archive:
        raw = archive.read(inner_file).decode()
    return json.loads(raw)
|
||||
348
cli/tests/integrations/helpers/common.py
Normal file
348
cli/tests/integrations/helpers/common.py
Normal file
@@ -0,0 +1,348 @@
|
||||
import base64
|
||||
import contextlib
|
||||
import json
|
||||
import os
|
||||
import subprocess
|
||||
import time
|
||||
import zipfile
|
||||
|
||||
import six
|
||||
from six.moves import urllib
|
||||
|
||||
from dcos import config, http
|
||||
|
||||
|
||||
def exec_command(cmd, env=None, stdin=None):
|
||||
"""Execute CLI command
|
||||
|
||||
:param cmd: Program and arguments
|
||||
:type cmd: [str]
|
||||
:param env: Environment variables
|
||||
:type env: dict | None
|
||||
:param stdin: File to use for stdin
|
||||
:type stdin: file
|
||||
:returns: A tuple with the returncode, stdout and stderr
|
||||
:rtype: (int, bytes, bytes)
|
||||
"""
|
||||
|
||||
print('CMD: {!r}'.format(cmd))
|
||||
|
||||
process = subprocess.Popen(
|
||||
cmd,
|
||||
stdin=stdin,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
env=env)
|
||||
|
||||
# This is needed to get rid of '\r' from Windows's lines endings.
|
||||
stdout, stderr = [std_stream.replace(b'\r', b'')
|
||||
for std_stream in process.communicate()]
|
||||
|
||||
# We should always print the stdout and stderr
|
||||
print('STDOUT: {}'.format(_truncate(stdout.decode('utf-8'))))
|
||||
print('STDERR: {}'.format(_truncate(stderr.decode('utf-8'))))
|
||||
|
||||
return (process.returncode, stdout, stderr)
|
||||
|
||||
|
||||
def _truncate(s, length=8000):
|
||||
if len(s) > length:
|
||||
return s[:length-3] + '...'
|
||||
else:
|
||||
return s
|
||||
|
||||
|
||||
def assert_command(
|
||||
cmd,
|
||||
returncode=0,
|
||||
stdout=b'',
|
||||
stderr=b'',
|
||||
env=None,
|
||||
stdin=None):
|
||||
"""Execute CLI command and assert expected behavior.
|
||||
|
||||
:param cmd: Program and arguments
|
||||
:type cmd: list of str
|
||||
:param returncode: Expected return code
|
||||
:type returncode: int
|
||||
:param stdout: Expected stdout
|
||||
:type stdout: bytes
|
||||
:param stderr: Expected stderr
|
||||
:type stderr: bytes
|
||||
:param env: Environment variables
|
||||
:type env: dict of str to str
|
||||
:param stdin: File to use for stdin
|
||||
:type stdin: file
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
returncode_, stdout_, stderr_ = exec_command(cmd, env, stdin)
|
||||
|
||||
assert returncode_ == returncode, (returncode_, returncode)
|
||||
assert stdout_ == stdout, (stdout_, stdout)
|
||||
assert stderr_ == stderr, (stderr_, stderr)
|
||||
|
||||
|
||||
def delete_zk_nodes():
|
||||
"""Delete Zookeeper nodes that were created during the tests
|
||||
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
for znode in ['universe', 'cassandra-mesos', 'chronos']:
|
||||
delete_zk_node(znode)
|
||||
|
||||
|
||||
def delete_zk_node(znode):
|
||||
"""Delete Zookeeper node
|
||||
|
||||
:param znode: znode to delete
|
||||
:type znode: str
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
dcos_url = config.get_config_val('core.dcos_url')
|
||||
znode_url = urllib.parse.urljoin(
|
||||
dcos_url,
|
||||
'/exhibitor/exhibitor/v1/explorer/znode/{}'.format(znode))
|
||||
http.delete(znode_url)
|
||||
|
||||
|
||||
def assert_lines(cmd, num_lines, greater_than=False):
|
||||
""" Assert stdout contains the expected number of lines
|
||||
|
||||
:param cmd: program and arguments
|
||||
:type cmd: [str]
|
||||
:param num_lines: expected number of lines for stdout
|
||||
:type num_lines: int
|
||||
:param greater_than: if True assume there may be at least num_lines or more
|
||||
:type greater_than: bool
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
returncode, stdout, stderr = exec_command(cmd)
|
||||
|
||||
assert returncode == 0
|
||||
assert stderr == b''
|
||||
lines = len(stdout.decode('utf-8').split('\n')) - 1
|
||||
if greater_than:
|
||||
assert lines >= num_lines
|
||||
return
|
||||
assert lines == num_lines
|
||||
|
||||
|
||||
def file_json_ast(path):
|
||||
"""Returns the JSON AST parsed from file
|
||||
:param path: path to file
|
||||
:type path: str
|
||||
:returns: parsed JSON AST
|
||||
"""
|
||||
with open(path) as f:
|
||||
return json.load(f)
|
||||
|
||||
|
||||
def json_ast_format(ast):
|
||||
"""Returns the given JSON AST formatted as bytes
|
||||
|
||||
:param ast: JSON AST
|
||||
:returns: formatted JSON
|
||||
:rtype: bytes
|
||||
"""
|
||||
return six.b(
|
||||
json.dumps(ast,
|
||||
sort_keys=True,
|
||||
indent=2,
|
||||
separators=(',', ': '))) + b'\n'
|
||||
|
||||
|
||||
def fetch_valid_json(cmd):
|
||||
"""Assert stdout contains valid JSON
|
||||
|
||||
:param cmd: program and arguments
|
||||
:type cmd: [str]
|
||||
:returns: parsed JSON AST
|
||||
"""
|
||||
returncode, stdout, stderr = exec_command(cmd)
|
||||
|
||||
assert returncode == 0
|
||||
assert stderr == b''
|
||||
try:
|
||||
return json.loads(stdout.decode('utf-8'))
|
||||
except json.JSONDecodeError:
|
||||
error_text = 'Command "{}" returned invalid JSON'.format(' '.join(cmd))
|
||||
raise Exception(error_text)
|
||||
|
||||
|
||||
def file_json(path):
|
||||
""" Returns formatted json from file
|
||||
|
||||
:param path: path to file
|
||||
:type path: str
|
||||
:returns: formatted json
|
||||
:rtype: bytes
|
||||
"""
|
||||
return json_ast_format(file_json_ast(path))
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def update_config(name, value, env=None):
|
||||
""" Context manager for altering config for tests
|
||||
|
||||
:param key: <key>
|
||||
:type key: str
|
||||
:param value: <value>
|
||||
:type value: str
|
||||
;param env: env vars
|
||||
:type env: dict
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
returncode, stdout, _ = exec_command(
|
||||
['dcos', 'config', 'show', name], env)
|
||||
|
||||
# when we change the dcos_url we remove the acs_token
|
||||
# we need to also restore the token if this occurs
|
||||
token = None
|
||||
if name == "core.dcos_url":
|
||||
returncode, token_val, _ = exec_command(
|
||||
['dcos', 'config', 'show', "core.dcos_acs_token"], env)
|
||||
if returncode == 0:
|
||||
token = token_val.decode('utf-8').strip()
|
||||
|
||||
result = None
|
||||
# config param already exists
|
||||
if returncode == 0:
|
||||
result = json.loads('"' + stdout.decode('utf-8').strip() + '"')
|
||||
|
||||
# if we are setting a value
|
||||
if value is not None:
|
||||
config_set(name, value, env)
|
||||
# only unset if the config param already exists
|
||||
elif result is not None:
|
||||
config_unset(name, env)
|
||||
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
# return config to previous state
|
||||
if result is not None:
|
||||
config_set(name, result, env)
|
||||
else:
|
||||
exec_command(['dcos', 'config', 'unset', name], env)
|
||||
|
||||
if token:
|
||||
config_set("core.dcos_acs_token", token, env)
|
||||
|
||||
|
||||
def popen_tty(cmd):
|
||||
"""Open a process with stdin connected to a pseudo-tty. Returns a
|
||||
|
||||
:param cmd: command to run
|
||||
:type cmd: str
|
||||
:returns: (Popen, master) tuple, where master is the master side
|
||||
of the of the tty-pair. It is the responsibility of the caller
|
||||
to close the master fd, and to perform any cleanup (including
|
||||
waiting for completion) of the Popen object.
|
||||
:rtype: (Popen, int)
|
||||
|
||||
"""
|
||||
|
||||
import pty
|
||||
master, slave = pty.openpty()
|
||||
proc = subprocess.Popen(cmd,
|
||||
stdin=slave,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
preexec_fn=os.setsid,
|
||||
close_fds=True,
|
||||
shell=True)
|
||||
os.close(slave)
|
||||
|
||||
return (proc, master)
|
||||
|
||||
|
||||
def ssh_output(cmd):
|
||||
""" Runs an SSH command and returns the stdout/stderr/returncode.
|
||||
|
||||
:param cmd: command to run
|
||||
:type cmd: str
|
||||
:rtype: (str, str, int)
|
||||
"""
|
||||
|
||||
print('SSH COMMAND: {}'.format(cmd))
|
||||
|
||||
# ssh must run with stdin attached to a tty
|
||||
proc, master = popen_tty(cmd)
|
||||
|
||||
# wait for the ssh connection
|
||||
time.sleep(5)
|
||||
|
||||
proc.poll()
|
||||
returncode = proc.returncode
|
||||
|
||||
# kill the whole process group
|
||||
try:
|
||||
os.killpg(os.getpgid(proc.pid), 15)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
os.close(master)
|
||||
stdout, stderr = proc.communicate()
|
||||
|
||||
print('SSH STDOUT: {}'.format(stdout.decode('utf-8')))
|
||||
print('SSH STDERR: {}'.format(stderr.decode('utf-8')))
|
||||
|
||||
return stdout, stderr, returncode
|
||||
|
||||
|
||||
def config_set(key, value, env=None):
|
||||
""" dcos config set <key> <value>
|
||||
|
||||
:param key: <key>
|
||||
:type key: str
|
||||
:param value: <value>
|
||||
:type value: str
|
||||
;param env: env vars
|
||||
:type env: dict
|
||||
:rtype: None
|
||||
"""
|
||||
returncode, stdout, _ = exec_command(
|
||||
['dcos', 'config', 'set', key, value],
|
||||
env=env)
|
||||
|
||||
assert returncode == 0
|
||||
assert stdout == b''
|
||||
|
||||
|
||||
def config_unset(key, env=None):
|
||||
""" dcos config unset <key>
|
||||
|
||||
:param key: <key>
|
||||
:type key: str
|
||||
:param env: env vars
|
||||
:type env: dict
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
cmd = ['dcos', 'config', 'unset', key]
|
||||
|
||||
returncode, stdout, stderr = exec_command(cmd, env=env)
|
||||
|
||||
assert returncode == 0
|
||||
assert stdout == b''
|
||||
|
||||
|
||||
def base64_to_dict(byte_string):
|
||||
"""
|
||||
:param byte_string: base64 encoded string
|
||||
:type byte_string: str
|
||||
:return: python dictionary decoding of byte_string
|
||||
:rtype dict
|
||||
"""
|
||||
return json.loads(base64.b64decode(byte_string).decode('utf-8'))
|
||||
|
||||
|
||||
def zip_contents_as_json(path, inner_file):
|
||||
with zipfile.ZipFile(path) as zip_file:
|
||||
inner_file_contents = zip_file.read(inner_file).decode()
|
||||
return json.loads(inner_file_contents)
|
||||
140
cli/tests/integrations/helpers/job.py
Normal file
140
cli/tests/integrations/helpers/job.py
Normal file
@@ -0,0 +1,140 @@
|
||||
import contextlib
|
||||
import json
|
||||
|
||||
from .common import assert_command, exec_command
|
||||
from .marathon import watch_deployment
|
||||
|
||||
|
||||
def remove_job(job_id):
|
||||
""" Remove a job
|
||||
|
||||
:param job_id: id of job to remove
|
||||
:type job_id: str
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
assert_command(['dcos', 'job', 'remove',
|
||||
'--stop-current-job-runs', job_id])
|
||||
|
||||
|
||||
def show_job(app_id):
|
||||
"""Show details of a Metronome job.
|
||||
|
||||
:param app_id: The id for the application
|
||||
:type app_id: str
|
||||
:returns: The requested Metronome job.
|
||||
:rtype: dict
|
||||
"""
|
||||
|
||||
cmd = ['dcos', 'job', 'show', app_id]
|
||||
|
||||
returncode, stdout, stderr = exec_command(cmd)
|
||||
|
||||
assert returncode == 0
|
||||
assert stderr == b''
|
||||
|
||||
result = json.loads(stdout.decode('utf-8'))
|
||||
assert isinstance(result, dict)
|
||||
assert result['id'] == app_id
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def show_job_schedule(app_id, schedule_id):
|
||||
"""Show details of a Metronome schedule.
|
||||
|
||||
:param app_id: The id for the job
|
||||
:type app_id: str
|
||||
:param schedule_id: The id for the schedule
|
||||
:type schedule_id: str
|
||||
:returns: The requested Metronome job.
|
||||
:rtype: dict
|
||||
"""
|
||||
|
||||
cmd = ['dcos', 'job', 'schedule', 'show', app_id, '--json']
|
||||
|
||||
returncode, stdout, stderr = exec_command(cmd)
|
||||
|
||||
assert returncode == 0
|
||||
assert stderr == b''
|
||||
|
||||
result = json.loads(stdout.decode('utf-8'))
|
||||
assert isinstance(result[0], dict)
|
||||
assert result[0]['id'] == schedule_id
|
||||
|
||||
return result[0]
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def job(path, job_id):
|
||||
"""Context manager that deploys a job on entrance, and removes it on
|
||||
exit.
|
||||
|
||||
:param path: path to job's json definition:
|
||||
:type path: str
|
||||
:param job_id: job id
|
||||
:type job_id: str
|
||||
:param wait: whether to wait for the deploy
|
||||
:type wait: bool
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
add_job(path)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
remove_job(job_id)
|
||||
|
||||
|
||||
def watch_job_deployments(count=300):
|
||||
"""Wait for all deployments to complete.
|
||||
|
||||
:param count: max number of seconds to wait
|
||||
:type count: int
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
deps = list_job_deployments()
|
||||
for dep in deps:
|
||||
watch_deployment(dep['id'], count)
|
||||
|
||||
|
||||
def add_job(job_path):
|
||||
""" Add a job, and wait for it to deploy
|
||||
|
||||
:param job_path: path to job's json definition
|
||||
:type job_path: str
|
||||
:param wait: whether to wait for the deploy
|
||||
:type wait: bool
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
assert_command(['dcos', 'job', 'add', job_path])
|
||||
|
||||
|
||||
def list_job_deployments(expected_count=None, app_id=None):
|
||||
"""Get all active deployments.
|
||||
|
||||
:param expected_count: assert that number of active deployments
|
||||
equals `expected_count`
|
||||
:type expected_count: int
|
||||
:param app_id: only get deployments for this app
|
||||
:type app_id: str
|
||||
:returns: active deployments
|
||||
:rtype: [dict]
|
||||
"""
|
||||
|
||||
cmd = ['dcos', 'job', 'list', '--json']
|
||||
if app_id is not None:
|
||||
cmd.append(app_id)
|
||||
|
||||
returncode, stdout, stderr = exec_command(cmd)
|
||||
|
||||
result = json.loads(stdout.decode('utf-8'))
|
||||
|
||||
assert returncode == 0
|
||||
if expected_count is not None:
|
||||
assert len(result) == expected_count
|
||||
assert stderr == b''
|
||||
|
||||
return result
|
||||
405
cli/tests/integrations/helpers/marathon.py
Normal file
405
cli/tests/integrations/helpers/marathon.py
Normal file
@@ -0,0 +1,405 @@
|
||||
import contextlib
|
||||
import json
|
||||
import re
|
||||
|
||||
from .common import assert_command, exec_command
|
||||
|
||||
|
||||
def add_app(app_path, wait=True):
|
||||
""" Add an app, and wait for it to deploy
|
||||
|
||||
:param app_path: path to app's json definition
|
||||
:type app_path: str
|
||||
:param wait: whether to wait for the deploy
|
||||
:type wait: bool
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
cmd = ['dcos', 'marathon', 'app', 'add', app_path]
|
||||
returncode, stdout, stderr = exec_command(cmd)
|
||||
assert returncode == 0
|
||||
assert re.fullmatch('Created deployment \S+\n', stdout.decode('utf-8'))
|
||||
assert stderr == b''
|
||||
|
||||
if wait:
|
||||
watch_all_deployments()
|
||||
|
||||
|
||||
def start_app(app_id, instances=None):
|
||||
cmd = ['dcos', 'marathon', 'app', 'start', app_id]
|
||||
if instances is not None:
|
||||
cmd.append(str(instances))
|
||||
|
||||
returncode, stdout, stderr = exec_command(cmd)
|
||||
|
||||
assert returncode == 0
|
||||
assert stdout.decode().startswith('Created deployment ')
|
||||
assert stderr == b''
|
||||
|
||||
|
||||
def list_apps(app_id=None):
|
||||
returncode, stdout, stderr = exec_command(
|
||||
['dcos', 'marathon', 'app', 'list', '--json'])
|
||||
|
||||
result = json.loads(stdout.decode('utf-8'))
|
||||
|
||||
if app_id is None:
|
||||
assert len(result) == 0
|
||||
else:
|
||||
assert len(result) == 1
|
||||
assert result[0]['id'] == '/' + app_id
|
||||
|
||||
assert returncode == 0
|
||||
assert stderr == b''
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def remove_group(group_id):
|
||||
assert_command(['dcos', 'marathon', 'group', 'remove', group_id])
|
||||
|
||||
# Let's make sure that we don't return until the deployment has finished
|
||||
watch_all_deployments()
|
||||
|
||||
|
||||
def remove_app(app_id):
|
||||
""" Remove an app
|
||||
|
||||
:param app_id: id of app to remove
|
||||
:type app_id: str
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
assert_command(['dcos', 'marathon', 'app', 'remove', '--force', app_id])
|
||||
|
||||
|
||||
def show_group(group_id, version=None):
|
||||
if version is None:
|
||||
cmd = ['dcos', 'marathon', 'group', 'show', group_id]
|
||||
else:
|
||||
cmd = ['dcos', 'marathon', 'group', 'show',
|
||||
'--group-version={}'.format(version), group_id]
|
||||
|
||||
returncode, stdout, stderr = exec_command(cmd)
|
||||
|
||||
result = json.loads(stdout.decode('utf-8'))
|
||||
|
||||
assert returncode == 0
|
||||
assert isinstance(result, dict)
|
||||
assert result['id'] == '/' + group_id
|
||||
assert stderr == b''
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def remove_pod(pod_id, force=True):
|
||||
""" Remove a pod
|
||||
|
||||
:param pod_id: id of app to remove
|
||||
:type pod_id: str
|
||||
:param force: whether to force a remove
|
||||
:type force: bool
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
cmd = ['dcos', 'marathon', 'pod', 'remove', pod_id]
|
||||
if force:
|
||||
cmd += ['--force']
|
||||
assert_command(cmd)
|
||||
|
||||
|
||||
def show_app(app_id, version=None):
|
||||
"""Show details of a Marathon application.
|
||||
|
||||
:param app_id: The id for the application
|
||||
:type app_id: str
|
||||
:param version: The version, either absolute (date-time) or relative
|
||||
:type version: str
|
||||
:returns: The requested Marathon application
|
||||
:rtype: dict
|
||||
"""
|
||||
|
||||
if version is None:
|
||||
cmd = ['dcos', 'marathon', 'app', 'show', app_id]
|
||||
else:
|
||||
cmd = ['dcos', 'marathon', 'app', 'show',
|
||||
'--app-version={}'.format(version), app_id]
|
||||
|
||||
returncode, stdout, stderr = exec_command(cmd)
|
||||
|
||||
assert returncode == 0
|
||||
assert stderr == b''
|
||||
|
||||
result = json.loads(stdout.decode('utf-8'))
|
||||
assert isinstance(result, dict)
|
||||
assert result['id'] == '/' + app_id
|
||||
|
||||
return result
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def app(path, app_id, wait=True):
|
||||
"""Context manager that deploys an app on entrance, and removes it on
|
||||
exit.
|
||||
|
||||
:param path: path to app's json definition:
|
||||
:type path: str
|
||||
:param app_id: app id
|
||||
:type app_id: str
|
||||
:param wait: whether to wait for the deploy
|
||||
:type wait: bool
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
add_app(path, wait)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
remove_app(app_id)
|
||||
watch_all_deployments()
|
||||
|
||||
|
||||
def add_pod(pod_path, wait=True):
|
||||
"""Add a pod, and wait for it to deploy
|
||||
|
||||
:param pod_path: path to pod's json definition
|
||||
:type pod_path: str
|
||||
:param wait: whether to wait for the deploy
|
||||
:type wait: bool
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
cmd = ['dcos', 'marathon', 'pod', 'add', pod_path]
|
||||
returncode, stdout, stderr = exec_command(cmd)
|
||||
assert returncode == 0
|
||||
assert re.fullmatch('Created deployment \S+\n', stdout.decode('utf-8'))
|
||||
assert stderr == b''
|
||||
|
||||
if wait:
|
||||
watch_all_deployments()
|
||||
|
||||
|
||||
def pod_spec_json(expected_pod_spec, actual_pod_spec):
|
||||
"""Checks that the "actual" pod spec JSON matches the "expected" JSON.
|
||||
|
||||
The comparison only looks at specific fields that are present in the
|
||||
test data used by this module.
|
||||
|
||||
:param expected_pod_spec: contains the baseline values for the comparison
|
||||
:type expected_pod_spec: {}
|
||||
:param actual_pod_spec: has its fields checked against the expected fields
|
||||
:type actual_pod_spec: {}
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
expected_containers = expected_pod_spec['containers']
|
||||
actual_containers = actual_pod_spec['containers']
|
||||
actual_containers_by_name = {c['name']: c for c in actual_containers}
|
||||
|
||||
for expected_container in expected_containers:
|
||||
container_name = expected_container['name']
|
||||
actual_container = actual_containers_by_name[container_name]
|
||||
|
||||
for k, v in expected_container['resources'].items():
|
||||
assert actual_container['resources'][k] == v
|
||||
|
||||
assert len(actual_containers) == len(expected_containers)
|
||||
|
||||
|
||||
def pod_status_json(expected_pod_status, actual_pod_status):
|
||||
"""Checks that the "actual" pod status JSON matched the "expected" JSON.
|
||||
|
||||
The comparison only looks at specific fields that are present in the
|
||||
test data used by this module.
|
||||
|
||||
:param expected_pod_status: contains the baseline values for the comparison
|
||||
:type expected_pod_status: {}
|
||||
:param actual_pod_status: has its fields checked against expected's fields
|
||||
:type actual_pod_status: {}
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
assert actual_pod_status['id'] == expected_pod_status['id']
|
||||
assert actual_pod_status['status'] == expected_pod_status['status']
|
||||
assert len(actual_pod_status['instances']) == \
|
||||
len(expected_pod_status['instances'])
|
||||
|
||||
pod_spec_json(expected_pod_status['spec'],
|
||||
actual_pod_status['spec'])
|
||||
|
||||
expected_instance = expected_pod_status['instances'][0]
|
||||
expected_container_statuses = {container['name']: container['status']
|
||||
for container
|
||||
in expected_instance['containers']}
|
||||
|
||||
for actual_instance in actual_pod_status['instances']:
|
||||
assert actual_instance['status'] == expected_instance['status']
|
||||
|
||||
actual_container_statuses = {container['name']: container['status']
|
||||
for container
|
||||
in actual_instance['containers']}
|
||||
|
||||
assert actual_container_statuses == expected_container_statuses
|
||||
|
||||
|
||||
def show_pod(pod_id, expected_json):
|
||||
"""Show details of a Marathon pod and make sure it matches expected output
|
||||
|
||||
:param pod_id: The id for the pod
|
||||
:type pod_id: str
|
||||
:param expected_json: expected results for pod `show`
|
||||
:type expected_json: dict
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
cmd = ['dcos', 'marathon', 'pod', 'show', pod_id]
|
||||
returncode, stdout, stderr = exec_command(cmd)
|
||||
|
||||
assert returncode == 0
|
||||
assert stderr == b''
|
||||
|
||||
status_json = json.loads(stdout.decode('utf-8'))
|
||||
pod_status_json(expected_json, status_json)
|
||||
|
||||
|
||||
def add_group(group_path, wait=True):
|
||||
"""Add a group, and wait for it to deploy
|
||||
|
||||
:param group_path: path to pod's json definition
|
||||
:type group_path: str
|
||||
:param wait: whether to wait for the deploy
|
||||
:type wait: bool
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
cmd = ['dcos', 'marathon', 'group', 'add', group_path]
|
||||
returncode, stdout, stderr = exec_command(cmd)
|
||||
assert returncode == 0
|
||||
assert re.fullmatch('Created deployment \S+\n', stdout.decode('utf-8'))
|
||||
assert stderr == b''
|
||||
|
||||
if wait:
|
||||
watch_all_deployments()
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def group(path, group_id, wait=True):
|
||||
"""Context manager that deploys an group on entrance, and removes it on exit
|
||||
|
||||
:param path: path to group's json definition:
|
||||
:type path: str
|
||||
:param group_id: group id
|
||||
:type group_id: str
|
||||
:param wait: whether to wait for the deploy
|
||||
:type wait: bool
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
add_group(path, wait)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
remove_group(group_id)
|
||||
watch_all_deployments()
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def pod(path, pod_id, wait=True):
|
||||
"""Context manager that deploys an pod on entrance, and removes it on exit
|
||||
|
||||
:param path: path to pod's json definition:
|
||||
:type path: str
|
||||
:param pod_id: pod id
|
||||
:type pod_id: str
|
||||
:param wait: whether to wait for the deploy
|
||||
:type wait: bool
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
add_pod(path, wait)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
remove_pod(pod_id)
|
||||
watch_all_deployments()
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def pods(pods):
|
||||
"""Context manager that deploys pods on entrance, and removes
|
||||
them on exit.
|
||||
|
||||
:param pods: dict of path/to/pod/json -> pod id
|
||||
:type pods: {}
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
for pod_path in pods:
|
||||
add_pod(pod_path, wait=False)
|
||||
watch_all_deployments()
|
||||
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
for pod_id in list(pods.values()):
|
||||
remove_pod(pod_id)
|
||||
watch_all_deployments()
|
||||
|
||||
|
||||
def watch_deployment(deployment_id, count):
|
||||
"""Wait for a deployment to complete.
|
||||
|
||||
:param deployment_id: deployment id
|
||||
:type deployment_id: str
|
||||
:param count: max number of seconds to wait
|
||||
:type count: int
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
returncode, stdout, stderr = exec_command(
|
||||
['dcos', 'marathon', 'deployment', 'watch',
|
||||
'--max-count={}'.format(count), deployment_id])
|
||||
|
||||
assert returncode == 0
|
||||
assert stderr == b''
|
||||
|
||||
|
||||
def watch_all_deployments(count=300):
|
||||
"""Wait for all deployments to complete.
|
||||
|
||||
:param count: max number of seconds to wait
|
||||
:type count: int
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
deps = list_deployments()
|
||||
for dep in deps:
|
||||
watch_deployment(dep['id'], count)
|
||||
|
||||
|
||||
def list_deployments(expected_count=None, app_id=None):
|
||||
"""Get all active deployments.
|
||||
|
||||
:param expected_count: assert that number of active deployments
|
||||
equals `expected_count`
|
||||
:type expected_count: int
|
||||
:param app_id: only get deployments for this app
|
||||
:type app_id: str
|
||||
:returns: active deployments
|
||||
:rtype: [dict]
|
||||
"""
|
||||
|
||||
cmd = ['dcos', 'marathon', 'deployment', 'list', '--json']
|
||||
if app_id is not None:
|
||||
cmd.append(app_id)
|
||||
|
||||
returncode, stdout, stderr = exec_command(cmd)
|
||||
|
||||
result = json.loads(stdout.decode('utf-8'))
|
||||
|
||||
assert returncode == 0
|
||||
if expected_count is not None:
|
||||
assert len(result) == expected_count
|
||||
assert stderr == b''
|
||||
|
||||
return result
|
||||
113
cli/tests/integrations/helpers/package.py
Normal file
113
cli/tests/integrations/helpers/package.py
Normal file
@@ -0,0 +1,113 @@
|
||||
import contextlib
|
||||
import time
|
||||
|
||||
from .common import assert_command, exec_command
|
||||
from .marathon import add_app, watch_all_deployments
|
||||
from .service import get_services, service_shutdown
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def package(package_name, deploy=False, args=[]):
|
||||
"""Context manager that deploys an app on entrance, and removes it on
|
||||
exit.
|
||||
|
||||
:param package_name: package name
|
||||
:type package_name: str
|
||||
:param deploy: If True, block on the deploy
|
||||
:type deploy: bool
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
package_install(package_name, deploy, args)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
command = ['dcos', 'package', 'uninstall', package_name]
|
||||
returncode, _, _ = exec_command(command)
|
||||
assert returncode == 0
|
||||
watch_all_deployments()
|
||||
|
||||
services = get_services()
|
||||
for framework in services:
|
||||
if framework['name'] == package_name:
|
||||
service_shutdown(framework['id'])
|
||||
|
||||
|
||||
UNIVERSE_REPO = "https://universe.mesosphere.com/repo"
|
||||
UNIVERSE_TEST_REPO = "http://universe.marathon.mesos:8085/repo"
|
||||
|
||||
|
||||
def setup_universe_server():
|
||||
# add universe-server with static packages
|
||||
add_app('tests/data/universe-v3-stub.json', True)
|
||||
|
||||
assert_command(
|
||||
['dcos', 'package', 'repo', 'remove', 'Universe'])
|
||||
|
||||
assert_command(
|
||||
['dcos', 'package', 'repo', 'add', 'test-universe', UNIVERSE_TEST_REPO]
|
||||
)
|
||||
|
||||
watch_all_deployments()
|
||||
# Give the test universe some time to become available
|
||||
describe_command = ['dcos', 'package', 'describe', 'helloworld']
|
||||
for i in range(30):
|
||||
returncode, _, _ = exec_command(describe_command)
|
||||
if returncode == 0:
|
||||
break
|
||||
time.sleep(1)
|
||||
else:
|
||||
# Explicitly clean up in this case; pytest will not automatically
|
||||
# perform teardowns if setup fails. See the remarks at the end of
|
||||
# http://doc.pytest.org/en/latest/xunit_setup.html for more info.
|
||||
teardown_universe_server()
|
||||
assert False, 'test-universe failed to come up'
|
||||
|
||||
|
||||
def teardown_universe_server():
|
||||
assert_command(
|
||||
['dcos', 'package', 'repo', 'remove', 'test-universe'])
|
||||
assert_command(
|
||||
['dcos', 'package', 'repo', 'add', 'Universe', UNIVERSE_REPO])
|
||||
assert_command(
|
||||
['dcos', 'marathon', 'app', 'remove', '/universe', '--force'])
|
||||
watch_all_deployments()
|
||||
|
||||
|
||||
def package_install(package, deploy=False, args=[]):
|
||||
""" Calls `dcos package install`
|
||||
|
||||
:param package: name of the package to install
|
||||
:type package: str
|
||||
:param deploy: whether or not to wait for the deploy
|
||||
:type deploy: bool
|
||||
:param args: extra CLI args
|
||||
:type args: [str]
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
returncode, stdout, stderr = exec_command(
|
||||
['dcos', 'package', 'install', '--yes', package] + args)
|
||||
|
||||
assert returncode == 0
|
||||
assert stderr == b''
|
||||
|
||||
if deploy:
|
||||
watch_all_deployments()
|
||||
|
||||
|
||||
def package_uninstall(package_name, args=[], stderr=b''):
|
||||
""" Calls `dcos package uninstall`
|
||||
|
||||
:param package_name: name of the package to uninstall
|
||||
:type package_name: str
|
||||
:param args: extra CLI args
|
||||
:type args: [str]
|
||||
:param stderr: expected string in stderr for package uninstall
|
||||
:type stderr: bytes
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
assert_command(
|
||||
['dcos', 'package', 'uninstall', package_name] + args,
|
||||
stderr=stderr)
|
||||
64
cli/tests/integrations/helpers/service.py
Normal file
64
cli/tests/integrations/helpers/service.py
Normal file
@@ -0,0 +1,64 @@
|
||||
import collections
|
||||
import json
|
||||
|
||||
from .common import assert_command, exec_command
|
||||
|
||||
|
||||
def wait_for_service(service_name, number_of_services=1, max_count=300):
|
||||
"""Wait for service to register with Mesos
|
||||
|
||||
:param service_name: name of service
|
||||
:type service_name: str
|
||||
:param number_of_services: number of services with that name
|
||||
:type number_of_services: int
|
||||
:param max_count: max number of seconds to wait
|
||||
:type max_count: int
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
count = 0
|
||||
while count < max_count:
|
||||
services = get_services()
|
||||
|
||||
if (len([service for service in services
|
||||
if service['name'] == service_name]) >= number_of_services):
|
||||
return
|
||||
|
||||
count += 1
|
||||
|
||||
|
||||
def get_services(expected_count=None, args=[]):
|
||||
"""Get services
|
||||
|
||||
:param expected_count: assert exactly this number of services are
|
||||
running
|
||||
:type expected_count: int | None
|
||||
:param args: cli arguments
|
||||
:type args: [str]
|
||||
:returns: services
|
||||
:rtype: [dict]
|
||||
"""
|
||||
|
||||
returncode, stdout, stderr = exec_command(
|
||||
['dcos', 'service', '--json'] + args)
|
||||
|
||||
assert returncode == 0
|
||||
assert stderr == b''
|
||||
|
||||
services = json.loads(stdout.decode('utf-8'))
|
||||
assert isinstance(services, collections.Sequence)
|
||||
if expected_count is not None:
|
||||
assert len(services) == expected_count
|
||||
|
||||
return services
|
||||
|
||||
|
||||
def service_shutdown(service_id):
|
||||
"""Shuts down a service using the command line program
|
||||
|
||||
:param service_id: the id of the service
|
||||
:type: service_id: str
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
assert_command(['dcos', 'service', 'shutdown', service_id])
|
||||
@@ -4,7 +4,7 @@ import pytest
|
||||
|
||||
from dcos import constants
|
||||
|
||||
from .common import assert_command, exec_command, update_config
|
||||
from .helpers.common import assert_command, exec_command, update_config
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
|
||||
@@ -6,8 +6,8 @@ import six
|
||||
|
||||
from dcos import constants
|
||||
|
||||
from .common import (assert_command, config_set, config_unset,
|
||||
exec_command, update_config)
|
||||
from .helpers.common import (assert_command, config_set, config_unset,
|
||||
exec_command, update_config)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
@@ -150,8 +150,7 @@ def test_set_nonexistent_subcommand(env):
|
||||
env=env)
|
||||
|
||||
|
||||
def test_set_when_extra_section():
|
||||
env = os.environ.copy()
|
||||
def test_set_when_extra_section(env):
|
||||
path = os.path.join('tests', 'data', 'config', 'invalid_section.toml')
|
||||
env['DCOS_CONFIG'] = path
|
||||
os.chmod(path, 0o600)
|
||||
@@ -298,8 +297,7 @@ def test_timeout(env):
|
||||
assert "(connect timeout=1)".encode('utf-8') in stderr
|
||||
|
||||
|
||||
def test_parse_error():
|
||||
env = os.environ.copy()
|
||||
def test_parse_error(env):
|
||||
path = os.path.join('tests', 'data', 'config', 'parse_error.toml')
|
||||
os.chmod(path, 0o600)
|
||||
env['DCOS_CONFIG'] = path
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from .common import assert_command, exec_command
|
||||
from .helpers.common import assert_command, exec_command
|
||||
|
||||
|
||||
def test_default():
|
||||
|
||||
@@ -8,9 +8,9 @@ import time
|
||||
|
||||
import dcoscli
|
||||
from dcos import util
|
||||
from .common import (assert_command, exec_command,
|
||||
file_json_ast, watch_all_deployments,
|
||||
zip_contents_as_json)
|
||||
from .helpers.common import (assert_command, exec_command, file_json_ast,
|
||||
zip_contents_as_json)
|
||||
from .helpers.marathon import watch_all_deployments
|
||||
|
||||
command_base = ['dcos', 'experimental']
|
||||
data_dir = os.path.join(os.getcwd(), 'tests', 'data')
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from .common import assert_command
|
||||
from .helpers.common import assert_command
|
||||
|
||||
|
||||
def test_help():
|
||||
|
||||
@@ -6,8 +6,8 @@ import pytest
|
||||
|
||||
from dcos import constants
|
||||
|
||||
from .common import (assert_command, exec_command, job, show_job,
|
||||
show_job_schedule, update_config)
|
||||
from .helpers.common import assert_command, exec_command, update_config
|
||||
from .helpers.job import job, show_job, show_job_schedule
|
||||
|
||||
|
||||
def test_help():
|
||||
|
||||
@@ -10,10 +10,12 @@ from six.moves.BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
|
||||
|
||||
from dcos import constants
|
||||
|
||||
from .common import (app, assert_command, assert_lines,
|
||||
exec_command, list_deployments, popen_tty,
|
||||
show_app, update_config, watch_all_deployments,
|
||||
watch_deployment)
|
||||
from .helpers.common import (assert_command, assert_lines, exec_command,
|
||||
popen_tty, update_config)
|
||||
from .helpers.marathon import (app, list_apps, list_deployments, show_app,
|
||||
start_app, watch_all_deployments,
|
||||
watch_deployment)
|
||||
|
||||
|
||||
_ZERO_INSTANCE_APP_ID = 'zero-instance-app'
|
||||
_ZERO_INSTANCE_APP_INSTANCES = 100
|
||||
@@ -67,17 +69,12 @@ def test_missing_config(env):
|
||||
|
||||
|
||||
def test_empty_list():
|
||||
_list_apps()
|
||||
|
||||
|
||||
def test_add_app():
|
||||
with _zero_instance_app():
|
||||
_list_apps('zero-instance-app')
|
||||
list_apps()
|
||||
|
||||
|
||||
def test_add_app_through_http():
|
||||
with _zero_instance_app_through_http():
|
||||
_list_apps('zero-instance-app')
|
||||
list_apps('zero-instance-app')
|
||||
|
||||
|
||||
def test_add_app_bad_resource():
|
||||
@@ -88,15 +85,10 @@ def test_add_app_bad_resource():
|
||||
stderr=stderr)
|
||||
|
||||
|
||||
def test_add_app_with_filename():
|
||||
with _zero_instance_app():
|
||||
_list_apps('zero-instance-app')
|
||||
|
||||
|
||||
def test_remove_app():
|
||||
with _zero_instance_app():
|
||||
pass
|
||||
_list_apps()
|
||||
list_apps()
|
||||
|
||||
|
||||
def test_add_bad_json_app():
|
||||
@@ -121,11 +113,6 @@ def test_add_existing_app():
|
||||
stdin=fd)
|
||||
|
||||
|
||||
def test_show_app():
|
||||
with _zero_instance_app():
|
||||
show_app('zero-instance-app')
|
||||
|
||||
|
||||
def test_show_absolute_app_version():
|
||||
with _zero_instance_app():
|
||||
_update_app(
|
||||
@@ -219,14 +206,9 @@ def test_start_missing_app():
|
||||
stderr=b"Error: App '/missing-id' does not exist\n")
|
||||
|
||||
|
||||
def test_start_app():
|
||||
with _zero_instance_app():
|
||||
_start_app('zero-instance-app')
|
||||
|
||||
|
||||
def test_start_already_started_app():
|
||||
with _zero_instance_app():
|
||||
_start_app('zero-instance-app')
|
||||
start_app('zero-instance-app')
|
||||
|
||||
stdout = (b"Application 'zero-instance-app' already "
|
||||
b"started: 1 instances.\n")
|
||||
@@ -244,7 +226,7 @@ def test_stop_missing_app():
|
||||
|
||||
def test_stop_app():
|
||||
with _zero_instance_app():
|
||||
_start_app('zero-instance-app', 3)
|
||||
start_app('zero-instance-app', 3)
|
||||
watch_all_deployments()
|
||||
|
||||
returncode, stdout, stderr = exec_command(
|
||||
@@ -367,21 +349,9 @@ def test_restarting_missing_app():
|
||||
stderr=b"Error: App '/missing-id' does not exist\n")
|
||||
|
||||
|
||||
def test_restarting_app():
|
||||
with _zero_instance_app():
|
||||
_start_app('zero-instance-app', 3)
|
||||
watch_all_deployments()
|
||||
returncode, stdout, stderr = exec_command(
|
||||
['dcos', 'marathon', 'app', 'restart', 'zero-instance-app'])
|
||||
|
||||
assert returncode == 0
|
||||
assert stdout.decode().startswith('Created deployment ')
|
||||
assert stderr == b''
|
||||
|
||||
|
||||
def test_killing_app():
|
||||
with _zero_instance_app():
|
||||
_start_app('zero-instance-app', 3)
|
||||
start_app('zero-instance-app', 3)
|
||||
watch_all_deployments()
|
||||
task_set_1 = set([task['id']
|
||||
for task in _list_tasks(3, 'zero-instance-app')])
|
||||
@@ -398,7 +368,7 @@ def test_killing_app():
|
||||
|
||||
def test_killing_scaling_app():
|
||||
with _zero_instance_app():
|
||||
_start_app('zero-instance-app', 3)
|
||||
start_app('zero-instance-app', 3)
|
||||
watch_all_deployments()
|
||||
_list_tasks(3)
|
||||
command = ['dcos', 'marathon', 'app', 'kill', '--scale',
|
||||
@@ -415,7 +385,7 @@ def test_killing_scaling_app():
|
||||
|
||||
def test_killing_with_host_app():
|
||||
with _zero_instance_app():
|
||||
_start_app('zero-instance-app', 3)
|
||||
start_app('zero-instance-app', 3)
|
||||
watch_all_deployments()
|
||||
existing_tasks = _list_tasks(3, 'zero-instance-app')
|
||||
task_hosts = set([task['host'] for task in existing_tasks])
|
||||
@@ -506,7 +476,7 @@ def test_list_empty_deployment():
|
||||
|
||||
def test_list_deployment():
|
||||
with _zero_instance_app():
|
||||
_start_app('zero-instance-app', _ZERO_INSTANCE_APP_INSTANCES)
|
||||
start_app('zero-instance-app', _ZERO_INSTANCE_APP_INSTANCES)
|
||||
list_deployments(1)
|
||||
|
||||
|
||||
@@ -517,19 +487,19 @@ def test_list_deployment_table():
|
||||
"""
|
||||
|
||||
with _zero_instance_app():
|
||||
_start_app('zero-instance-app', _ZERO_INSTANCE_APP_INSTANCES)
|
||||
start_app('zero-instance-app', _ZERO_INSTANCE_APP_INSTANCES)
|
||||
assert_lines(['dcos', 'marathon', 'deployment', 'list'], 2)
|
||||
|
||||
|
||||
def test_list_deployment_missing_app():
|
||||
with _zero_instance_app():
|
||||
_start_app('zero-instance-app')
|
||||
start_app('zero-instance-app')
|
||||
list_deployments(0, 'missing-id')
|
||||
|
||||
|
||||
def test_list_deployment_app():
|
||||
with _zero_instance_app():
|
||||
_start_app('zero-instance-app', _ZERO_INSTANCE_APP_INSTANCES)
|
||||
start_app('zero-instance-app', _ZERO_INSTANCE_APP_INSTANCES)
|
||||
list_deployments(1, 'zero-instance-app')
|
||||
|
||||
|
||||
@@ -542,7 +512,7 @@ def test_rollback_missing_deployment():
|
||||
|
||||
def test_rollback_deployment():
|
||||
with _zero_instance_app():
|
||||
_start_app('zero-instance-app', _ZERO_INSTANCE_APP_INSTANCES)
|
||||
start_app('zero-instance-app', _ZERO_INSTANCE_APP_INSTANCES)
|
||||
result = list_deployments(1, 'zero-instance-app')
|
||||
|
||||
returncode, stdout, stderr = exec_command(
|
||||
@@ -561,7 +531,7 @@ def test_rollback_deployment():
|
||||
|
||||
def test_stop_deployment():
|
||||
with _zero_instance_app():
|
||||
_start_app('zero-instance-app', _ZERO_INSTANCE_APP_INSTANCES)
|
||||
start_app('zero-instance-app', _ZERO_INSTANCE_APP_INSTANCES)
|
||||
result = list_deployments(1, 'zero-instance-app')
|
||||
|
||||
assert_command(
|
||||
@@ -576,7 +546,7 @@ def test_watching_missing_deployment():
|
||||
|
||||
def test_watching_deployment():
|
||||
with _zero_instance_app():
|
||||
_start_app('zero-instance-app', _ZERO_INSTANCE_APP_INSTANCES)
|
||||
start_app('zero-instance-app', _ZERO_INSTANCE_APP_INSTANCES)
|
||||
result = list_deployments(1, 'zero-instance-app')
|
||||
watch_deployment(result[0]['id'], 60)
|
||||
assert_command(
|
||||
@@ -595,28 +565,28 @@ def test_list_empty_task_not_running_app():
|
||||
|
||||
def test_list_tasks():
|
||||
with _zero_instance_app():
|
||||
_start_app('zero-instance-app', 3)
|
||||
start_app('zero-instance-app', 3)
|
||||
watch_all_deployments()
|
||||
_list_tasks(3)
|
||||
|
||||
|
||||
def test_list_tasks_table():
|
||||
with _zero_instance_app():
|
||||
_start_app('zero-instance-app', 3)
|
||||
start_app('zero-instance-app', 3)
|
||||
watch_all_deployments()
|
||||
assert_lines(['dcos', 'marathon', 'task', 'list'], 4)
|
||||
|
||||
|
||||
def test_list_app_tasks():
|
||||
with _zero_instance_app():
|
||||
_start_app('zero-instance-app', 3)
|
||||
start_app('zero-instance-app', 3)
|
||||
watch_all_deployments()
|
||||
_list_tasks(3, 'zero-instance-app')
|
||||
|
||||
|
||||
def test_list_missing_app_tasks():
|
||||
with _zero_instance_app():
|
||||
_start_app('zero-instance-app', 3)
|
||||
start_app('zero-instance-app', 3)
|
||||
watch_all_deployments()
|
||||
_list_tasks(0, 'missing-id')
|
||||
|
||||
@@ -635,7 +605,7 @@ def test_show_missing_task():
|
||||
|
||||
def test_show_task():
|
||||
with _zero_instance_app():
|
||||
_start_app('zero-instance-app', 3)
|
||||
start_app('zero-instance-app', 3)
|
||||
watch_all_deployments()
|
||||
result = _list_tasks(3, 'zero-instance-app')
|
||||
|
||||
@@ -651,7 +621,7 @@ def test_show_task():
|
||||
|
||||
def test_stop_task():
|
||||
with _zero_instance_app():
|
||||
_start_app('zero-instance-app', 1)
|
||||
start_app('zero-instance-app', 1)
|
||||
watch_all_deployments()
|
||||
task_list = _list_tasks(1, 'zero-instance-app')
|
||||
task_id = task_list[0]['id']
|
||||
@@ -661,7 +631,7 @@ def test_stop_task():
|
||||
|
||||
def test_stop_task_wipe():
|
||||
with _zero_instance_app():
|
||||
_start_app('zero-instance-app', 1)
|
||||
start_app('zero-instance-app', 1)
|
||||
watch_all_deployments()
|
||||
task_list = _list_tasks(1, 'zero-instance-app')
|
||||
task_id = task_list[0]['id']
|
||||
@@ -671,7 +641,7 @@ def test_stop_task_wipe():
|
||||
|
||||
def test_stop_unknown_task():
|
||||
with _zero_instance_app():
|
||||
_start_app('zero-instance-app')
|
||||
start_app('zero-instance-app')
|
||||
watch_all_deployments()
|
||||
task_id = 'unknown-task-id'
|
||||
|
||||
@@ -680,7 +650,7 @@ def test_stop_unknown_task():
|
||||
|
||||
def test_stop_unknown_task_wipe():
|
||||
with _zero_instance_app():
|
||||
_start_app('zero-instance-app')
|
||||
start_app('zero-instance-app')
|
||||
watch_all_deployments()
|
||||
task_id = 'unknown-task-id'
|
||||
|
||||
@@ -727,36 +697,6 @@ def test_app_add_no_tty():
|
||||
b"E.g.: dcos marathon app add < app_resource.json\n")
|
||||
|
||||
|
||||
def _list_apps(app_id=None):
|
||||
returncode, stdout, stderr = exec_command(
|
||||
['dcos', 'marathon', 'app', 'list', '--json'])
|
||||
|
||||
result = json.loads(stdout.decode('utf-8'))
|
||||
|
||||
if app_id is None:
|
||||
assert len(result) == 0
|
||||
else:
|
||||
assert len(result) == 1
|
||||
assert result[0]['id'] == '/' + app_id
|
||||
|
||||
assert returncode == 0
|
||||
assert stderr == b''
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def _start_app(app_id, instances=None):
|
||||
cmd = ['dcos', 'marathon', 'app', 'start', app_id]
|
||||
if instances is not None:
|
||||
cmd.append(str(instances))
|
||||
|
||||
returncode, stdout, stderr = exec_command(cmd)
|
||||
|
||||
assert returncode == 0
|
||||
assert stdout.decode().startswith('Created deployment ')
|
||||
assert stderr == b''
|
||||
|
||||
|
||||
def _update_app(app_id, file_path):
|
||||
with open(file_path) as fd:
|
||||
returncode, stdout, stderr = exec_command(
|
||||
|
||||
@@ -2,8 +2,9 @@ import contextlib
|
||||
import json
|
||||
import re
|
||||
|
||||
from .common import (app, exec_command, pod)
|
||||
from .test_marathon import (_list_tasks)
|
||||
from .helpers.common import exec_command
|
||||
from .helpers.marathon import app, pod
|
||||
from .test_marathon import _list_tasks
|
||||
|
||||
list_regex = '/stuck-(?:sleep|pod)\W+[^Z]+Z\W+9\W+(?:True|False)' \
|
||||
'\W+\d\W+\d\W+[^Z]+Z\W+[^Z]+Z'
|
||||
|
||||
@@ -1,35 +1,32 @@
|
||||
import contextlib
|
||||
import json
|
||||
import re
|
||||
|
||||
from .common import (assert_command, assert_lines, exec_command, remove_group,
|
||||
show_app, watch_all_deployments)
|
||||
from .helpers.common import assert_command, assert_lines, exec_command
|
||||
from .helpers.marathon import (group, remove_group, show_app,
|
||||
watch_all_deployments)
|
||||
|
||||
|
||||
GOOD_GROUP = 'tests/data/marathon/groups/good.json'
|
||||
SCALE_GROUP = 'tests/data/marathon/groups/scale.json'
|
||||
|
||||
|
||||
def test_deploy_group():
|
||||
_deploy_group(GOOD_GROUP)
|
||||
def test_add_group_by_stdin():
|
||||
_add_group_by_stdin(GOOD_GROUP)
|
||||
remove_group('test-group')
|
||||
|
||||
|
||||
def test_group_list_table():
|
||||
with _group(GOOD_GROUP, 'test-group'):
|
||||
with group(GOOD_GROUP, 'test-group'):
|
||||
assert_lines(['dcos', 'marathon', 'group', 'list'], 3)
|
||||
|
||||
|
||||
def test_validate_complicated_group_and_app():
|
||||
_deploy_group('tests/data/marathon/groups/complicated.json')
|
||||
remove_group('test-group')
|
||||
|
||||
|
||||
def test_optional_deploy_group():
|
||||
_deploy_group(GOOD_GROUP, False)
|
||||
remove_group('test-group')
|
||||
with group('tests/data/marathon/groups/complicated.json', 'test-group'):
|
||||
pass
|
||||
|
||||
|
||||
def test_add_existing_group():
|
||||
with _group(GOOD_GROUP, 'test-group'):
|
||||
with group(GOOD_GROUP, 'test-group'):
|
||||
with open(GOOD_GROUP) as fd:
|
||||
stderr = b"Group '/test-group' already exists\n"
|
||||
assert_command(['dcos', 'marathon', 'group', 'add'],
|
||||
@@ -38,11 +35,6 @@ def test_add_existing_group():
|
||||
stdin=fd)
|
||||
|
||||
|
||||
def test_show_group():
|
||||
with _group(GOOD_GROUP, 'test-group'):
|
||||
_show_group('test-group')
|
||||
|
||||
|
||||
def test_add_bad_complicated_group():
|
||||
with open('tests/data/marathon/groups/complicated_bad.json') as fd:
|
||||
returncode, stdout, stderr = exec_command(
|
||||
@@ -66,24 +58,8 @@ def test_add_bad_complicated_group():
|
||||
assert err in stderr
|
||||
|
||||
|
||||
def test_update_group():
|
||||
with _group(GOOD_GROUP, 'test-group'):
|
||||
newapp = json.dumps([{"id": "appadded", "cmd": "sleep 0"}])
|
||||
appjson = "apps={}".format(newapp)
|
||||
returncode, stdout, stderr = exec_command(
|
||||
['dcos', 'marathon', 'group', 'update', 'test-group/sleep',
|
||||
appjson])
|
||||
|
||||
assert returncode == 0
|
||||
assert stdout.decode().startswith('Created deployment ')
|
||||
assert stderr == b''
|
||||
|
||||
watch_all_deployments()
|
||||
show_app('test-group/sleep/appadded')
|
||||
|
||||
|
||||
def test_update_group_from_stdin():
|
||||
with _group(GOOD_GROUP, 'test-group'):
|
||||
with group(GOOD_GROUP, 'test-group'):
|
||||
_update_group(
|
||||
'test-group',
|
||||
'tests/data/marathon/groups/update_good.json')
|
||||
@@ -97,19 +73,17 @@ def test_update_missing_group():
|
||||
|
||||
|
||||
def test_scale_group():
|
||||
_deploy_group('tests/data/marathon/groups/scale.json')
|
||||
returncode, stdout, stderr = exec_command(['dcos', 'marathon', 'group',
|
||||
'scale', 'scale-group', '2'])
|
||||
assert stderr == b''
|
||||
assert returncode == 0
|
||||
watch_all_deployments()
|
||||
returncode, stdout, stderr = exec_command(
|
||||
['dcos', 'marathon', 'group', 'show',
|
||||
'scale-group'])
|
||||
res = json.loads(stdout.decode('utf-8'))
|
||||
with group(SCALE_GROUP, 'scale-group'):
|
||||
returncode, stdout, stderr = exec_command(
|
||||
['dcos', 'marathon', 'group', 'scale', 'scale-group', '2'])
|
||||
assert stderr == b''
|
||||
assert returncode == 0
|
||||
watch_all_deployments()
|
||||
returncode, stdout, stderr = exec_command(
|
||||
['dcos', 'marathon', 'group', 'show', 'scale-group'])
|
||||
res = json.loads(stdout.decode('utf-8'))
|
||||
|
||||
assert res['groups'][0]['apps'][0]['instances'] == 2
|
||||
remove_group('scale-group')
|
||||
assert res['groups'][0]['apps'][0]['instances'] == 2
|
||||
|
||||
|
||||
def test_scale_group_not_exist():
|
||||
@@ -127,64 +101,34 @@ def test_scale_group_not_exist():
|
||||
|
||||
|
||||
def test_scale_group_when_scale_factor_negative():
|
||||
_deploy_group('tests/data/marathon/groups/scale.json')
|
||||
returncode, stdout, stderr = exec_command(['dcos', 'marathon', 'group',
|
||||
'scale', 'scale-group', '-2'])
|
||||
assert b'Command not recognized' in stdout
|
||||
assert returncode == 1
|
||||
watch_all_deployments()
|
||||
remove_group('scale-group')
|
||||
with group(SCALE_GROUP, 'scale-group'):
|
||||
returncode, stdout, stderr = exec_command(
|
||||
['dcos', 'marathon', 'group', 'scale', 'scale-group', '-2'])
|
||||
assert b'Command not recognized' in stdout
|
||||
assert returncode == 1
|
||||
|
||||
|
||||
def test_scale_group_when_scale_factor_not_float():
|
||||
_deploy_group('tests/data/marathon/groups/scale.json')
|
||||
returncode, stdout, stderr = exec_command(['dcos', 'marathon', 'group',
|
||||
'scale', 'scale-group', '1.a'])
|
||||
assert stderr == b'Error parsing string as float\n'
|
||||
assert returncode == 1
|
||||
watch_all_deployments()
|
||||
remove_group('scale-group')
|
||||
with group(SCALE_GROUP, 'scale-group'):
|
||||
returncode, stdout, stderr = exec_command(
|
||||
['dcos', 'marathon', 'group', 'scale', 'scale-group', '1.a'])
|
||||
assert stderr == b'Error parsing string as float\n'
|
||||
assert returncode == 1
|
||||
|
||||
|
||||
def _deploy_group(file_path, stdin=True):
|
||||
if stdin:
|
||||
with open(file_path) as fd:
|
||||
cmd = ['dcos', 'marathon', 'group', 'add']
|
||||
returncode, stdout, stderr = exec_command(cmd, stdin=fd)
|
||||
assert returncode == 0
|
||||
assert re.fullmatch('Created deployment \S+\n',
|
||||
stdout.decode('utf-8'))
|
||||
assert stderr == b''
|
||||
else:
|
||||
cmd = ['dcos', 'marathon', 'group', 'add', file_path]
|
||||
returncode, stdout, stderr = exec_command(cmd)
|
||||
def _add_group_by_stdin(file_path):
|
||||
with open(file_path) as fd:
|
||||
cmd = ['dcos', 'marathon', 'group', 'add']
|
||||
returncode, stdout, stderr = exec_command(cmd, stdin=fd)
|
||||
assert returncode == 0
|
||||
assert re.fullmatch('Created deployment \S+\n', stdout.decode('utf-8'))
|
||||
assert re.fullmatch('Created deployment \S+\n',
|
||||
stdout.decode('utf-8'))
|
||||
assert stderr == b''
|
||||
|
||||
# Let's make sure that we don't return until the deployment has finished
|
||||
watch_all_deployments()
|
||||
|
||||
|
||||
def _show_group(group_id, version=None):
|
||||
if version is None:
|
||||
cmd = ['dcos', 'marathon', 'group', 'show', group_id]
|
||||
else:
|
||||
cmd = ['dcos', 'marathon', 'group', 'show',
|
||||
'--group-version={}'.format(version), group_id]
|
||||
|
||||
returncode, stdout, stderr = exec_command(cmd)
|
||||
|
||||
result = json.loads(stdout.decode('utf-8'))
|
||||
|
||||
assert returncode == 0
|
||||
assert isinstance(result, dict)
|
||||
assert result['id'] == '/' + group_id
|
||||
assert stderr == b''
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def _update_group(group_id, file_path):
|
||||
with open(file_path) as fd:
|
||||
returncode, stdout, stderr = exec_command(
|
||||
@@ -197,22 +141,3 @@ def _update_group(group_id, file_path):
|
||||
|
||||
# Let's make sure that we don't return until the deployment has finished
|
||||
watch_all_deployments()
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def _group(path, group_id):
|
||||
"""Context manager that deploys a group on entrance, and removes it on
|
||||
exit.
|
||||
|
||||
:param path: path to group's json definition
|
||||
:type path: str
|
||||
:param group_id: group id
|
||||
:type group_id: str
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
_deploy_group(path)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
remove_group(group_id)
|
||||
|
||||
@@ -4,12 +4,11 @@ import time
|
||||
|
||||
import pytest
|
||||
|
||||
from .common import (add_pod, assert_command, exec_command,
|
||||
file_json_ast, pod, pods, remove_pod,
|
||||
watch_all_deployments)
|
||||
from .helpers.common import assert_command, exec_command
|
||||
from .helpers.marathon import (add_pod, pod, pod_spec_json, pods, remove_pod,
|
||||
watch_all_deployments)
|
||||
from ..fixtures.marathon import (DOUBLE_POD_FILE_PATH, DOUBLE_POD_ID,
|
||||
GOOD_POD_FILE_PATH, GOOD_POD_ID,
|
||||
GOOD_POD_STATUS_FILE_PATH,
|
||||
POD_KILL_FILE_PATH, POD_KILL_ID,
|
||||
pod_list_fixture, TRIPLE_POD_FILE_PATH,
|
||||
TRIPLE_POD_ID, UNGOOD_POD_FILE_PATH,
|
||||
@@ -30,12 +29,6 @@ def test_pod_add_from_file():
|
||||
watch_all_deployments()
|
||||
|
||||
|
||||
def test_pod_add_from_stdin():
|
||||
_pod_add_from_stdin(GOOD_POD_FILE_PATH)
|
||||
remove_pod(GOOD_POD_ID)
|
||||
watch_all_deployments()
|
||||
|
||||
|
||||
def test_pod_list():
|
||||
expected_json = pod_list_fixture()
|
||||
|
||||
@@ -47,13 +40,6 @@ def test_pod_list():
|
||||
_assert_pod_list_table()
|
||||
|
||||
|
||||
def test_pod_show():
|
||||
expected_json = file_json_ast(GOOD_POD_STATUS_FILE_PATH)
|
||||
|
||||
with pod(GOOD_POD_FILE_PATH, GOOD_POD_ID):
|
||||
_assert_pod_show(GOOD_POD_ID, expected_json)
|
||||
|
||||
|
||||
def test_pod_update_does_not_support_properties():
|
||||
cmd = _POD_UPDATE_CMD + ['any-pod', 'foo=bar']
|
||||
returncode, stdout, stderr = exec_command(cmd)
|
||||
@@ -123,74 +109,11 @@ def _assert_pod_list_json_subset(expected_json, actual_json):
|
||||
for expected_pod in expected_json:
|
||||
pod_id = expected_pod['id']
|
||||
actual_pod = actual_pods_by_id[pod_id]
|
||||
_assert_pod_spec_json(expected_pod['spec'], actual_pod['spec'])
|
||||
pod_spec_json(expected_pod['spec'], actual_pod['spec'])
|
||||
|
||||
assert len(actual_json) == len(expected_json)
|
||||
|
||||
|
||||
def _assert_pod_status_json(expected_pod_status, actual_pod_status):
|
||||
"""Checks that the "actual" pod status JSON matched the "expected" JSON.
|
||||
|
||||
The comparison only looks at specific fields that are present in the
|
||||
test data used by this module.
|
||||
|
||||
:param expected_pod_status: contains the baseline values for the comparison
|
||||
:type expected_pod_status: {}
|
||||
:param actual_pod_status: has its fields checked against expected's fields
|
||||
:type actual_pod_status: {}
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
assert actual_pod_status['id'] == expected_pod_status['id']
|
||||
assert actual_pod_status['status'] == expected_pod_status['status']
|
||||
assert len(actual_pod_status['instances']) == \
|
||||
len(expected_pod_status['instances'])
|
||||
|
||||
_assert_pod_spec_json(expected_pod_status['spec'],
|
||||
actual_pod_status['spec'])
|
||||
|
||||
expected_instance = expected_pod_status['instances'][0]
|
||||
expected_container_statuses = {container['name']: container['status']
|
||||
for container
|
||||
in expected_instance['containers']}
|
||||
|
||||
for actual_instance in actual_pod_status['instances']:
|
||||
assert actual_instance['status'] == expected_instance['status']
|
||||
|
||||
actual_container_statuses = {container['name']: container['status']
|
||||
for container
|
||||
in actual_instance['containers']}
|
||||
|
||||
assert actual_container_statuses == expected_container_statuses
|
||||
|
||||
|
||||
def _assert_pod_spec_json(expected_pod_spec, actual_pod_spec):
|
||||
"""Checks that the "actual" pod spec JSON matches the "expected" JSON.
|
||||
|
||||
The comparison only looks at specific fields that are present in the
|
||||
test data used by this module.
|
||||
|
||||
:param expected_pod_spec: contains the baseline values for the comparison
|
||||
:type expected_pod_spec: {}
|
||||
:param actual_pod_spec: has its fields checked against the expected fields
|
||||
:type actual_pod_spec: {}
|
||||
:rtype: None
|
||||
"""
|
||||
|
||||
expected_containers = expected_pod_spec['containers']
|
||||
actual_containers = actual_pod_spec['containers']
|
||||
actual_containers_by_name = {c['name']: c for c in actual_containers}
|
||||
|
||||
for expected_container in expected_containers:
|
||||
container_name = expected_container['name']
|
||||
actual_container = actual_containers_by_name[container_name]
|
||||
|
||||
for k, v in expected_container['resources'].items():
|
||||
assert actual_container['resources'][k] == v
|
||||
|
||||
assert len(actual_containers) == len(expected_containers)
|
||||
|
||||
|
||||
def _assert_pod_list_table():
|
||||
_wait_for_instances({'/double-pod': 2, '/good-pod': 1, '/winston': 1})
|
||||
returncode, stdout, stderr = exec_command(_POD_LIST_CMD)
|
||||
@@ -217,17 +140,6 @@ def _assert_pod_list_table():
|
||||
assert len(stdout_lines) == 11
|
||||
|
||||
|
||||
def _assert_pod_show(pod_id, expected_json):
|
||||
cmd = _POD_SHOW_CMD + [pod_id]
|
||||
returncode, stdout, stderr = exec_command(cmd)
|
||||
|
||||
assert returncode == 0
|
||||
assert stderr == b''
|
||||
|
||||
pod_status_json = json.loads(stdout.decode('utf-8'))
|
||||
_assert_pod_status_json(expected_json, pod_status_json)
|
||||
|
||||
|
||||
def _assert_pod_update_from_stdin(extra_args, pod_json_file_path):
|
||||
cmd = _POD_UPDATE_CMD + [GOOD_POD_ID] + extra_args
|
||||
with open(pod_json_file_path) as fd:
|
||||
|
||||
@@ -9,8 +9,8 @@ import dcos.util as util
|
||||
from dcos import mesos
|
||||
from dcos.util import create_schema
|
||||
|
||||
from .common import assert_command, assert_lines, exec_command, \
|
||||
fetch_valid_json, ssh_output
|
||||
from .helpers.common import (assert_command, assert_lines, exec_command,
|
||||
fetch_valid_json, ssh_output)
|
||||
from ..fixtures.node import slave_fixture
|
||||
|
||||
|
||||
|
||||
@@ -9,13 +9,14 @@ import six
|
||||
|
||||
from dcos import constants, subcommand
|
||||
|
||||
from .common import (assert_command, assert_lines, base64_to_dict,
|
||||
delete_zk_node, delete_zk_nodes, exec_command, file_json,
|
||||
get_services, package_install, package_uninstall,
|
||||
service_shutdown, setup_universe_server,
|
||||
teardown_universe_server, UNIVERSE_REPO,
|
||||
UNIVERSE_TEST_REPO, update_config, wait_for_service,
|
||||
watch_all_deployments)
|
||||
from .helpers.common import (assert_command, assert_lines, base64_to_dict,
|
||||
delete_zk_node, delete_zk_nodes, exec_command,
|
||||
file_json, update_config)
|
||||
from .helpers.marathon import watch_all_deployments
|
||||
from .helpers.package import (package_install, package_uninstall,
|
||||
setup_universe_server, teardown_universe_server,
|
||||
UNIVERSE_REPO, UNIVERSE_TEST_REPO)
|
||||
from .helpers.service import get_services, service_shutdown
|
||||
from ..common import file_bytes
|
||||
|
||||
|
||||
@@ -81,13 +82,6 @@ def test_repo_list():
|
||||
assert_command(['dcos', 'package', 'repo', 'list'], stdout=repo_list)
|
||||
|
||||
|
||||
def test_repo_list_json():
|
||||
repo_list = file_json(
|
||||
'tests/data/package/json/test_repo_list.json')
|
||||
assert_command(
|
||||
['dcos', 'package', 'repo', 'list', '--json'], stdout=repo_list)
|
||||
|
||||
|
||||
def test_repo_add():
|
||||
repo_list = bytes("test-universe: {}\nUniverse: {}\n".format(
|
||||
UNIVERSE_TEST_REPO, UNIVERSE_REPO), 'utf-8')
|
||||
@@ -148,19 +142,6 @@ def test_describe_nonexistent_version():
|
||||
returncode=1)
|
||||
|
||||
|
||||
def test_describe():
|
||||
stdout = file_json(
|
||||
'tests/data/package/json/test_describe_marathon.json')
|
||||
|
||||
returncode_, stdout_, stderr_ = exec_command(
|
||||
['dcos', 'package', 'describe', 'marathon'])
|
||||
|
||||
assert returncode_ == 0
|
||||
output = json.loads(stdout_.decode('utf-8'))
|
||||
assert output == json.loads(stdout.decode('utf-8'))
|
||||
assert stderr_ == b''
|
||||
|
||||
|
||||
def test_describe_cli():
|
||||
stdout = file_json(
|
||||
'tests/data/package/json/test_describe_cli_kafka.json')
|
||||
@@ -299,15 +280,6 @@ Please create a JSON file with the appropriate options, and pass the \
|
||||
stderr=stderr)
|
||||
|
||||
|
||||
def test_install(zk_znode):
|
||||
with _chronos_package():
|
||||
watch_all_deployments()
|
||||
wait_for_service('chronos')
|
||||
services = get_services(args=['--inactive'])
|
||||
assert len([service for service in services
|
||||
if service['name'] == 'chronos']) == 0
|
||||
|
||||
|
||||
def test_bad_install_marathon_msg():
|
||||
stdout = (b'A sample pre-installation message\n'
|
||||
b'Installing Marathon app for package [helloworld] version '
|
||||
|
||||
100
cli/tests/integrations/test_sanity.py
Normal file
100
cli/tests/integrations/test_sanity.py
Normal file
@@ -0,0 +1,100 @@
|
||||
import json
|
||||
|
||||
from .helpers.common import (assert_command, delete_zk_nodes, exec_command,
|
||||
file_json, file_json_ast)
|
||||
from .helpers.marathon import (app, group, pod, show_app, show_group,
|
||||
show_pod, start_app, watch_all_deployments)
|
||||
from .helpers.package import (package, setup_universe_server,
|
||||
teardown_universe_server)
|
||||
from .helpers.service import get_services, wait_for_service
|
||||
|
||||
|
||||
def setup_module(module):
|
||||
setup_universe_server()
|
||||
|
||||
|
||||
def teardown_module(module):
|
||||
teardown_universe_server()
|
||||
delete_zk_nodes()
|
||||
|
||||
|
||||
_ZERO_INSTANCE_APP = 'tests/data/marathon/apps/zero_instance_sleep.json'
|
||||
|
||||
|
||||
def test_add_app():
|
||||
app_id = 'zero-instance-app'
|
||||
with app(_ZERO_INSTANCE_APP, app_id):
|
||||
show_app('zero-instance-app')
|
||||
|
||||
|
||||
def test_restarting_app():
|
||||
app_id = 'zero-instance-app'
|
||||
with app(_ZERO_INSTANCE_APP, app_id):
|
||||
start_app(app_id, 3)
|
||||
watch_all_deployments()
|
||||
returncode, stdout, stderr = exec_command(
|
||||
['dcos', 'marathon', 'app', 'restart', app_id])
|
||||
|
||||
assert returncode == 0
|
||||
assert stdout.decode().startswith('Created deployment ')
|
||||
assert stderr == b''
|
||||
|
||||
|
||||
def test_add_group():
|
||||
group_id = 'test-group'
|
||||
with group('tests/data/marathon/groups/good.json', group_id):
|
||||
show_group(group_id)
|
||||
|
||||
|
||||
def test_update_group():
|
||||
group_app = 'tests/data/marathon/groups/good.json'
|
||||
with group(group_app, 'test-group'):
|
||||
newapp = json.dumps([{"id": "appadded", "cmd": "sleep 0"}])
|
||||
appjson = "apps={}".format(newapp)
|
||||
returncode, stdout, stderr = exec_command(
|
||||
['dcos', 'marathon', 'group', 'update', 'test-group/sleep',
|
||||
appjson])
|
||||
|
||||
assert returncode == 0
|
||||
assert stdout.decode().startswith('Created deployment ')
|
||||
assert stderr == b''
|
||||
|
||||
watch_all_deployments()
|
||||
show_app('test-group/sleep/appadded')
|
||||
|
||||
|
||||
def test_add_pod():
|
||||
pod_id = 'good-pod'
|
||||
with pod('tests/data/marathon/pods/good.json', pod_id):
|
||||
expected = file_json_ast('tests/data/marathon/pods/good_status.json')
|
||||
show_pod(pod_id, expected)
|
||||
|
||||
|
||||
def test_repo_list():
|
||||
repo_list = file_json(
|
||||
'tests/data/package/json/test_repo_list.json')
|
||||
assert_command(
|
||||
['dcos', 'package', 'repo', 'list', '--json'], stdout=repo_list)
|
||||
|
||||
|
||||
def test_package_describe():
|
||||
stdout = file_json(
|
||||
'tests/data/package/json/test_describe_marathon.json')
|
||||
|
||||
returncode_, stdout_, stderr_ = exec_command(
|
||||
['dcos', 'package', 'describe', 'marathon'])
|
||||
|
||||
assert returncode_ == 0
|
||||
output = json.loads(stdout_.decode('utf-8'))
|
||||
assert output == json.loads(stdout.decode('utf-8'))
|
||||
assert stderr_ == b''
|
||||
|
||||
|
||||
def test_install():
|
||||
with package('chronos', deploy=True, args=[]):
|
||||
watch_all_deployments()
|
||||
wait_for_service('chronos')
|
||||
|
||||
services = get_services(args=['--inactive'])
|
||||
assert len([service for service in services
|
||||
if service['name'] == 'chronos']) == 0
|
||||
@@ -9,11 +9,12 @@ import pytest
|
||||
import dcos.util as util
|
||||
from dcos.util import create_schema
|
||||
|
||||
from .common import (assert_command, assert_lines, delete_zk_node,
|
||||
delete_zk_nodes, exec_command, get_services,
|
||||
package, package_install, remove_app, service_shutdown,
|
||||
setup_universe_server, ssh_output,
|
||||
teardown_universe_server, wait_for_service)
|
||||
from .helpers.common import (assert_command, assert_lines, delete_zk_node,
|
||||
delete_zk_nodes, exec_command, ssh_output)
|
||||
from .helpers.marathon import remove_app
|
||||
from .helpers.package import (package, package_install, setup_universe_server,
|
||||
teardown_universe_server)
|
||||
from .helpers.service import get_services, service_shutdown, wait_for_service
|
||||
from ..fixtures.service import framework_fixture
|
||||
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ import pytest
|
||||
|
||||
from dcos import config, constants
|
||||
|
||||
from .common import config_set, exec_command, update_config
|
||||
from .helpers.common import config_set, exec_command, update_config
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
|
||||
@@ -11,8 +11,9 @@ import pytest
|
||||
import dcos.util as util
|
||||
from dcos.util import create_schema
|
||||
|
||||
from .common import (add_app, app, assert_command, assert_lines, exec_command,
|
||||
pod, remove_app, watch_all_deployments)
|
||||
from .helpers.common import assert_command, assert_lines, exec_command
|
||||
from .helpers.marathon import (add_app, app, pod, remove_app,
|
||||
watch_all_deployments)
|
||||
from ..fixtures.task import task_fixture
|
||||
|
||||
SLEEP_COMPLETED = 'tests/data/marathon/apps/sleep-completed.json'
|
||||
|
||||
Reference in New Issue
Block a user