Log exceptions when checking status

This change adds logging whenever a request returns a status code of >= 300.
The check_status method now derives a SHA1 ID from the request URL and
logs the status, reason, and text (plus the request and session headers
at debug level) whenever a request returns a status code of >= 300. This
makes image processing issues easier to debug, even when requests are
issued from multiple worker processes, and gives deployers the ability
to track issues down to specific images and endpoints.
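
As a rough illustration only (hypothetical helper name, not the exact
tripleo-common implementation), the logging described above amounts to
something like this sketch:

    import hashlib
    import logging

    LOG = logging.getLogger(__name__)

    def log_bad_status(response):
        # Derive a stable ID from the request URL so log lines emitted by
        # different worker processes can be correlated per image/endpoint.
        request_id = hashlib.sha1(str(response.url).encode()).hexdigest()
        if response.status_code >= 300:
            LOG.info('Non-2xx: id %s, status %s, reason %s, text %s',
                     request_id, response.status_code, response.reason,
                     response.text)
            LOG.debug('Headers: id %s, headers %s',
                      request_id, response.headers)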

Session creation was moved into a single object class to ensure we're
managing sessions in a uniform way.
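
A minimal sketch of that idea (illustrative class name and pool sizes;
the real class is MakeSession in the diff below):

    import requests
    from requests.adapters import HTTPAdapter

    class SessionFactory(object):
        """Create requests sessions in one uniform way."""

        def __init__(self, verify=True):
            self.session = requests.Session()
            self.session.verify = verify
            # Retry transient connection failures before giving up;
            # max_retries and pool sizes here are illustrative values.
            adapter = HTTPAdapter(max_retries=8,
                                  pool_connections=24,
                                  pool_maxsize=24)
            self.session.mount('http://', adapter)
            self.session.mount('https://', adapter)

        def __enter__(self):
            return self.session

        def __exit__(self, *exc):
            self.session.close()

    # Usage: the context manager guarantees the session is closed.
    with SessionFactory(verify=False) as session:
        # placeholder registry URL, for illustration only
        session.get('https://registry.example.com/v2/', timeout=30)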

Change-Id: If4e84a0273e295267248559c63ae994a5a826004
Signed-off-by: Kevin Carter <kecarter@redhat.com>
(cherry picked from commit e06cf4482d)
Author: Kevin Carter, 2019-08-06 14:21:06 -05:00 (committed by Kevin Carter)
Parent: 42d9fdb0c7
Commit: 4e6995dc44
4 changed files with 288 additions and 107 deletions


@@ -17,6 +17,7 @@ import collections
 import hashlib
 import json
 import os
+import requests
 import shutil
 from oslo_log import log as logging
@@ -54,7 +55,7 @@ def image_tag_from_url(image_url):
 def export_stream(target_url, layer, layer_stream, verify_digest=True):
-    image, tag = image_tag_from_url(target_url)
+    image, _ = image_tag_from_url(target_url)
     digest = layer['digest']
     blob_dir_path = os.path.join(IMAGE_EXPORT_DIR, 'v2', image, 'blobs')
     make_dir(blob_dir_path)
@@ -64,37 +65,54 @@ def export_stream(target_url, layer, layer_stream, verify_digest=True):
     length = 0
     calc_digest = hashlib.sha256()
     try:
-        with open(blob_path, 'w+b') as f:
+        with open(blob_path, 'wb') as f:
             for chunk in layer_stream:
                 if not chunk:
                     break
                 f.write(chunk)
                 calc_digest.update(chunk)
                 length += len(chunk)
-        layer_digest = 'sha256:%s' % calc_digest.hexdigest()
-        LOG.debug('Calculated layer digest: %s' % layer_digest)
-        if verify_digest:
-            if digest != layer_digest:
-                raise IOError('Expected digest %s '
-                              'does not match calculated %s' %
-                              (digest, layer_digest))
-        else:
-            # if the original layer is uncompressed
-            # the digest may change on export
-            expected_blob_path = os.path.join(
-                blob_dir_path, '%s.gz' % layer_digest)
-            if blob_path != expected_blob_path:
-                os.rename(blob_path, expected_blob_path)
     except Exception as e:
-        LOG.error('Error while writing blob %s' % blob_path)
-        # cleanup blob file
+        write_error = 'Write Failure: {}'.format(str(e))
+        LOG.error(write_error)
         if os.path.isfile(blob_path):
             os.remove(blob_path)
-        raise e
+            LOG.error('Broken layer found and removed: %s' % blob_path)
+        raise IOError(write_error)
+    else:
+        LOG.info('Layer written successfully: %s' % blob_path)
+
+    layer_digest = 'sha256:%s' % calc_digest.hexdigest()
+    LOG.debug('Provided layer digest: %s' % digest)
+    LOG.debug('Calculated layer digest: %s' % layer_digest)
+    if verify_digest:
+        if digest != layer_digest:
+            hash_request_id = hashlib.sha1(str(target_url.geturl()).encode())
+            error_msg = (
+                'Image ID: %s, Expected digest "%s" does not match'
+                ' calculated digest "%s", Blob path "%s". Blob'
+                ' path will be cleaned up.' % (
+                    hash_request_id.hexdigest(),
+                    digest,
+                    layer_digest,
+                    blob_path
+                )
+            )
+            LOG.error(error_msg)
+            if os.path.isfile(blob_path):
+                os.remove(blob_path)
+            raise requests.exceptions.HTTPError(error_msg)
+    else:
+        # if the original layer is uncompressed
+        # the digest may change on export
+        expected_blob_path = os.path.join(
+            blob_dir_path, '%s.gz' % layer_digest
+        )
+        if blob_path != expected_blob_path:
+            os.rename(blob_path, expected_blob_path)
     layer['digest'] = layer_digest
     layer['size'] = length


@@ -21,6 +21,7 @@ import netifaces
 import os
 import re
 import requests
+from requests.adapters import HTTPAdapter
 from requests import auth as requests_auth
 import shutil
 import six
@@ -100,6 +101,38 @@ def get_undercloud_registry():
     return '%s:%s' % (addr, '8787')


+class MakeSession(object):
+    """Class method to uniformly create sessions.
+
+    Sessions created by this class will retry on errors with an exponential
+    backoff before raising an exception. Because our primary interaction is
+    with the container registries the adapter will also retry on 401 and
+    404. This is being done because registries commonly return 401 when an
+    image is not found, which is commonly a cache miss. See the adapter
+    definitions for more on retry details.
+    """
+    def __init__(self, verify=True):
+        self.session = requests.Session()
+        self.session.verify = verify
+        adapter = HTTPAdapter(
+            max_retries=8,
+            pool_connections=24,
+            pool_maxsize=24,
+            pool_block=False
+        )
+        self.session.mount('http://', adapter)
+        self.session.mount('https://', adapter)
+
+    def create(self):
+        return self.__enter__()
+
+    def __enter__(self):
+        return self.session
+
+    def __exit__(self, *args, **kwargs):
+        self.session.close()
+
+
 class ImageUploadManager(BaseImageManager):
     """Manage the uploading of image files
@@ -121,13 +154,15 @@ class ImageUploadManager(BaseImageManager):
         self.cleanup = cleanup
         if mirrors:
             for uploader in self.uploaders.values():
-                uploader.mirrors.update(mirrors)
+                if hasattr(uploader, 'mirrors'):
+                    uploader.mirrors.update(mirrors)
         if registry_credentials:
             self.validate_registry_credentials(registry_credentials)
             for uploader in self.uploaders.values():
                 uploader.registry_credentials = registry_credentials

-    def validate_registry_credentials(self, creds_data):
+    @staticmethod
+    def validate_registry_credentials(creds_data):
         if not isinstance(creds_data, dict):
             raise TypeError('Credentials data must be a dict')
         for registry, cred_entry in creds_data.items():
@@ -158,7 +193,8 @@ class ImageUploadManager(BaseImageManager):
     def get_uploader(self, uploader):
         return self.uploader(uploader)

-    def get_push_destination(self, item):
+    @staticmethod
+    def get_push_destination(item):
         push_destination = item.get('push_destination')
         if not push_destination:
             return get_undercloud_registry()
@@ -247,15 +283,15 @@ class BaseImageUploader(object):
     def run_modify_playbook(cls, modify_role, modify_vars,
                             source_image, target_image, append_tag,
                             container_build_tool='buildah'):
-        vars = {}
+        run_vars = {}
         if modify_vars:
-            vars.update(modify_vars)
-        vars['source_image'] = source_image
-        vars['target_image'] = target_image
-        vars['modified_append_tag'] = append_tag
-        vars['container_build_tool'] = container_build_tool
+            run_vars.update(modify_vars)
+        run_vars['source_image'] = source_image
+        run_vars['target_image'] = target_image
+        run_vars['modified_append_tag'] = append_tag
+        run_vars['container_build_tool'] = container_build_tool
         LOG.info('Playbook variables: \n%s' % yaml.safe_dump(
-            vars, default_flow_style=False))
+            run_vars, default_flow_style=False))
         playbook = [{
             'hosts': 'localhost',
             'tasks': [{
@@ -263,7 +299,7 @@ class BaseImageUploader(object):
                 'import_role': {
                     'name': modify_role
                 },
-                'vars': vars
+                'vars': run_vars
             }]
         }]
         LOG.info('Playbook: \n%s' % yaml.safe_dump(
@@ -341,11 +377,15 @@ class BaseImageUploader(object):
                      session=None):
         netloc = image_url.netloc
         image, tag = self._image_tag_from_url(image_url)
-        self.is_insecure_registry(netloc)
+        self.is_insecure_registry(registry_host=netloc)
         url = self._build_url(image_url, path='/')
-        session = requests.Session()
-        session.verify = (netloc not in self.no_verify_registries)
+        verify = (netloc not in self.no_verify_registries)
+        if not session:
+            session = MakeSession(verify=verify).create()
+        else:
+            session.headers.pop('Authorization', None)
+            session.verify = verify
+
         r = session.get(url, timeout=30)
         LOG.debug('%s status code %s' % (url, r.status_code))
         if r.status_code == 200:
@@ -367,12 +407,20 @@ class BaseImageUploader(object):
             token_param['service'] = re.search(
                 'service="(.*?)"', www_auth).group(1)
             token_param['scope'] = 'repository:%s:pull' % image[1:]
+
         auth = None
         if username:
             auth = requests_auth.HTTPBasicAuth(username, password)
+        LOG.debug('Token parameters: params {}'.format(token_param))
         rauth = session.get(realm, params=token_param, auth=auth, timeout=30)
         rauth.raise_for_status()
         session.headers['Authorization'] = 'Bearer %s' % rauth.json()['token']
+        hash_request_id = hashlib.sha1(str(rauth.url).encode())
+        LOG.info(
+            'Session authenticated: id {}'.format(
+                hash_request_id.hexdigest()
+            )
+        )
         setattr(session, 'reauthenticate', self.authenticate)
         setattr(
             session,
@@ -387,14 +435,93 @@ class BaseImageUploader(object):
         return session

     @staticmethod
-    def check_status(session, request):
-        if hasattr(session, 'reauthenticate'):
-            if request.status_code == 401:
-                session.reauthenticate(**session.auth_args)
-                if hasattr(request, 'text'):
-                    raise requests.exceptions.HTTPError(request.text)
-                else:
-                    raise SystemError()
+    def _get_response_text(response, encoding='utf-8', force_encoding=False):
+        """Return request response text
+
+        We need to set the encoding for the response other wise it
+        will attempt to detect the encoding which is very time consuming.
+        See https://github.com/psf/requests/issues/4235 for additional
+        context.
+
+        :param: response: requests Respoinse object
+        :param: encoding: encoding to set if not currently set
+        :param: force_encoding: set response encoding always
+        """
+        if force_encoding or not response.encoding:
+            response.encoding = encoding
+        return response.text
+
+    @staticmethod
+    def check_status(session, request, allow_reauth=True):
+        hash_request_id = hashlib.sha1(str(request.url).encode())
+        request_id = hash_request_id.hexdigest()
+        text = getattr(request, 'text', 'unknown')
+        reason = getattr(request, 'reason', 'unknown')
+        status_code = getattr(request, 'status_code', None)
+        headers = getattr(request, 'headers', {})
+        session_headers = getattr(session, 'headers', {})
+
+        if status_code >= 300:
+            LOG.info(
+                'Non-2xx: id {}, status {}, reason {}, text {}'.format(
+                    request_id,
+                    status_code,
+                    reason,
+                    text
+                )
+            )
+
+        if status_code == 401:
+            LOG.warning(
+                'Failure: id {}, status {}, reason {} text {}'.format(
+                    request_id,
+                    status_code,
+                    reason,
+                    text
+                )
+            )
+            LOG.debug(
+                'Request headers after 401: id {}, headers {}'.format(
+                    request_id,
+                    headers
+                )
+            )
+            LOG.debug(
+                'Session headers after 401: id {}, headers {}'.format(
+                    request_id,
+                    session_headers
+                )
+            )
+            www_auth = headers.get(
+                'www-authenticate',
+                headers.get(
+                    'Www-Authenticate'
+                )
+            )
+            if www_auth:
+                error = None
+                if 'error=' in www_auth:
+                    error = re.search('error="(.*?)"', www_auth).group(1)
+                    LOG.warning(
+                        'Error detected in auth headers: error {}'.format(
+                            error
+                        )
+                    )
+                if error == 'invalid_token' and allow_reauth:
+                    if hasattr(session, 'reauthenticate'):
+                        reauth = int(session.headers.get('_TripleOReAuth', 0))
+                        reauth += 1
+                        session.headers['_TripleOReAuth'] = str(reauth)
+                        LOG.warning(
+                            'Re-authenticating: id {}, count {}'.format(
+                                request_id,
+                                reauth
+                            )
+                        )
+                        session.reauthenticate(**session.auth_args)

         request.raise_for_status()

     @classmethod
@@ -404,10 +531,10 @@ class BaseImageUploader(object):
             mirror = cls.mirrors[netloc]
             return '%sv2%s' % (mirror, path)
         else:
-            if netloc in cls.insecure_registries:
-                scheme = 'http'
-            else:
+            if not cls.is_insecure_registry(registry_host=netloc):
                 scheme = 'https'
+            else:
+                scheme = 'http'
             if netloc == 'docker.io':
                 netloc = 'registry-1.docker.io'
             return '%s://%s/v2%s' % (scheme, netloc, path)
@@ -457,7 +584,7 @@ class BaseImageUploader(object):
         tags_r = tags_f.result()
         cls.check_status(session=session, request=tags_r)

-        manifest_str = manifest_r.text
+        manifest_str = cls._get_response_text(manifest_r)

         if 'Docker-Content-Digest' in manifest_r.headers:
             digest = manifest_r.headers['Docker-Content-Digest']
@@ -512,7 +639,7 @@ class BaseImageUploader(object):
         }

     def list(self, registry, session=None):
-        self.is_insecure_registry(registry)
+        self.is_insecure_registry(registry_host=registry)
         url = self._image_to_url(registry)
         catalog_url = self._build_url(
             url, CALL_CATALOG
@@ -525,7 +652,7 @@ class BaseImageUploader(object):
         else:
             raise ImageUploaderException(
                 'Image registry made invalid response: %s' %
-                (catalog_resp.status_code)
+                catalog_resp.status_code
             )

         tags_get_args = []
@@ -589,7 +716,7 @@ class BaseImageUploader(object):
                                fallback_tag=None):
         labels = i.get('Labels', {})

-        if(hasattr(labels, 'keys')):
+        if hasattr(labels, 'keys'):
            label_keys = ', '.join(labels.keys())
        else:
            label_keys = ""
@@ -614,7 +741,7 @@ class BaseImageUploader(object):
            )
        else:
            tag_label = None
-           if(isinstance(labels, dict)):
+           if isinstance(labels, dict):
               tag_label = labels.get(tag_from_label)
        if tag_label is None:
            if fallback_tag:
@@ -639,7 +766,7 @@ class BaseImageUploader(object):
         # prime self.insecure_registries by testing every image
         for url in image_urls:
-            self.is_insecure_registry(url)
+            self.is_insecure_registry(registry_host=url)

         discover_args = []
         for image in images:
@@ -655,7 +782,7 @@ class BaseImageUploader(object):
     def discover_image_tag(self, image, tag_from_label=None,
                            fallback_tag=None, username=None, password=None):
         image_url = self._image_to_url(image)
-        self.is_insecure_registry(image_url.netloc)
+        self.is_insecure_registry(registry_host=image_url.netloc)
         session = self.authenticate(
             image_url, username=username, password=password)
@@ -668,7 +795,7 @@ class BaseImageUploader(object):
         images_with_labels = []
         for image in images:
             url = self._image_to_url(image)
-            self.is_insecure_registry(url.netloc)
+            self.is_insecure_registry(registry_host=url.netloc)
             session = self.authenticate(
                 url, username=username, password=password)
             image_labels = self._image_labels(
@@ -682,19 +809,23 @@ class BaseImageUploader(object):
         # prime insecure_registries
         if task.pull_source:
             self.is_insecure_registry(
-                self._image_to_url(task.pull_source).netloc)
+                registry_host=self._image_to_url(task.pull_source).netloc
+            )
         else:
             self.is_insecure_registry(
-                self._image_to_url(task.image_name).netloc)
+                registry_host=self._image_to_url(task.image_name).netloc
+            )
         self.is_insecure_registry(
-            self._image_to_url(task.push_destination).netloc)
+            registry_host=self._image_to_url(task.push_destination).netloc
+        )
         self.upload_tasks.append((self, task))

-    def is_insecure_registry(self, registry_host):
-        if registry_host in self.secure_registries:
+    @classmethod
+    def is_insecure_registry(cls, registry_host):
+        if registry_host in cls.secure_registries:
             return False
-        if (registry_host in self.insecure_registries or
-                registry_host in self.no_verify_registries):
+        if (registry_host in cls.insecure_registries or
+                registry_host in cls.no_verify_registries):
             return True
         with requests.Session() as s:
             try:
@@ -705,7 +836,7 @@ class BaseImageUploader(object):
             try:
                 s.get('https://%s/v2' % registry_host, timeout=30,
                       verify=False)
-                self.no_verify_registries.add(registry_host)
+                cls.no_verify_registries.add(registry_host)
                 # Techinically these type of registries are insecure when
                 # the container engine tries to do a pull. The python
                 # uploader ignores the certificate problem, but they are
@@ -714,14 +845,14 @@ class BaseImageUploader(object):
                 return True
             except requests.exceptions.SSLError:
                 # So nope, it's really not a certificate verification issue
-                self.insecure_registries.add(registry_host)
+                cls.insecure_registries.add(registry_host)
                 return True
             except Exception:
                 # for any other error assume it is a secure registry, because:
                 # - it is secure registry
                 # - the host is not accessible
                 pass
-        self.secure_registries.add(registry_host)
+        cls.secure_registries.add(registry_host)
         return False

     @classmethod
@@ -919,14 +1050,11 @@ class SkopeoImageUploader(BaseImageUploader):
         # Pull a single image first, to avoid duplicate pulls of the
         # same base layers
-        uploader, first_task = self.upload_tasks.pop()
-        result = uploader.upload_image(first_task)
-        local_images.extend(result)
+        local_images.extend(upload_task(args=self.upload_tasks.pop()))

         # workers will be half the CPU count, to a minimum of 2
-        workers = max(2, processutils.get_worker_count() // 2)
+        workers = max(2, (processutils.get_worker_count() - 1))
         p = futures.ThreadPoolExecutor(max_workers=workers)
         for result in p.map(upload_task, self.upload_tasks):
             local_images.extend(result)
         LOG.info('result %s' % local_images)
@@ -1004,7 +1132,11 @@ class PythonImageUploader(BaseImageUploader):
         if not t.modify_role:
             LOG.warning('Completed upload for image %s' % t.image_name)
         else:
-            # Copy ummodified from target to local
+            LOG.info(
+                'Copy ummodified imagename: "{}" from target to local'.format(
+                    t.image_name
+                )
+            )
             self._copy_registry_to_local(t.target_image_source_tag_url)

         if t.cleanup in (CLEANUP_FULL, CLEANUP_PARTIAL):
@@ -1056,7 +1188,7 @@ class PythonImageUploader(BaseImageUploader):
             return False

         # detect if the registry is push-capable by requesting an upload URL.
-        image, tag = cls._image_tag_from_url(image_url)
+        image, _ = cls._image_tag_from_url(image_url)
         upload_req_url = cls._build_url(
             image_url,
             path=CALL_UPLOAD % {'image': image})
@@ -1091,7 +1223,7 @@ class PythonImageUploader(BaseImageUploader):
         if r.status_code in (403, 404):
             raise ImageNotFoundException('Not found image: %s' % url)
         cls.check_status(session=session, request=r)
-        return r.text
+        return cls._get_response_text(r)

     @classmethod
     @tenacity.retry(  # Retry up to 5 times with jittered exponential backoff
@@ -1137,6 +1269,8 @@ class PythonImageUploader(BaseImageUploader):
         chunk_size = 2 ** 20
         with session.get(
                 source_blob_url, stream=True, timeout=30) as blob_req:
+            # TODO(aschultz): unsure if necessary or if only when using .text
+            blob_req.encoding = 'utf-8'
             cls.check_status(session=session, request=blob_req)
             for data in blob_req.iter_content(chunk_size):
                 if not data:
@@ -1313,7 +1447,7 @@ class PythonImageUploader(BaseImageUploader):
             }
         )
         if r.status_code == 400:
-            LOG.error(r.text)
+            LOG.error(cls._get_response_text(r))
             raise ImageUploaderException('Pushing manifest failed')
         cls.check_status(session=target_session, request=r)
@@ -1326,25 +1460,34 @@ class PythonImageUploader(BaseImageUploader):
     def _copy_registry_to_local(cls, source_url):
         cls._assert_scheme(source_url, 'docker')
         pull_source = source_url.netloc + source_url.path
-        LOG.info('Pulling %s' % pull_source)
-        cmd = ['buildah', 'pull']
+        cmd = ['buildah', '--debug', 'pull']

         if source_url.netloc in [cls.insecure_registries,
                                  cls.no_verify_registries]:
             cmd.append('--tls-verify=false')

         cmd.append(pull_source)
+        LOG.info('Pulling %s' % pull_source)
         LOG.info('Running %s' % ' '.join(cmd))
-        env = os.environ.copy()
-        process = subprocess.Popen(cmd, env=env, stdout=subprocess.PIPE,
-                                   universal_newlines=True)
+        process = subprocess.Popen(
+            cmd,
+            stdout=subprocess.PIPE,
+            stderr=subprocess.PIPE,
+            universal_newlines=True,
+            close_fds=True
+        )

         out, err = process.communicate()
+        LOG.info(out)
         if process.returncode != 0:
-            LOG.error('Error pulling image:\n%s\n%s' %
-                      (' '.join(cmd), err))
-            raise ImageUploaderException('Pulling image failed')
+            error_msg = (
+                'Pulling image failed: cmd "{}", stdout "{}",'
+                ' stderr "{}"'.format(
+                    ' '.join(cmd),
+                    out,
+                    err
+                )
+            )
+            LOG.error(error_msg)
+            raise ImageUploaderException(error_msg)
         return out

     @classmethod
@@ -1604,7 +1747,7 @@ class PythonImageUploader(BaseImageUploader):
         config = json.loads(config_str)

         layers = [l['digest'] for l in manifest['layers']]
-        i, tag = cls._image_tag_from_url(image_url)
+        i, _ = cls._image_tag_from_url(image_url)
         digest = image['digest']
         created = image['created']
         labels = config['config']['Labels']
@@ -1668,14 +1811,11 @@ class PythonImageUploader(BaseImageUploader):
         # Pull a single image first, to avoid duplicate pulls of the
         # same base layers
-        uploader, first_task = self.upload_tasks.pop()
-        result = uploader.upload_image(first_task)
-        local_images.extend(result)
+        local_images.extend(upload_task(args=self.upload_tasks.pop()))

-        # workers will be half the CPU count, to a minimum of 2
-        workers = max(2, processutils.get_worker_count() // 2)
+        # workers will the CPU count minus 1, with a minimum of 2
+        workers = max(2, (processutils.get_worker_count() - 1))
         p = futures.ThreadPoolExecutor(max_workers=workers)
         for result in p.map(upload_task, self.upload_tasks):
             local_images.extend(result)
         LOG.info('result %s' % local_images)
@@ -1721,7 +1861,8 @@ class UploadTask(object):
         self.source_image_url = image_to_url(self.source_image)
         self.target_image_url = image_to_url(self.target_image)
         self.target_image_source_tag_url = image_to_url(
-            self.target_image_source_tag)
+            self.target_image_source_tag
+        )


 def upload_task(args):


@@ -17,6 +17,7 @@ import hashlib
 import io
 import json
 import os
+import requests
 import shutil
 import six
 from six.moves.urllib.parse import urlparse
@@ -116,7 +117,8 @@ class TestImageExport(base.TestCase):
         }
         calc_digest = hashlib.sha256()
         layer_stream = io.BytesIO(blob_compressed)
-        self.assertRaises(IOError, image_export.export_stream,
+        self.assertRaises(requests.exceptions.HTTPError,
+                          image_export.export_stream,
                           target_url, layer, layer_stream,
                           verify_digest=True)
         blob_dir = os.path.join(image_export.IMAGE_EXPORT_DIR,


@@ -53,6 +53,8 @@ class TestImageUploadManager(base.TestCase):
             files.append('testfile')
         self.filelist = files

+    @mock.patch('tripleo_common.image.image_uploader.'
+                'BaseImageUploader.check_status')
     @mock.patch('tripleo_common.image.image_uploader.'
                 'PythonImageUploader._fetch_manifest')
     @mock.patch('tripleo_common.image.image_uploader.'
@@ -75,7 +77,8 @@ class TestImageUploadManager(base.TestCase):
                 'get_undercloud_registry', return_value='192.0.2.0:8787')
    def test_file_parsing(self, mock_gur, mockioctl, mockpath,
                          mock_images_match, mock_is_insecure, mock_inspect,
-                         mock_auth, mock_copy, mock_manifest):
+                         mock_auth, mock_copy, mock_manifest,
+                         check_status):
        mock_manifest.return_value = '{"layers": []}'
        mock_inspect.return_value = {}
@@ -527,7 +530,7 @@ class TestBaseImageUploader(base.TestCase):
         insecure_reg.add('registry-1.docker.io')
         secure_reg.add('192.0.2.1:8787')
         self.assertEqual(
-            'http://registry-1.docker.io/v2/t/nova-api/manifests/latest',
+            'https://registry-1.docker.io/v2/t/nova-api/manifests/latest',
             build(url2, '/t/nova-api/manifests/latest')
         )
         self.assertEqual(
@@ -870,6 +873,8 @@ class TestSkopeoImageUploader(base.TestCase):
         self.uploader._copy.retry.sleep = mock.Mock()
         self.uploader._inspect.retry.sleep = mock.Mock()

+    @mock.patch('tripleo_common.image.image_uploader.'
+                'BaseImageUploader.check_status')
     @mock.patch('os.environ')
     @mock.patch('subprocess.Popen')
     @mock.patch('tripleo_common.image.image_uploader.'
@@ -877,7 +882,7 @@ class TestSkopeoImageUploader(base.TestCase):
     @mock.patch('tripleo_common.image.image_uploader.'
                 'BaseImageUploader.authenticate')
     def test_upload_image(self, mock_auth, mock_inspect,
-                          mock_popen, mock_environ):
+                          mock_popen, mock_environ, check_status):
         mock_process = mock.Mock()
         mock_process.communicate.return_value = ('copy complete', '')
         mock_process.returncode = 0
@@ -1145,10 +1150,11 @@ class TestPythonImageUploader(base.TestCase):
         u._copy_layer_local_to_registry.retry.sleep = mock.Mock()
         u._copy_layer_registry_to_registry.retry.sleep = mock.Mock()
         u._copy_registry_to_registry.retry.sleep = mock.Mock()
-        u._copy_registry_to_local.retry.sleep = mock.Mock()
         u._copy_local_to_registry.retry.sleep = mock.Mock()
         self.requests = self.useFixture(rm_fixture.Fixture())

+    @mock.patch('tripleo_common.image.image_uploader.'
+                'BaseImageUploader.check_status')
     @mock.patch('tripleo_common.image.image_uploader.'
                 'PythonImageUploader.authenticate')
     @mock.patch('tripleo_common.image.image_uploader.'
@@ -1159,7 +1165,7 @@ class TestPythonImageUploader(base.TestCase):
                 'PythonImageUploader._copy_registry_to_registry')
     def test_upload_image(
             self, _copy_registry_to_registry, _cross_repo_mount,
-            _fetch_manifest, authenticate):
+            _fetch_manifest, authenticate, check_status):

         target_session = mock.Mock()
         source_session = mock.Mock()
@@ -1237,6 +1243,8 @@ class TestPythonImageUploader(base.TestCase):
             target_session=target_session
         )

+    @mock.patch('tripleo_common.image.image_uploader.'
+                'BaseImageUploader.check_status')
     @mock.patch('tripleo_common.image.image_uploader.'
                 'PythonImageUploader.authenticate')
     @mock.patch('tripleo_common.image.image_uploader.'
@@ -1247,7 +1255,7 @@ class TestPythonImageUploader(base.TestCase):
                 'PythonImageUploader._copy_registry_to_registry')
     def test_authenticate_upload_image(
             self, _copy_registry_to_registry, _cross_repo_mount,
-            _fetch_manifest, authenticate):
+            _fetch_manifest, authenticate, check_status):

         self.uploader.registry_credentials = {
             'docker.io': {'my_username': 'my_password'},
@@ -1308,6 +1316,8 @@ class TestPythonImageUploader(base.TestCase):
             ),
         ])

+    @mock.patch('tripleo_common.image.image_uploader.'
+                'BaseImageUploader.check_status')
     @mock.patch('tripleo_common.image.image_uploader.'
                 'PythonImageUploader.authenticate')
     @mock.patch('tripleo_common.image.image_uploader.'
@@ -1318,7 +1328,7 @@ class TestPythonImageUploader(base.TestCase):
                 'PythonImageUploader._copy_registry_to_registry')
     def test_insecure_registry(
             self, _copy_registry_to_registry, _cross_repo_mount,
-            _fetch_manifest, authenticate):
+            _fetch_manifest, authenticate, check_status):
         target_session = mock.Mock()
         source_session = mock.Mock()
         authenticate.side_effect = [
@@ -1374,6 +1384,8 @@ class TestPythonImageUploader(base.TestCase):
             ),
         ])

+    @mock.patch('tripleo_common.image.image_uploader.'
+                'BaseImageUploader.check_status')
     @mock.patch('tripleo_common.image.image_uploader.'
                 'PythonImageUploader.authenticate')
     @mock.patch('tripleo_common.image.image_uploader.'
@@ -1384,7 +1396,7 @@ class TestPythonImageUploader(base.TestCase):
                 'PythonImageUploader._copy_registry_to_registry')
     def test_upload_image_v1_manifest(
             self, _copy_registry_to_registry, _cross_repo_mount,
-            _fetch_manifest, authenticate):
+            _fetch_manifest, authenticate, check_status):

         target_session = mock.Mock()
         source_session = mock.Mock()
@@ -1460,6 +1472,8 @@ class TestPythonImageUploader(base.TestCase):
             target_session=target_session
         )

+    @mock.patch('tripleo_common.image.image_uploader.'
+                'BaseImageUploader.check_status')
     @mock.patch('tripleo_common.image.image_uploader.'
                 'PythonImageUploader.authenticate')
     @mock.patch('tripleo_common.image.image_uploader.'
@@ -1479,7 +1493,8 @@ class TestPythonImageUploader(base.TestCase):
     def test_upload_image_modify(
             self, _copy_local_to_registry, run_modify_playbook,
             _copy_registry_to_local, _copy_registry_to_registry,
-            _cross_repo_mount, _fetch_manifest, _image_exists, authenticate):
+            _cross_repo_mount, _fetch_manifest, _image_exists, authenticate,
+            check_status):
         _image_exists.return_value = False

         target_session = mock.Mock()
@@ -1599,7 +1614,9 @@ class TestPythonImageUploader(base.TestCase):
             session=target_session
         )

-    def test_fetch_manifest(self):
+    @mock.patch('tripleo_common.image.image_uploader.'
+                'BaseImageUploader.check_status')
+    def test_fetch_manifest(self, check_status):
         url = urlparse('docker://docker.io/t/nova-api:tripleo-current')
         manifest = '{"layers": []}'
         session = mock.Mock()
@@ -1619,7 +1636,9 @@ class TestPythonImageUploader(base.TestCase):
             }
         )

-    def test_upload_url(self):
+    @mock.patch('tripleo_common.image.image_uploader.'
+                'BaseImageUploader.check_status')
+    def test_upload_url(self, check_status):
         # test with previous request
         previous_request = mock.Mock()
         previous_request.headers = {
@@ -1740,12 +1759,15 @@ class TestPythonImageUploader(base.TestCase):
             'docker'
         )

+    @mock.patch('tripleo_common.image.image_uploader.'
+                'BaseImageUploader.check_status')
     @mock.patch('tripleo_common.image.image_uploader.'
                 'PythonImageUploader._upload_url')
     @mock.patch('tripleo_common.image.image_uploader.'
                 'PythonImageUploader.'
                 '_copy_layer_registry_to_registry')
-    def test_copy_registry_to_registry(self, _copy_layer, _upload_url):
+    def test_copy_registry_to_registry(self, _copy_layer, _upload_url,
+                                       check_status):
         source_url = urlparse('docker://docker.io/t/nova-api:latest')
         target_url = urlparse('docker://192.168.2.1:5000/t/nova-api:latest')
         _upload_url.return_value = 'https://192.168.2.1:5000/v2/upload'
@@ -1806,7 +1828,6 @@ class TestPythonImageUploader(base.TestCase):
                 params={'digest': 'sha256:1234'},
                 timeout=30
             ),
-            mock.call().raise_for_status(),
             mock.call(
                 'https://192.168.2.1:5000/v2/t/nova-api/manifests/latest',
                 data=mock.ANY,
@@ -1816,7 +1837,6 @@ class TestPythonImageUploader(base.TestCase):
                 },
                 timeout=30
             ),
-            mock.call().raise_for_status(),
         ])
         put_manifest = json.loads(
             target_session.put.call_args[1]['data'].decode('utf-8')