Kolla provides production-ready containers and deployment tools for operating OpenStack clouds.
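The build.py module reproduced below is Kolla's image build driver. Its run_build() entry point parses the kolla-build command-line options, renders the Dockerfile.j2 templates with Jinja2, and then builds (and optionally pushes) the images in dependency order using worker threads. A minimal sketch of driving it programmatically follows; the module name `build` and the option passed here are assumptions, not something this file guarantees:

    import sys

    import build  # assumption: build.py is importable from sys.path as `build`

    # run_build() reads its options from sys.argv[1:] via oslo.config, so the
    # usual kolla-build CLI arguments can be passed through this way.
    sys.argv = ['kolla-build', '--debug']
    statuses = build.run_build()
    if statuses:
        bad, good, unmatched, skipped = statuses
        print('built:', sorted(good))
        print('failed:', sorted(bad))

run_build() returns None in the template-only, list, and save-dependency modes; otherwise it returns the four status dicts from KollaWorker.get_image_statuses().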

build.py 43KB

#!/usr/bin/env python
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function

import contextlib
import datetime
import errno
import json
import logging
import os
import re
import requests
import shutil
import sys
import tarfile
import tempfile
import threading
import time

import docker
import git
import jinja2
from oslo_config import cfg
from requests import exceptions as requests_exc
import six

# NOTE(SamYaple): Update the search path to prefer PROJECT_ROOT as the source
# of packages to import if we are using local tools instead of
# pip installed kolla tools
PROJECT_ROOT = os.path.abspath(os.path.join(
    os.path.dirname(os.path.realpath(__file__)), '../..'))
if PROJECT_ROOT not in sys.path:
    sys.path.insert(0, PROJECT_ROOT)

from kolla.common import config as common_config
from kolla.common import task
from kolla import exception
from kolla.template import filters as jinja_filters
from kolla.template import methods as jinja_methods
from kolla import version


def make_a_logger(conf=None, image_name=None):
    if image_name:
        log = logging.getLogger(".".join([__name__, image_name]))
    else:
        log = logging.getLogger(__name__)
    if not log.handlers:
        if conf is None or not conf.logs_dir or not image_name:
            handler = logging.StreamHandler(sys.stderr)
            log.propagate = False
        else:
            filename = os.path.join(conf.logs_dir, "%s.log" % image_name)
            handler = logging.FileHandler(filename, delay=True)
        handler.setFormatter(logging.Formatter(logging.BASIC_FORMAT))
        log.addHandler(handler)
    if conf is not None and conf.debug:
        log.setLevel(logging.DEBUG)
    else:
        log.setLevel(logging.INFO)
    return log


LOG = make_a_logger()

# Image status constants.
#
# TODO(harlowja): use enum lib in the future??
STATUS_CONNECTION_ERROR = 'connection_error'
STATUS_PUSH_ERROR = 'push_error'
STATUS_ERROR = 'error'
STATUS_PARENT_ERROR = 'parent_error'
STATUS_BUILT = 'built'
STATUS_BUILDING = 'building'
STATUS_UNMATCHED = 'unmatched'
STATUS_MATCHED = 'matched'
STATUS_UNPROCESSED = 'unprocessed'
STATUS_SKIPPED = 'skipped'

# All error status constants.
STATUS_ERRORS = (STATUS_CONNECTION_ERROR, STATUS_PUSH_ERROR,
                 STATUS_ERROR, STATUS_PARENT_ERROR)


class ArchivingError(Exception):
    pass


@contextlib.contextmanager
def join_many(threads):
    try:
        yield
        for t in threads:
            t.join()
    except KeyboardInterrupt:
        try:
            LOG.info('Waiting for daemon threads to exit. Press Ctrl+C again'
                     ' to force exit')
            for t in threads:
                if t.is_alive():
                    LOG.debug('Waiting for thread %s to exit', t.name)
                    # NOTE(Jeffrey4l): Python Bug: When join without timeout,
                    # KeyboardInterrupt is never sent.
                    t.join(0xffff)
                    LOG.debug('Thread %s exits', t.name)
        except KeyboardInterrupt:
            LOG.warning('Forced exit')


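# DockerTask is the shared base class for the build and push tasks below; each
# task creates its docker-py client lazily, on first use, from the DOCKER_*
# environment variables collected at import time by
# docker.utils.kwargs_from_env().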
class DockerTask(task.Task):

    docker_kwargs = docker.utils.kwargs_from_env()

    def __init__(self):
        super(DockerTask, self).__init__()
        self._dc = None

    @property
    def dc(self):
        if self._dc is not None:
            return self._dc
        docker_kwargs = self.docker_kwargs.copy()
        self._dc = docker.Client(version='auto', **docker_kwargs)
        return self._dc


class Image(object):
    def __init__(self, name, canonical_name, path, parent_name='',
                 status=STATUS_UNPROCESSED, parent=None,
                 source=None, logger=None):
        self.name = name
        self.canonical_name = canonical_name
        self.path = path
        self.status = status
        self.parent = parent
        self.source = source
        self.parent_name = parent_name
        if logger is None:
            logger = make_a_logger(image_name=name)
        self.logger = logger
        self.children = []
        self.plugins = []
        self.additions = []

    def copy(self):
        c = Image(self.name, self.canonical_name, self.path,
                  logger=self.logger, parent_name=self.parent_name,
                  status=self.status, parent=self.parent)
        if self.source:
            c.source = self.source.copy()
        if self.children:
            c.children = list(self.children)
        if self.plugins:
            c.plugins = list(self.plugins)
        if self.additions:
            c.additions = list(self.additions)
        return c

    def __repr__(self):
        return ("Image(%s, %s, %s, parent_name=%s,"
                " status=%s, parent=%s, source=%s)") % (
            self.name, self.canonical_name, self.path,
            self.parent_name, self.status, self.parent, self.source)


class PushIntoQueueTask(task.Task):
    """Task that pushes some other task into a queue."""

    def __init__(self, push_task, push_queue):
        super(PushIntoQueueTask, self).__init__()
        self.push_task = push_task
        self.push_queue = push_queue

    @property
    def name(self):
        return 'PushIntoQueueTask(%s=>%s)' % (self.push_task.name,
                                              self.push_queue)

    def run(self):
        self.push_queue.put(self.push_task)
        self.success = True


class PushTask(DockerTask):
    """Task that pushes an image to a docker repository."""

    def __init__(self, conf, image):
        super(PushTask, self).__init__()
        self.conf = conf
        self.image = image
        self.logger = image.logger

    @property
    def name(self):
        return 'PushTask(%s)' % self.image.name

    def run(self):
        image = self.image
        self.logger.info('Trying to push the image')
        try:
            self.push_image(image)
        except requests_exc.ConnectionError:
            self.logger.exception('Make sure Docker is running and that you'
                                  ' have the correct privileges to run Docker'
                                  ' (root)')
            image.status = STATUS_CONNECTION_ERROR
        except Exception:
            self.logger.exception('Unknown error when pushing')
            image.status = STATUS_PUSH_ERROR
        finally:
            if (image.status not in STATUS_ERRORS
                    and image.status != STATUS_UNPROCESSED):
                self.logger.info('Pushed successfully')
                self.success = True
            else:
                self.success = False

    def push_image(self, image):
        for response in self.dc.push(image.canonical_name,
                                     stream=True,
                                     insecure_registry=True):
            stream = json.loads(response)
            if 'stream' in stream:
                self.logger.info(stream['stream'])
            elif 'errorDetail' in stream:
                image.status = STATUS_ERROR
                self.logger.error(stream['errorDetail']['message'])


class BuildTask(DockerTask):
    """Task that builds out an image."""

    def __init__(self, conf, image, push_queue):
        super(BuildTask, self).__init__()
        self.conf = conf
        self.image = image
        self.push_queue = push_queue
        self.nocache = not conf.cache
        self.forcerm = not conf.keep
        self.logger = image.logger

    @property
    def name(self):
        return 'BuildTask(%s)' % self.image.name

    def run(self):
        self.builder(self.image)
        if self.image.status in (STATUS_BUILT, STATUS_SKIPPED):
            self.success = True

    @property
    def followups(self):
        followups = []
        if self.conf.push and self.success:
            followups.extend([
                # If we are supposed to push the image into a docker
                # repository, then make sure we do that...
                PushIntoQueueTask(
                    PushTask(self.conf, self.image),
                    self.push_queue),
            ])
        if self.image.children and self.success:
            for image in self.image.children:
                if image.status == STATUS_UNMATCHED:
                    continue
                followups.append(BuildTask(self.conf, image, self.push_queue))
        return followups

    def process_source(self, image, source):
        dest_archive = os.path.join(image.path, source['name'] + '-archive')

        if source.get('type') == 'url':
            self.logger.debug("Getting archive from %s", source['source'])
            try:
                r = requests.get(source['source'], timeout=self.conf.timeout)
            except requests_exc.Timeout:
                self.logger.exception(
                    'Request timed out while getting archive from %s',
                    source['source'])
                image.status = STATUS_ERROR
                return
            if r.status_code == 200:
                with open(dest_archive, 'wb') as f:
                    f.write(r.content)
            else:
                self.logger.error(
                    'Failed to download archive: status_code %s',
                    r.status_code)
                image.status = STATUS_ERROR
                return
        elif source.get('type') == 'git':
            clone_dir = '{}-{}'.format(dest_archive,
                                       source['reference'].replace('/', '-'))
            try:
                self.logger.debug("Cloning from %s", source['source'])
                git.Git().clone(source['source'], clone_dir)
                git.Git(clone_dir).checkout(source['reference'])
                reference_sha = git.Git(clone_dir).rev_parse('HEAD')
                self.logger.debug("Git checkout by reference %s (%s)",
                                  source['reference'], reference_sha)
            except Exception as e:
                self.logger.error("Failed to get source for %s from git",
                                  image.name)
                self.logger.error("Error: %s", e)
                # clean-up clone folder to retry
                shutil.rmtree(clone_dir)
                image.status = STATUS_ERROR
                return

            with tarfile.open(dest_archive, 'w') as tar:
                tar.add(clone_dir, arcname=os.path.basename(clone_dir))
        elif source.get('type') == 'local':
            self.logger.debug("Getting local archive from %s",
                              source['source'])
            if os.path.isdir(source['source']):
                with tarfile.open(dest_archive, 'w') as tar:
                    tar.add(source['source'],
                            arcname=os.path.basename(source['source']))
            else:
                shutil.copyfile(source['source'], dest_archive)
        else:
            self.logger.error("Wrong source type '%s'", source.get('type'))
            image.status = STATUS_ERROR
            return

        # Set time on destination archive to epoch 0
        os.utime(dest_archive, (0, 0))

        return dest_archive

    def update_buildargs(self):
        buildargs = dict()
        if self.conf.build_args:
            buildargs = dict(self.conf.build_args)

        proxy_vars = ('HTTP_PROXY', 'http_proxy', 'HTTPS_PROXY',
                      'https_proxy', 'FTP_PROXY', 'ftp_proxy',
                      'NO_PROXY', 'no_proxy')

        for proxy_var in proxy_vars:
            if proxy_var in os.environ and proxy_var not in buildargs:
                buildargs[proxy_var] = os.environ.get(proxy_var)

        if not buildargs:
            return None
        return buildargs

    def builder(self, image):

        def make_an_archive(items, arcname, item_child_path=None):
            if not item_child_path:
                item_child_path = arcname
            archives = list()
            items_path = os.path.join(image.path, item_child_path)
            for item in items:
                archive_path = self.process_source(image, item)
                if image.status in STATUS_ERRORS:
                    raise ArchivingError
                archives.append(archive_path)
            if archives:
                for archive in archives:
                    with tarfile.open(archive, 'r') as archive_tar:
                        archive_tar.extractall(path=items_path)
            else:
                try:
                    os.mkdir(items_path)
                except OSError as e:
                    if e.errno == errno.EEXIST:
                        self.logger.info(
                            'Directory %s already exists. Skipping.',
                            items_path)
                    else:
                        self.logger.error('Failed to create directory %s: %s',
                                          items_path, e)
                        image.status = STATUS_CONNECTION_ERROR
                        raise ArchivingError
            arc_path = os.path.join(image.path, '%s-archive' % arcname)
            with tarfile.open(arc_path, 'w') as tar:
                tar.add(items_path, arcname=arcname)
            return len(os.listdir(items_path))

        self.logger.debug('Processing')

        if image.status == STATUS_SKIPPED:
            self.logger.info('Skipping %s (--skip-parents)' % image.name)
            return

        if image.status == STATUS_UNMATCHED:
            return

        if (image.parent is not None and
                image.parent.status in STATUS_ERRORS):
            self.logger.error('Parent image error\'d with message "%s"',
                              image.parent.status)
            image.status = STATUS_PARENT_ERROR
            return

        image.status = STATUS_BUILDING
        self.logger.info('Building')

        if image.source and 'source' in image.source:
            self.process_source(image, image.source)
            if image.status in STATUS_ERRORS:
                return

        try:
            plugins_am = make_an_archive(image.plugins, 'plugins')
        except ArchivingError:
            self.logger.error(
                "Failed turning any plugins into a plugins archive")
            return
        else:
            self.logger.debug(
                "Turned %s plugins into plugins archive",
                plugins_am)

        try:
            additions_am = make_an_archive(image.additions, 'additions')
        except ArchivingError:
            self.logger.error(
                "Failed turning any additions into an additions archive")
            return
        else:
            self.logger.debug(
                "Turned %s additions into additions archive",
                additions_am)

        # Pull the latest image for the base distro only
        pull = self.conf.pull if image.parent is None else False

        buildargs = self.update_buildargs()
        try:
            for response in self.dc.build(path=image.path,
                                          tag=image.canonical_name,
                                          nocache=not self.conf.cache,
                                          rm=True,
                                          pull=pull,
                                          forcerm=self.forcerm,
                                          buildargs=buildargs):
                stream = json.loads(response.decode('utf-8'))
                if 'stream' in stream:
                    for line in stream['stream'].split('\n'):
                        if line:
                            self.logger.info('%s', line)
                if 'errorDetail' in stream:
                    image.status = STATUS_ERROR
                    self.logger.error('Error\'d with the following message')
                    for line in stream['errorDetail']['message'].split('\n'):
                        if line:
                            self.logger.error('%s', line)
                    return
        except docker.errors.DockerException:
            image.status = STATUS_ERROR
            self.logger.exception('Unknown docker error when building')
        except Exception:
            image.status = STATUS_ERROR
            self.logger.exception('Unknown error when building')
        else:
            image.status = STATUS_BUILT
            self.logger.info('Built')


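# Worker threads consume tasks from a shared queue. Each task is retried up to
# conf.retries additional times; a successful task may enqueue follow-up tasks
# (image pushes and child image builds), and a thread exits once it pulls the
# shared tombstone sentinel, which it re-queues so sibling threads stop too.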
class WorkerThread(threading.Thread):
    """Thread that executes tasks until the queue provides a tombstone."""

    #: Object to be put on worker queues to get them to die.
    tombstone = object()

    def __init__(self, conf, queue):
        super(WorkerThread, self).__init__()
        self.queue = queue
        self.conf = conf
        self.should_stop = False

    def run(self):
        while not self.should_stop:
            task = self.queue.get()
            if task is self.tombstone:
                # Ensure any other threads also get the tombstone.
                self.queue.put(task)
                break
            try:
                for attempt in six.moves.range(self.conf.retries + 1):
                    if self.should_stop:
                        break
                    LOG.info("Attempt number: %s to run task: %s",
                             attempt + 1, task.name)
                    try:
                        task.run()
                        if task.success:
                            break
                    except Exception:
                        LOG.exception('Unhandled error when running %s',
                                      task.name)
                    # try again...
                    task.reset()
                if task.success and not self.should_stop:
                    for next_task in task.followups:
                        LOG.info('Added next task %s to queue',
                                 next_task.name)
                        self.queue.put(next_task)
            finally:
                self.queue.task_done()


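# KollaWorker drives a complete build: it copies the Dockerfile templates into
# a working directory, renders them with Jinja2, discovers parent/child image
# relationships, filters the set of images to build, and produces the queue of
# BuildTasks consumed by the worker threads above.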
class KollaWorker(object):

    def __init__(self, conf):
        self.conf = conf
        self.images_dir = self._get_images_dir()
        self.registry = conf.registry
        if self.registry:
            self.namespace = self.registry + '/' + conf.namespace
        else:
            self.namespace = conf.namespace
        self.base = conf.base
        self.base_tag = conf.base_tag
        self.install_type = conf.install_type
        self.tag = conf.tag
        self.base_arch = conf.base_arch
        self.images = list()
        rpm_setup_config = ([repo_file for repo_file in
                             conf.rpm_setup_config if repo_file is not None])
        self.rpm_setup = self.build_rpm_setup(rpm_setup_config)

        rh_base = ['centos', 'oraclelinux', 'rhel']
        rh_type = ['source', 'binary', 'rdo', 'rhos']
        deb_base = ['ubuntu', 'debian']
        deb_type = ['source', 'binary']

        if not ((self.base in rh_base and self.install_type in rh_type) or
                (self.base in deb_base and self.install_type in deb_type)):
            raise exception.KollaMismatchBaseTypeException(
                '{} is unavailable for {}'.format(self.install_type, self.base)
            )

        if self.install_type == 'binary':
            self.install_metatype = 'rdo'
        elif self.install_type == 'source':
            self.install_metatype = 'mixed'
        elif self.install_type == 'rdo':
            self.install_type = 'binary'
            self.install_metatype = 'rdo'
        elif self.install_type == 'rhos':
            self.install_type = 'binary'
            self.install_metatype = 'rhos'
        else:
            raise exception.KollaUnknownBuildTypeException(
                'Unknown install type'
            )

        self.image_prefix = self.base + '-' + self.install_type + '-'

        self.regex = conf.regex
        self.image_statuses_bad = dict()
        self.image_statuses_good = dict()
        self.image_statuses_unmatched = dict()
        self.image_statuses_skipped = dict()
        self.maintainer = conf.maintainer

    def _get_images_dir(self):
        possible_paths = (
            PROJECT_ROOT,
            os.path.join(sys.prefix, 'share/kolla'),
            os.path.join(sys.prefix, 'local/share/kolla'),
            # NOTE(zioproto): When Kolla is used within a snap, the env var
            # $SNAP is the directory where the snap is mounted.
            # https://github.com/openstack/snap-kolla
            # More info in snap packages https://snapcraft.io
            os.path.join(os.environ.get('SNAP', ''), 'share/kolla'))

        for path in possible_paths:
            image_path = os.path.join(path, 'docker')
            # NOTE(SamYaple): We explicitly check for the base folder to
            # ensure this is the correct path
            # TODO(SamYaple): Improve this to make this safer
            if os.path.exists(os.path.join(image_path, 'base')):
                LOG.info('Found the docker image folder at %s', image_path)
                return image_path
        else:
            raise exception.KollaDirNotFoundException('Image dir can not '
                                                      'be found')

    def build_rpm_setup(self, rpm_setup_config):
        """Generates a list of docker commands based on provided configuration.

        :param rpm_setup_config: A list of .rpm or .repo paths or URLs
                                 (can be empty)
        :return: A list of docker commands
        """
        rpm_setup = list()

        for config in rpm_setup_config:
            if config.endswith('.rpm'):
                # RPM files can be installed with yum from file path or url
                cmd = "RUN yum -y install {}".format(config)
            elif config.endswith('.repo'):
                if config.startswith('http'):
                    # Curl http://url/etc.repo to /etc/yum.repos.d/etc.repo
                    name = config.split('/')[-1]
                    cmd = "RUN curl -L {} -o /etc/yum.repos.d/{}".format(
                        config, name)
                else:
                    # Copy .repo file from filesystem
                    cmd = "COPY {} /etc/yum.repos.d/".format(config)
            elif not config:
                cmd = ''
            else:
                raise exception.KollaRpmSetupUnknownConfig(
                    'RPM setup must be provided as .rpm or .repo files.'
                    ' Attempted configuration was {}'.format(config)
                )

            rpm_setup.append(cmd)

        return rpm_setup

    def copy_apt_files(self):
        if self.conf.apt_sources_list:
            shutil.copyfile(
                self.conf.apt_sources_list,
                os.path.join(self.working_dir, "base", "sources.list")
            )

        if self.conf.apt_preferences:
            shutil.copyfile(
                self.conf.apt_preferences,
                os.path.join(self.working_dir, "base", "apt_preferences")
            )

    def setup_working_dir(self):
        """Creates a working directory for use while building."""
        if self.conf.work_dir:
            self.working_dir = os.path.join(self.conf.work_dir, 'docker')
        else:
            ts = time.time()
            ts = datetime.datetime.fromtimestamp(ts).strftime(
                '%Y-%m-%d_%H-%M-%S_')
            self.temp_dir = tempfile.mkdtemp(prefix='kolla-' + ts)
            self.working_dir = os.path.join(self.temp_dir, 'docker')
        shutil.copytree(self.images_dir, self.working_dir)
        self.copy_apt_files()
        LOG.debug('Created working dir: %s', self.working_dir)

    def set_time(self):
        for root, dirs, files in os.walk(self.working_dir):
            for file_ in files:
                os.utime(os.path.join(root, file_), (0, 0))
            for dir_ in dirs:
                os.utime(os.path.join(root, dir_), (0, 0))
        LOG.debug('Set atime and mtime to 0 for all content in working dir')

    def _get_filters(self):
        filters = {
            'customizable': jinja_filters.customizable,
        }
        return filters

    def _get_methods(self):
        """Mapping of available Jinja methods.

        Return a dictionary that maps available function names to their
        corresponding python methods, making them available in jinja templates.
        """
        return {
            'debian_package_install': jinja_methods.debian_package_install,
        }

    def get_users(self):
        all_sections = (set(six.iterkeys(self.conf._groups)) |
                        set(self.conf.list_all_sections()))
        ret = dict()
        for section in all_sections:
            match = re.search('^.*-user$', section)
            if match:
                user = self.conf[match.group(0)]
                ret[match.group(0)[:-5]] = {
                    'uid': user.uid,
                    'gid': user.gid,
                }
        return ret
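
    # Dockerfile generation: every Dockerfile.j2 found in the working directory
    # is rendered with the values assembled below; when --template-override is
    # used, the override templates are merged and rendered instead, with the
    # original template exposed to them as 'parent_template'.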
    def create_dockerfiles(self):
        kolla_version = version.version_info.cached_version_string()
        supported_distro_release = common_config.DISTRO_RELEASE.get(
            self.base)
        for path in self.docker_build_paths:
            template_name = "Dockerfile.j2"
            image_name = path.split("/")[-1]
            ts = time.time()
            build_date = datetime.datetime.fromtimestamp(ts).strftime(
                '%Y%m%d')
            values = {'base_distro': self.base,
                      'base_image': self.conf.base_image,
                      'base_distro_tag': self.base_tag,
                      'base_arch': self.base_arch,
                      'supported_distro_release': supported_distro_release,
                      'install_metatype': self.install_metatype,
                      'image_prefix': self.image_prefix,
                      'install_type': self.install_type,
                      'namespace': self.namespace,
                      'tag': self.tag,
                      'maintainer': self.maintainer,
                      'kolla_version': kolla_version,
                      'image_name': image_name,
                      'users': self.get_users(),
                      'rpm_setup': self.rpm_setup,
                      'build_date': build_date}
            env = jinja2.Environment(  # nosec: not used to render HTML
                loader=jinja2.FileSystemLoader(self.working_dir))
            env.filters.update(self._get_filters())
            env.globals.update(self._get_methods())
            tpl_path = os.path.join(
                os.path.relpath(path, self.working_dir),
                template_name)

            template = env.get_template(tpl_path)
            if self.conf.template_override:
                tpl_dict = self._merge_overrides(self.conf.template_override)
                template_name = os.path.basename(list(tpl_dict.keys())[0])
                values['parent_template'] = template
                env = jinja2.Environment(  # nosec: not used to render HTML
                    loader=jinja2.DictLoader(tpl_dict))
                env.filters.update(self._get_filters())
                env.globals.update(self._get_methods())
                template = env.get_template(template_name)
            content = template.render(values)
            content_path = os.path.join(path, 'Dockerfile')
            with open(content_path, 'w') as f:
                LOG.debug("Rendered %s into:", tpl_path)
                LOG.debug(content)
                f.write(content)
                LOG.debug("Wrote it to %s", content_path)

    def _merge_overrides(self, overrides):
        tpl_name = os.path.basename(overrides[0])
        with open(overrides[0], 'r') as f:
            tpl_content = f.read()
        for override in overrides[1:]:
            with open(override, 'r') as f:
                cont = f.read()
            # Remove extends header
            cont = re.sub(r'.*\{\%.*extends.*\n', '', cont)
            tpl_content += cont
        return {tpl_name: tpl_content}

    def find_dockerfiles(self):
        """Recursive search for Dockerfiles in the working directory."""
        self.docker_build_paths = list()
        path = self.working_dir
        filename = 'Dockerfile.j2'

        for root, dirs, names in os.walk(path):
            if filename in names:
                self.docker_build_paths.append(root)
                LOG.debug('Found %s', root.split(self.working_dir)[1])

        LOG.debug('Found %d Dockerfiles', len(self.docker_build_paths))

    def cleanup(self):
        """Remove temp files."""
        if not self.conf.work_dir:
            shutil.rmtree(self.temp_dir)

    def filter_images(self):
        """Filter which images to build."""
        filter_ = list()

        if self.regex:
            filter_ += self.regex
        elif self.conf.profile:
            for profile in self.conf.profile:
                if profile not in self.conf.profiles:
                    self.conf.register_opt(cfg.ListOpt(profile,
                                                       default=[]),
                                           'profiles')
                if len(self.conf.profiles[profile]) == 0:
                    msg = 'Profile: {} does not exist'.format(profile)
                    raise ValueError(msg)
                else:
                    filter_ += self.conf.profiles[profile]

        if filter_:
            patterns = re.compile(r"|".join(filter_).join('()'))
            for image in self.images:
                if image.status in (STATUS_MATCHED, STATUS_SKIPPED):
                    continue
                if re.search(patterns, image.name):
                    image.status = STATUS_MATCHED
                    while (image.parent is not None and
                           image.parent.status not in (STATUS_MATCHED,
                                                       STATUS_SKIPPED)):
                        image = image.parent
                        if self.conf.skip_parents:
                            image.status = STATUS_SKIPPED
                        else:
                            image.status = STATUS_MATCHED
                        LOG.debug('Image %s matched regex', image.name)
                else:
                    image.status = STATUS_UNMATCHED
        else:
            for image in self.images:
                image.status = STATUS_MATCHED

    def summary(self):
        """Walk the dictionary of images statuses and print results."""
        # For debug we print the logs again if the image error'd. This is to
        # help us debug and it will be extra helpful in the gate.
        for image in self.images:
            if image.status in STATUS_ERRORS:
                LOG.debug("Image %s failed", image.name)

        self.get_image_statuses()
        results = {
            'built': [],
            'failed': [],
            'not_matched': [],
            'skipped': [],
        }

        if self.image_statuses_good:
            LOG.info("=========================")
            LOG.info("Successfully built images")
            LOG.info("=========================")
            for name in sorted(self.image_statuses_good.keys()):
                LOG.info(name)
                results['built'].append({
                    'name': name,
                })

        if self.image_statuses_bad:
            LOG.info("===========================")
            LOG.info("Images that failed to build")
            LOG.info("===========================")
            for name, status in sorted(self.image_statuses_bad.items()):
                LOG.error('%s Failed with status: %s', name, status)
                results['failed'].append({
                    'name': name,
                    'status': status,
                })

        if self.image_statuses_unmatched:
            LOG.debug("=====================================")
            LOG.debug("Images not matched for build by regex")
            LOG.debug("=====================================")
            for name in self.image_statuses_unmatched.keys():
                LOG.debug(name)
                results['not_matched'].append({
                    'name': name,
                })

        if self.image_statuses_skipped:
            LOG.debug("=====================================")
            LOG.debug("Images skipped due to --skip-parents")
            LOG.debug("=====================================")
            for name in self.image_statuses_skipped.keys():
                LOG.debug(name)
                results['skipped'].append({
                    'name': name,
                })

        return results

    def get_image_statuses(self):
        if any([self.image_statuses_bad,
                self.image_statuses_good,
                self.image_statuses_unmatched,
                self.image_statuses_skipped]):
            return (self.image_statuses_bad,
                    self.image_statuses_good,
                    self.image_statuses_unmatched,
                    self.image_statuses_skipped)
        for image in self.images:
            if image.status == STATUS_BUILT:
                self.image_statuses_good[image.name] = image.status
            elif image.status == STATUS_UNMATCHED:
                self.image_statuses_unmatched[image.name] = image.status
            elif image.status == STATUS_SKIPPED:
                self.image_statuses_skipped[image.name] = image.status
            else:
                self.image_statuses_bad[image.name] = image.status
        return (self.image_statuses_bad,
                self.image_statuses_good,
                self.image_statuses_unmatched,
                self.image_statuses_skipped)

    def build_image_list(self):
        def process_source_installation(image, section):
            installation = dict()
            # NOTE(jeffrey4l): source is not needed when the type is None
            if self.conf._get('type', self.conf._get_group(section)) is None:
                if image.parent_name is None:
                    LOG.debug('No source location found in section %s',
                              section)
            else:
                installation['type'] = self.conf[section]['type']
                installation['source'] = self.conf[section]['location']
                installation['name'] = section
                if installation['type'] == 'git':
                    installation['reference'] = self.conf[section]['reference']
            return installation

        all_sections = (set(six.iterkeys(self.conf._groups)) |
                        set(self.conf.list_all_sections()))

        for path in self.docker_build_paths:
            # Reading parent image name
            with open(os.path.join(path, 'Dockerfile')) as f:
                content = f.read()

            image_name = os.path.basename(path)
            canonical_name = (self.namespace + '/' + self.image_prefix +
                              image_name + ':' + self.tag)
            parent_search_pattern = re.compile(r'^FROM.*$', re.MULTILINE)
            match = re.search(parent_search_pattern, content)
            if match:
                parent_name = match.group(0).split(' ')[1]
            else:
                parent_name = ''
            del match
            image = Image(image_name, canonical_name, path,
                          parent_name=parent_name,
                          logger=make_a_logger(self.conf, image_name))

            if self.install_type == 'source':
                # NOTE(jeffrey4l): register the opts if the section didn't
                # register in the kolla/common/config.py file
                if image.name not in self.conf._groups:
                    self.conf.register_opts(common_config.get_source_opts(),
                                            image.name)
                image.source = process_source_installation(image, image.name)
                for plugin in [match.group(0) for match in
                               (re.search('^{}-plugin-.+'.format(image.name),
                                          section) for section in
                                all_sections) if match]:
                    try:
                        self.conf.register_opts(
                            common_config.get_source_opts(),
                            plugin
                        )
                    except cfg.DuplicateOptError:
                        LOG.debug('Plugin %s already registered in config',
                                  plugin)
                    image.plugins.append(
                        process_source_installation(image, plugin))
                for addition in [
                    match.group(0) for match in
                    (re.search('^{}-additions-.+'.format(image.name),
                               section) for section in all_sections) if match]:
                    try:
                        self.conf.register_opts(
                            common_config.get_source_opts(),
                            addition
                        )
                    except cfg.DuplicateOptError:
                        LOG.debug('Addition %s already registered in config',
                                  addition)
                    image.additions.append(
                        process_source_installation(image, addition))

            self.images.append(image)

    def save_dependency(self, to_file):
        try:
            import graphviz
        except ImportError:
            LOG.error('"graphviz" is required to save the dependency graph')
            raise
        dot = graphviz.Digraph(comment='Docker Images Dependency')
        dot.body.extend(['rankdir=LR'])
        for image in self.images:
            if image.status not in [STATUS_MATCHED]:
                continue
            dot.node(image.name)
            if image.parent is not None:
                dot.edge(image.parent.name, image.name)

        with open(to_file, 'w') as f:
            f.write(dot.source)

    def list_images(self):
        for count, image in enumerate([
            image for image in self.images if image.status == STATUS_MATCHED
        ]):
            print(count + 1, ':', image.name)

    def list_dependencies(self):
        match = False
        for image in self.images:
            if image.status in [STATUS_MATCHED]:
                match = True
            if image.parent is None:
                base = image
        if not match:
            print('Nothing matched!')
            return

        def list_children(images, ancestry):
            children = six.next(iter(ancestry.values()))
            for image in images:
                if image.status not in [STATUS_MATCHED]:
                    continue
                if not image.children:
                    children.append(image.name)
                else:
                    newparent = {image.name: []}
                    children.append(newparent)
                    list_children(image.children, newparent)

        ancestry = {base.name: []}
        list_children(base.children, ancestry)
        json.dump(ancestry, sys.stdout, indent=2)

    def find_parents(self):
        """Associate all images with parents and children."""
        sort_images = dict()

        for image in self.images:
            sort_images[image.canonical_name] = image

        for parent_name, parent in sort_images.items():
            for image in sort_images.values():
                if image.parent_name == parent_name:
                    parent.children.append(image)
                    image.parent = parent

    def build_queue(self, push_queue):
        """Organize the build queue.

        Return a queue of BuildTasks for the base images; child image builds
        are added later as follow-up tasks once their parents have built.
        """
        self.build_image_list()
        self.find_parents()
        self.filter_images()

        queue = six.moves.queue.Queue()

        for image in self.images:
            if image.status == STATUS_UNMATCHED:
                # Don't bother queuing up build tasks for things that
                # were not matched in the first place... (not worth the
                # effort to run them, if they won't be used anyway).
                continue
            if image.parent is None:
                queue.put(BuildTask(self.conf, image, push_queue))
                LOG.info('Added image %s to queue', image.name)

        return queue


def run_build():
    """Build container images.

    :return: A 4-tuple containing the dicts of bad, good, unmatched and
             skipped container image statuses, or None if no images were
             built.
    """
    conf = cfg.ConfigOpts()
    common_config.parse(conf, sys.argv[1:], prog='kolla-build')

    if conf.debug:
        LOG.setLevel(logging.DEBUG)

    kolla = KollaWorker(conf)
    kolla.setup_working_dir()
    kolla.find_dockerfiles()
    kolla.create_dockerfiles()

    if conf.template_only:
        LOG.info('Dockerfiles are generated in %s', kolla.working_dir)
        return

    # We set the atime and mtime to epoch 0 to allow the Docker cache to work
    # the way we want. A different size or hash will still force a rebuild.
    kolla.set_time()

    if conf.save_dependency:
        kolla.build_image_list()
        kolla.find_parents()
        kolla.filter_images()
        kolla.save_dependency(conf.save_dependency)
        LOG.info('Docker image dependency graph saved to %s',
                 conf.save_dependency)
        return
    if conf.list_images:
        kolla.build_image_list()
        kolla.find_parents()
        kolla.filter_images()
        kolla.list_images()
        return
    if conf.list_dependencies:
        kolla.build_image_list()
        kolla.find_parents()
        kolla.filter_images()
        kolla.list_dependencies()
        return

    push_queue = six.moves.queue.Queue()
    queue = kolla.build_queue(push_queue)
    workers = []

    with join_many(workers):
        try:
            for x in six.moves.range(conf.threads):
                worker = WorkerThread(conf, queue)
                worker.setDaemon(True)
                worker.start()
                workers.append(worker)

            for x in six.moves.range(conf.push_threads):
                worker = WorkerThread(conf, push_queue)
                worker.setDaemon(True)
                worker.start()
                workers.append(worker)

            # sleep until queue is empty
            while queue.unfinished_tasks or push_queue.unfinished_tasks:
                time.sleep(3)

            # ensure all threads exited happily
            push_queue.put(WorkerThread.tombstone)
            queue.put(WorkerThread.tombstone)
        except KeyboardInterrupt:
            for w in workers:
                w.should_stop = True
            push_queue.put(WorkerThread.tombstone)
            queue.put(WorkerThread.tombstone)
            raise

    results = kolla.summary()
    kolla.cleanup()
    if conf.format == 'json':
        print(json.dumps(results))

    return kolla.get_image_statuses()