The Gatekeeper, or a project gating system
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.
 
 
 

5276 lines
194 KiB

  1. # Copyright 2012 Hewlett-Packard Development Company, L.P.
  2. # Copyright 2016 Red Hat, Inc.
  3. #
  4. # Licensed under the Apache License, Version 2.0 (the "License"); you may
  5. # not use this file except in compliance with the License. You may obtain
  6. # a copy of the License at
  7. #
  8. # http://www.apache.org/licenses/LICENSE-2.0
  9. #
  10. # Unless required by applicable law or agreed to in writing, software
  11. # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
  12. # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
  13. # License for the specific language governing permissions and limitations
  14. # under the License.
  15. import configparser
  16. from collections import OrderedDict
  17. from configparser import ConfigParser
  18. from contextlib import contextmanager
  19. import copy
  20. import datetime
  21. import errno
  22. import gc
  23. import hashlib
  24. from io import StringIO
  25. import itertools
  26. import json
  27. import logging
  28. import os
  29. import queue
  30. import random
  31. import re
  32. from logging import Logger
  33. from queue import Queue
  34. from typing import Callable, Optional, Any, Iterable, Generator, List, Dict
  35. import requests
  36. import select
  37. import shutil
  38. import socket
  39. import string
  40. import subprocess
  41. import sys
  42. import tempfile
  43. import threading
  44. import traceback
  45. import time
  46. import uuid
  47. import socketserver
  48. import http.server
  49. import urllib.parse
  50. import git
  51. import gear
  52. import fixtures
  53. import kazoo.client
  54. import kazoo.exceptions
  55. import pymysql
  56. import psycopg2
  57. import psycopg2.extensions
  58. import testtools
  59. import testtools.content
  60. import testtools.content_type
  61. from git.exc import NoSuchPathError
  62. import yaml
  63. import paramiko
  64. from zuul.model import Change
  65. from zuul.rpcclient import RPCClient
  66. from zuul.driver.zuul import ZuulDriver
  67. from zuul.driver.git import GitDriver
  68. from zuul.driver.smtp import SMTPDriver
  69. from zuul.driver.github import GithubDriver
  70. from zuul.driver.timer import TimerDriver
  71. from zuul.driver.sql import SQLDriver
  72. from zuul.driver.bubblewrap import BubblewrapDriver
  73. from zuul.driver.nullwrap import NullwrapDriver
  74. from zuul.driver.mqtt import MQTTDriver
  75. from zuul.driver.pagure import PagureDriver
  76. from zuul.driver.gitlab import GitlabDriver
  77. from zuul.driver.gerrit import GerritDriver
  78. from zuul.driver.github.githubconnection import GithubClientManager
  79. from zuul.lib.connections import ConnectionRegistry
  80. from psutil import Popen
  81. import tests.fakegithub
  82. import zuul.driver.gerrit.gerritsource as gerritsource
  83. import zuul.driver.gerrit.gerritconnection as gerritconnection
  84. import zuul.driver.git.gitwatcher as gitwatcher
  85. import zuul.driver.github.githubconnection as githubconnection
  86. import zuul.driver.pagure.pagureconnection as pagureconnection
  87. import zuul.driver.gitlab.gitlabconnection as gitlabconnection
  88. import zuul.driver.github
  89. import zuul.driver.sql
  90. import zuul.scheduler
  91. import zuul.executor.server
  92. import zuul.executor.client
  93. import zuul.lib.ansible
  94. import zuul.lib.connections
  95. import zuul.lib.auth
  96. import zuul.merger.client
  97. import zuul.merger.merger
  98. import zuul.merger.server
  99. import zuul.model
  100. import zuul.nodepool
  101. import zuul.rpcclient
  102. import zuul.zk
  103. import zuul.configloader
  104. from zuul.lib.config import get_default
  105. from zuul.lib.logutil import get_annotated_logger
  106. FIXTURE_DIR = os.path.join(os.path.dirname(__file__), 'fixtures')
  107. KEEP_TEMPDIRS = bool(os.environ.get('KEEP_TEMPDIRS', False))
  108. def repack_repo(path):
  109. cmd = ['git', '--git-dir=%s/.git' % path, 'repack', '-afd']
  110. output = subprocess.Popen(cmd, close_fds=True,
  111. stdout=subprocess.PIPE,
  112. stderr=subprocess.PIPE)
  113. out = output.communicate()
  114. if output.returncode:
  115. raise Exception("git repack returned %d" % output.returncode)
  116. return out
  117. def random_sha1():
  118. return hashlib.sha1(str(random.random()).encode('ascii')).hexdigest()
  119. def iterate_timeout(max_seconds, purpose):
  120. start = time.time()
  121. count = 0
  122. while (time.time() < start + max_seconds):
  123. count += 1
  124. yield count
  125. time.sleep(0.01)
  126. raise Exception("Timeout waiting for %s" % purpose)
  127. def simple_layout(path, driver='gerrit'):
  128. """Specify a layout file for use by a test method.
  129. :arg str path: The path to the layout file.
  130. :arg str driver: The source driver to use, defaults to gerrit.
  131. Some tests require only a very simple configuration. For those,
  132. establishing a complete config directory hierachy is too much
  133. work. In those cases, you can add a simple zuul.yaml file to the
  134. test fixtures directory (in fixtures/layouts/foo.yaml) and use
  135. this decorator to indicate the test method should use that rather
  136. than the tenant config file specified by the test class.
  137. The decorator will cause that layout file to be added to a
  138. config-project called "common-config" and each "project" instance
  139. referenced in the layout file will have a git repo automatically
  140. initialized.
  141. """
  142. def decorator(test):
  143. test.__simple_layout__ = (path, driver)
  144. return test
  145. return decorator
  146. def never_capture():
  147. """Never capture logs/output
  148. Due to high volume, log files are normally captured and attached
  149. to the subunit stream only on error. This can make diagnosing
  150. some problems difficult. Use this dectorator on a test to
  151. indicate that logs and output should not be captured.
  152. """
  153. def decorator(test):
  154. test.__never_capture__ = True
  155. return test
  156. return decorator
  157. class GerritDriverMock(GerritDriver):
  158. def __init__(self, registry, changes: Dict[str, Dict[str, Change]],
  159. upstream_root: str, additional_event_queues, poller_events,
  160. add_cleanup: Callable[[Callable[[], None]], None]):
  161. super(GerritDriverMock, self).__init__()
  162. self.registry = registry
  163. self.changes = changes
  164. self.upstream_root = upstream_root
  165. self.additional_event_queues = additional_event_queues
  166. self.poller_events = poller_events
  167. self.add_cleanup = add_cleanup
  168. def getConnection(self, name, config):
  169. db = self.changes.setdefault(config['server'], {})
  170. poll_event = self.poller_events.setdefault(name, threading.Event())
  171. ref_event = self.poller_events.setdefault(name + '-ref',
  172. threading.Event())
  173. connection = FakeGerritConnection(
  174. self, name, config,
  175. changes_db=db,
  176. upstream_root=self.upstream_root,
  177. poller_event=poll_event,
  178. ref_watcher_event=ref_event)
  179. if connection.web_server:
  180. self.add_cleanup(connection.web_server.stop)
  181. self.additional_event_queues.append(connection.event_queue)
  182. setattr(self.registry, 'fake_' + name, connection)
  183. return connection
  184. class GithubDriverMock(GithubDriver):
  185. def __init__(self, registry, changes: Dict[str, Dict[str, Change]],
  186. config: ConfigParser, upstream_root: str,
  187. additional_event_queues, rpcclient: RPCClient,
  188. git_url_with_auth: bool):
  189. super(GithubDriverMock, self).__init__()
  190. self.registry = registry
  191. self.changes = changes
  192. self.config = config
  193. self.upstream_root = upstream_root
  194. self.additional_event_queues = additional_event_queues
  195. self.rpcclient = rpcclient
  196. self.git_url_with_auth = git_url_with_auth
  197. def registerGithubProjects(self, connection):
  198. path = self.config.get('scheduler', 'tenant_config')
  199. with open(os.path.join(FIXTURE_DIR, path)) as f:
  200. tenant_config = yaml.safe_load(f.read())
  201. for tenant in tenant_config:
  202. sources = tenant['tenant']['source']
  203. conf = sources.get(connection.source.name)
  204. if not conf:
  205. return
  206. projects = conf.get('config-projects', [])
  207. projects.extend(conf.get('untrusted-projects', []))
  208. client = connection.getGithubClient(None)
  209. for project in projects:
  210. if isinstance(project, dict):
  211. # This can be a dict with the project as the only key
  212. client.addProjectByName(
  213. list(project.keys())[0])
  214. else:
  215. client.addProjectByName(project)
  216. def getConnection(self, name, config):
  217. server = config.get('server', 'github.com')
  218. db = self.changes.setdefault(server, {})
  219. connection = FakeGithubConnection(
  220. self, name, config, self.rpcclient,
  221. changes_db=db,
  222. upstream_root=self.upstream_root,
  223. git_url_with_auth=self.git_url_with_auth)
  224. self.additional_event_queues.append(connection.event_queue)
  225. setattr(self.registry, 'fake_' + name, connection)
  226. self.registerGithubProjects(connection)
  227. return connection
  228. class PagureDriverMock(PagureDriver):
  229. def __init__(self, registry, changes: Dict[str, Dict[str, Change]],
  230. upstream_root: str, additional_event_queues,
  231. rpcclient: RPCClient):
  232. super(PagureDriverMock, self).__init__()
  233. self.registry = registry
  234. self.changes = changes
  235. self.upstream_root = upstream_root
  236. self.additional_event_queues = additional_event_queues
  237. self.rpcclient = rpcclient
  238. def getConnection(self, name, config):
  239. server = config.get('server', 'pagure.io')
  240. db = self.changes.setdefault(server, {})
  241. connection = FakePagureConnection(
  242. self, name, config, self.rpcclient,
  243. changes_db=db,
  244. upstream_root=self.upstream_root)
  245. self.additional_event_queues.append(connection.event_queue)
  246. setattr(self.registry, 'fake_' + name, connection)
  247. return connection
  248. class GitlabDriverMock(GitlabDriver):
  249. def __init__(self, registry, changes: Dict[str, Dict[str, Change]],
  250. upstream_root: str, additional_event_queues,
  251. rpcclient: RPCClient):
  252. super(GitlabDriverMock, self).__init__()
  253. self.registry = registry
  254. self.changes = changes
  255. self.upstream_root = upstream_root
  256. self.additional_event_queues = additional_event_queues
  257. self.rpcclient = rpcclient
  258. def getConnection(self, name, config):
  259. server = config.get('server', 'gitlab.com')
  260. db = self.changes.setdefault(server, {})
  261. connection = FakeGitlabConnection(
  262. self, name, config, self.rpcclient,
  263. changes_db=db,
  264. upstream_root=self.upstream_root)
  265. self.additional_event_queues.append(connection.event_queue)
  266. setattr(self.registry, 'fake_' + name, connection)
  267. return connection
  268. class TestConnectionRegistry(ConnectionRegistry):
  269. def __init__(self, changes: Dict[str, Dict[str, Change]],
  270. config: ConfigParser, additional_event_queues,
  271. upstream_root: str, rpcclient: RPCClient, poller_events,
  272. git_url_with_auth: bool,
  273. add_cleanup: Callable[[Callable[[], None]], None]):
  274. self.connections = OrderedDict()
  275. self.drivers = {}
  276. self.registerDriver(ZuulDriver())
  277. self.registerDriver(GerritDriverMock(
  278. self, changes, upstream_root, additional_event_queues,
  279. poller_events, add_cleanup))
  280. self.registerDriver(GitDriver())
  281. self.registerDriver(GithubDriverMock(
  282. self, changes, config, upstream_root, additional_event_queues,
  283. rpcclient, git_url_with_auth))
  284. self.registerDriver(SMTPDriver())
  285. self.registerDriver(TimerDriver())
  286. self.registerDriver(SQLDriver())
  287. self.registerDriver(BubblewrapDriver())
  288. self.registerDriver(NullwrapDriver())
  289. self.registerDriver(MQTTDriver())
  290. self.registerDriver(PagureDriverMock(
  291. self, changes, upstream_root, additional_event_queues, rpcclient))
  292. self.registerDriver(GitlabDriverMock(
  293. self, changes, upstream_root, additional_event_queues, rpcclient))
  294. class FakeAnsibleManager(zuul.lib.ansible.AnsibleManager):
  295. def validate(self):
  296. return True
  297. def copyAnsibleFiles(self):
  298. pass
class GerritChangeReference(git.Reference):
    # A git reference living under refs/changes/*, mimicking the refs
    # Gerrit creates for uploaded patchsets.
    _common_path_default = "refs/changes"
    # Change refs always point at commits, never at tags/trees.
    _points_to_commits_only = True
class FakeGerritChange(object):
    """An in-memory fake of a Gerrit change.

    Maintains patchsets, approvals, checks and comments, backs them with
    real commits in a git repo under ``upstream_root``, and produces the
    JSON structures Gerrit emits over both its SSH event stream and its
    HTTP API.
    """
    # Label name -> (description, min vote, max vote).
    categories = {'Approved': ('Approved', -1, 1),
                  'Code-Review': ('Code-Review', -2, 2),
                  'Verified': ('Verified', -2, 2)}

    def __init__(self, gerrit, number, project, branch, subject,
                 status='NEW', upstream_root=None, files={},
                 parent=None):
        # NOTE(review): mutable default for ``files`` — harmless here
        # because it is only read (passed through to addPatchset), but
        # do not mutate it.
        self.gerrit = gerrit
        self.source = gerrit
        self.reported = 0
        self.queried = 0
        self.patchsets = []
        self.number = number
        self.project = project
        self.branch = branch
        self.subject = subject
        self.latest_patchset = 0
        self.depends_on_change = None
        self.depends_on_patchset = None
        self.needed_by_changes = []
        self.fail_merge = False
        self.messages = []
        self.comments = []
        self.checks = {}
        self.checks_history = []
        # SSH-query-shaped representation of the change; 'comments' and
        # 'patchSets' alias self.comments / self.patchsets so later
        # mutations show up in queries automatically.
        self.data = {
            'branch': branch,
            'comments': self.comments,
            'commitMessage': subject,
            'createdOn': time.time(),
            'id': 'I' + random_sha1(),
            'lastUpdated': time.time(),
            'number': str(number),
            'open': status == 'NEW',
            'owner': {'email': 'user@example.com',
                      'name': 'User Name',
                      'username': 'username'},
            'patchSets': self.patchsets,
            'project': project,
            'status': status,
            'subject': subject,
            'submitRecords': [],
            'url': '%s/%s' % (self.gerrit.baseurl.rstrip('/'), number)}
        self.upstream_root = upstream_root
        # Every change starts with one patchset.
        self.addPatchset(files=files, parent=parent)
        self.data['submitRecords'] = self.getSubmitRecords()
        self.open = status == 'NEW'

    def addFakeChangeToRepo(self, msg, files, large, parent):
        """Commit *files* on a new change ref in the upstream repo.

        Creates a ref named 1/<number>/<patchset>, commits on it
        (either the given files, or 100 files of 4096 random printable
        chars when *large*), then restores master.  Returns the commit.
        """
        path = os.path.join(self.upstream_root, self.project)
        repo = git.Repo(path)
        if parent is None:
            parent = 'refs/tags/init'
        ref = GerritChangeReference.create(
            repo, '1/%s/%s' % (self.number, self.latest_patchset),
            parent)
        repo.head.reference = ref
        zuul.merger.merger.reset_repo_to_head(repo)
        repo.git.clean('-x', '-f', '-d')
        path = os.path.join(self.upstream_root, self.project)
        if not large:
            for fn, content in files.items():
                fn = os.path.join(path, fn)
                if content is None:
                    # None content means "delete this file".
                    os.unlink(fn)
                    repo.index.remove([fn])
                else:
                    d = os.path.dirname(fn)
                    if not os.path.exists(d):
                        os.makedirs(d)
                    with open(fn, 'w') as f:
                        f.write(content)
                    repo.index.add([fn])
        else:
            for fni in range(100):
                fn = os.path.join(path, str(fni))
                f = open(fn, 'w')
                for ci in range(4096):
                    f.write(random.choice(string.printable))
                f.close()
                repo.index.add([fn])
        r = repo.index.commit(msg)
        # Leave the repo checked out on master again.
        repo.head.reference = 'master'
        zuul.merger.merger.reset_repo_to_head(repo)
        repo.git.clean('-x', '-f', '-d')
        repo.heads['master'].checkout()
        return r

    def addPatchset(self, files=None, large=False, parent=None):
        """Add a new patchset (commit plus metadata) to the change.

        With no *files*, a single synthetic file keyed on branch and
        change number is used so every patchset has content.
        """
        self.latest_patchset += 1
        if not files:
            fn = '%s-%s' % (self.branch.replace('/', '_'), self.number)
            data = ("test %s %s %s\n" %
                    (self.branch, self.number, self.latest_patchset))
            files = {fn: data}
        msg = self.subject + '-' + str(self.latest_patchset)
        c = self.addFakeChangeToRepo(msg, files, large, parent)
        # Gerrit always reports /COMMIT_MSG and README entries alongside
        # the actual files.
        ps_files = [{'file': '/COMMIT_MSG',
                     'type': 'ADDED'},
                    {'file': 'README',
                     'type': 'MODIFIED'}]
        for f in files:
            ps_files.append({'file': f, 'type': 'ADDED'})
        d = {'approvals': [],
             'createdOn': time.time(),
             'files': ps_files,
             'number': str(self.latest_patchset),
             'ref': 'refs/changes/1/%s/%s' % (self.number,
                                              self.latest_patchset),
             'revision': c.hexsha,
             'uploader': {'email': 'user@example.com',
                          'name': 'User name',
                          'username': 'user'}}
        self.data['currentPatchSet'] = d
        self.patchsets.append(d)
        self.data['submitRecords'] = self.getSubmitRecords()

    def setCheck(self, checker, reset=False, **kw):
        """Create/update the checks-plugin record for *checker*.

        Keys absent from *kw* keep their current value (or default);
        passing an explicit None removes a key.  Every call appends a
        deep copy of all checks to checks_history.
        """
        if reset:
            self.checks[checker] = {'state': 'NOT_STARTED',
                                    'created': str(datetime.datetime.now())}
        chk = self.checks.setdefault(checker, {})
        chk['updated'] = str(datetime.datetime.now())
        for (key, default) in [
                ('state', None),
                ('repository', self.project),
                ('change_number', self.number),
                ('patch_set_id', self.latest_patchset),
                ('checker_uuid', checker),
                ('message', None),
                ('url', None),
                ('started', None),
                ('finished', None),
        ]:
            val = kw.get(key, chk.get(key, default))
            if val is not None:
                chk[key] = val
            elif key in chk:
                del chk[key]
        self.checks_history.append(copy.deepcopy(self.checks))

    def addComment(self, filename, line, message, name, email, username,
                   comment_range=None):
        """Append an inline review comment (visible via self.data)."""
        comment = {
            'file': filename,
            'line': int(line),
            'reviewer': {
                'name': name,
                'email': email,
                'username': username,
            },
            'message': message,
        }
        if comment_range:
            comment['range'] = comment_range
        self.comments.append(comment)

    def getPatchsetCreatedEvent(self, patchset):
        """Return a fake 'patchset-created' stream event (1-based index)."""
        event = {"type": "patchset-created",
                 "change": {"project": self.project,
                            "branch": self.branch,
                            "id": "I5459869c07352a31bfb1e7a8cac379cabfcb25af",
                            "number": str(self.number),
                            "subject": self.subject,
                            "owner": {"name": "User Name"},
                            "url": "https://hostname/3"},
                 "patchSet": self.patchsets[patchset - 1],
                 "uploader": {"name": "User Name"}}
        return event

    def getChangeRestoredEvent(self):
        """Return a fake 'change-restored' stream event."""
        event = {"type": "change-restored",
                 "change": {"project": self.project,
                            "branch": self.branch,
                            "id": "I5459869c07352a31bfb1e7a8cac379cabfcb25af",
                            "number": str(self.number),
                            "subject": self.subject,
                            "owner": {"name": "User Name"},
                            "url": "https://hostname/3"},
                 "restorer": {"name": "User Name"},
                 "patchSet": self.patchsets[-1],
                 "reason": ""}
        return event

    def getChangeAbandonedEvent(self):
        """Return a fake 'change-abandoned' stream event."""
        event = {"type": "change-abandoned",
                 "change": {"project": self.project,
                            "branch": self.branch,
                            "id": "I5459869c07352a31bfb1e7a8cac379cabfcb25af",
                            "number": str(self.number),
                            "subject": self.subject,
                            "owner": {"name": "User Name"},
                            "url": "https://hostname/3"},
                 "abandoner": {"name": "User Name"},
                 "patchSet": self.patchsets[-1],
                 "reason": ""}
        return event

    def getChangeCommentEvent(self, patchset):
        """Return a fake 'comment-added' stream event (1-based index)."""
        event = {"type": "comment-added",
                 "change": {"project": self.project,
                            "branch": self.branch,
                            "id": "I5459869c07352a31bfb1e7a8cac379cabfcb25af",
                            "number": str(self.number),
                            "subject": self.subject,
                            "owner": {"name": "User Name"},
                            "url": "https://hostname/3"},
                 "patchSet": self.patchsets[patchset - 1],
                 "author": {"name": "User Name"},
                 "approvals": [{"type": "Code-Review",
                                "description": "Code-Review",
                                "value": "0"}],
                 "comment": "This is a comment"}
        return event

    def getChangeMergedEvent(self):
        """Return a fake 'change-merged' stream event."""
        event = {"submitter": {"name": "Jenkins",
                               "username": "jenkins"},
                 "newRev": "29ed3b5f8f750a225c5be70235230e3a6ccb04d9",
                 "patchSet": self.patchsets[-1],
                 "change": self.data,
                 "type": "change-merged",
                 "eventCreatedOn": 1487613810}
        return event

    def getRefUpdatedEvent(self):
        """Return a fake 'ref-updated' event for this change's branch.

        oldRev is read from the current branch head in the upstream
        repo; newRev is the latest patchset's revision.
        """
        path = os.path.join(self.upstream_root, self.project)
        repo = git.Repo(path)
        oldrev = repo.heads[self.branch].commit.hexsha
        event = {
            "type": "ref-updated",
            "submitter": {
                "name": "User Name",
            },
            "refUpdate": {
                "oldRev": oldrev,
                "newRev": self.patchsets[-1]['revision'],
                "refName": self.branch,
                "project": self.project,
            }
        }
        return event

    def addApproval(self, category, value, username='reviewer_john',
                    granted_on=None, message='', tag=None):
        """Record a vote on the latest patchset.

        Replaces any previous vote by the same user in the same
        category, refreshes submitRecords, and returns the matching
        'comment-added' stream event (JSON round-tripped so the caller
        gets an independent copy).
        """
        if not granted_on:
            granted_on = time.time()
        approval = {
            'description': self.categories[category][0],
            'type': category,
            'value': str(value),
            'by': {
                'username': username,
                'email': username + '@example.com',
            },
            'grantedOn': int(granted_on),
            '__tag': tag,  # Not available in ssh api
        }
        # Iterate over a copy so deletion during iteration is safe.
        for i, x in enumerate(self.patchsets[-1]['approvals'][:]):
            if x['by']['username'] == username and x['type'] == category:
                del self.patchsets[-1]['approvals'][i]
        self.patchsets[-1]['approvals'].append(approval)
        event = {'approvals': [approval],
                 'author': {'email': 'author@example.com',
                            'name': 'Patchset Author',
                            'username': 'author_phil'},
                 'change': {'branch': self.branch,
                            'id': 'Iaa69c46accf97d0598111724a38250ae76a22c87',
                            'number': str(self.number),
                            'owner': {'email': 'owner@example.com',
                                      'name': 'Change Owner',
                                      'username': 'owner_jane'},
                            'project': self.project,
                            'subject': self.subject,
                            'topic': 'master',
                            'url': 'https://hostname/459'},
                 'comment': message,
                 'patchSet': self.patchsets[-1],
                 'type': 'comment-added'}
        self.data['submitRecords'] = self.getSubmitRecords()
        return json.loads(json.dumps(event))

    def getSubmitRecords(self):
        """Compute Gerrit-style submit records from current approvals.

        A category is REJECT at its min vote, OK at its max, NEED
        otherwise; a min vote always wins over a larger-magnitude
        positive one.  Returns [{'status': 'OK'}] only when every
        category is at its max.
        """
        status = {}
        for cat in self.categories:
            status[cat] = 0
        for a in self.patchsets[-1]['approvals']:
            cur = status[a['type']]
            cat_min, cat_max = self.categories[a['type']][1:]
            new = int(a['value'])
            if new == cat_min:
                # A blocking (minimum) vote sticks regardless of others.
                cur = new
            elif abs(new) > abs(cur):
                cur = new
            status[a['type']] = cur
        labels = []
        ok = True
        for typ, cat in self.categories.items():
            cur = status[typ]
            cat_min, cat_max = cat[1:]
            if cur == cat_min:
                value = 'REJECT'
                ok = False
            elif cur == cat_max:
                value = 'OK'
            else:
                value = 'NEED'
                ok = False
            labels.append({'label': cat[0], 'status': value})
        if ok:
            return [{'status': 'OK'}]
        return [{'status': 'NOT_READY',
                 'labels': labels}]

    def setDependsOn(self, other, patchset):
        """Link this change to depend on *other* at *patchset* (1-based).

        Updates both sides: our 'dependsOn' and the other change's
        'neededBy'.
        """
        self.depends_on_change = other
        self.depends_on_patchset = patchset
        d = {'id': other.data['id'],
             'number': other.data['number'],
             'ref': other.patchsets[patchset - 1]['ref']
             }
        self.data['dependsOn'] = [d]
        other.needed_by_changes.append((self, len(self.patchsets)))
        needed = other.data.get('neededBy', [])
        d = {'id': self.data['id'],
             'number': self.data['number'],
             'ref': self.patchsets[-1]['ref'],
             'revision': self.patchsets[-1]['revision']
             }
        needed.append(d)
        other.data['neededBy'] = needed

    def query(self):
        """Return an SSH-query-style copy of the change data.

        Also refreshes dependsOn[0]['isCurrentPatchSet'] and counts the
        query in self.queried.
        """
        self.queried += 1
        d = self.data.get('dependsOn')
        if d:
            d = d[0]
            if (self.depends_on_change.patchsets[-1]['ref'] == d['ref']):
                d['isCurrentPatchSet'] = True
            else:
                d['isCurrentPatchSet'] = False
        return json.loads(json.dumps(self.data))

    def queryHTTP(self):
        """Return an HTTP-API-style (REST ChangeInfo-like) dict.

        Builds labels from the latest patchset's approvals and a
        single-revision 'revisions' map; counts the query.
        """
        self.queried += 1
        labels = {}
        for cat in self.categories:
            labels[cat] = {}
        for app in self.patchsets[-1]['approvals']:
            label = labels[app['type']]
            _, label_min, label_max = self.categories[app['type']]
            val = int(app['value'])
            label_all = label.setdefault('all', [])
            approval = {
                "value": val,
                "username": app['by']['username'],
                "email": app['by']['email'],
                "date": str(datetime.datetime.fromtimestamp(app['grantedOn'])),
            }
            if app.get('__tag') is not None:
                approval['tag'] = app['__tag']
            label_all.append(approval)
            if val == label_min:
                label['blocking'] = True
                # First minimum vote recorded wins as 'rejected'.
                if 'rejected' not in label:
                    label['rejected'] = app['by']
            if val == label_max:
                if 'approved' not in label:
                    label['approved'] = app['by']
        revisions = {}
        rev = self.patchsets[-1]
        num = len(self.patchsets)
        files = {}
        for f in rev['files']:
            if f['file'] == '/COMMIT_MSG':
                continue
            files[f['file']] = {"status": f['type'][0]}  # ADDED -> A
        # All-zero parent unless this change depends on another.
        parent = '0000000000000000000000000000000000000000'
        if self.depends_on_change:
            parent = self.depends_on_change.patchsets[
                self.depends_on_patchset - 1]['revision']
        revisions[rev['revision']] = {
            "kind": "REWORK",
            "_number": num,
            "created": rev['createdOn'],
            "uploader": rev['uploader'],
            "ref": rev['ref'],
            "commit": {
                "subject": self.subject,
                "message": self.data['commitMessage'],
                "parents": [{
                    "commit": parent,
                }]
            },
            "files": files
        }
        data = {
            "id": self.project + '~' + self.branch + '~' + self.data['id'],
            "project": self.project,
            "branch": self.branch,
            "hashtags": [],
            "change_id": self.data['id'],
            "subject": self.subject,
            "status": self.data['status'],
            "created": self.data['createdOn'],
            "updated": self.data['lastUpdated'],
            "_number": self.number,
            "owner": self.data['owner'],
            "labels": labels,
            "current_revision": self.patchsets[-1]['revision'],
            "revisions": revisions,
            "requirements": []
        }
        return json.loads(json.dumps(data))

    def queryRevisionHTTP(self, revision):
        """Return related-changes data for *revision*, or None if the
        revision does not belong to any patchset of this change."""
        for ps in self.patchsets:
            if ps['revision'] == revision:
                break
        else:
            return None
        changes = []
        if self.depends_on_change:
            changes.append({
                "commit": {
                    "commit": self.depends_on_change.patchsets[
                        self.depends_on_patchset - 1]['revision'],
                },
                "_change_number": self.depends_on_change.number,
                "_revision_number": self.depends_on_patchset
            })
        for (needed_by_change, needed_by_patchset) in self.needed_by_changes:
            changes.append({
                "commit": {
                    "commit": needed_by_change.patchsets[
                        needed_by_patchset - 1]['revision'],
                },
                "_change_number": needed_by_change.number,
                "_revision_number": needed_by_patchset,
            })
        return {"changes": changes}

    def setMerged(self):
        """Mark the change merged and merge it in the upstream repo.

        Silently does nothing when a dependency is not yet merged or
        when fail_merge is set.
        """
        if (self.depends_on_change and
                self.depends_on_change.data['status'] != 'MERGED'):
            return
        if self.fail_merge:
            return
        self.data['status'] = 'MERGED'
        self.open = False
        path = os.path.join(self.upstream_root, self.project)
        repo = git.Repo(path)
        repo.head.reference = self.branch
        zuul.merger.merger.reset_repo_to_head(repo)
        repo.git.merge('-s', 'resolve', self.patchsets[-1]['ref'])
        repo.heads[self.branch].commit = repo.head.commit

    def setReported(self):
        # Count one report against this change.
        self.reported += 1
  743. class GerritWebServer(object):
  744. def __init__(self, fake_gerrit):
  745. super(GerritWebServer, self).__init__()
  746. self.fake_gerrit = fake_gerrit
  747. def start(self):
  748. fake_gerrit = self.fake_gerrit
  749. class Server(http.server.SimpleHTTPRequestHandler):
  750. log = logging.getLogger("zuul.test.FakeGerritConnection")
  751. review_re = re.compile('/a/changes/(.*?)/revisions/(.*?)/review')
  752. submit_re = re.compile('/a/changes/(.*?)/submit')
  753. pending_checks_re = re.compile(
  754. r'/a/plugins/checks/checks\.pending/\?'
  755. r'query=checker:(.*?)\+\(state:(.*?)\)')
  756. update_checks_re = re.compile(
  757. r'/a/changes/(.*)/revisions/(.*?)/checks/(.*)')
  758. list_checkers_re = re.compile('/a/plugins/checks/checkers/')
  759. change_re = re.compile(r'/a/changes/(.*)\?o=.*')
  760. related_re = re.compile(r'/a/changes/(.*)/revisions/(.*)/related')
  761. change_search_re = re.compile(r'/a/changes/\?n=500.*&q=(.*)')
  762. version_re = re.compile(r'/a/config/server/version')
  763. def do_POST(self):
  764. path = self.path
  765. self.log.debug("Got POST %s", path)
  766. data = self.rfile.read(int(self.headers['Content-Length']))
  767. data = json.loads(data.decode('utf-8'))
  768. self.log.debug("Got data %s", data)
  769. m = self.review_re.match(path)
  770. if m:
  771. return self.review(m.group(1), m.group(2), data)
  772. m = self.submit_re.match(path)
  773. if m:
  774. return self.submit(m.group(1), data)
  775. m = self.update_checks_re.match(path)
  776. if m:
  777. return self.update_checks(
  778. m.group(1), m.group(2), m.group(3), data)
  779. self.send_response(500)
  780. self.end_headers()
  781. def do_GET(self):
  782. path = self.path
  783. self.log.debug("Got GET %s", path)
  784. m = self.change_re.match(path)
  785. if m:
  786. return self.get_change(m.group(1))
  787. m = self.related_re.match(path)
  788. if m:
  789. return self.get_related(m.group(1), m.group(2))
  790. m = self.change_search_re.match(path)
  791. if m:
  792. return self.get_changes(m.group(1))
  793. m = self.pending_checks_re.match(path)
  794. if m:
  795. return self.get_pending_checks(m.group(1), m.group(2))
  796. m = self.list_checkers_re.match(path)
  797. if m:
  798. return self.list_checkers()
  799. m = self.version_re.match(path)
  800. if m:
  801. return self.version()
  802. self.send_response(500)
  803. self.end_headers()
  804. def _404(self):
  805. self.send_response(404)
  806. self.end_headers()
  807. def _get_change(self, change_id):
  808. change_id = urllib.parse.unquote(change_id)
  809. project, branch, change = change_id.split('~')
  810. for c in fake_gerrit.changes.values():
  811. if (c.data['id'] == change and
  812. c.data['branch'] == branch and
  813. c.data['project'] == project):
  814. return c
  815. def review(self, change_id, revision, data):
  816. change = self._get_change(change_id)
  817. if not change:
  818. return self._404()
  819. message = data['message']
  820. labels = data.get('labels', {})
  821. comments = data.get('robot_comments', data.get('comments', {}))
  822. tag = data.get('tag', None)
  823. fake_gerrit._test_handle_review(
  824. int(change.data['number']), message, False, labels,
  825. comments, tag=tag)
  826. self.send_response(200)
  827. self.end_headers()
  828. def submit(self, change_id, data):
  829. change = self._get_change(change_id)
  830. if not change:
  831. return self._404()
  832. message = None
  833. labels = {}
  834. fake_gerrit._test_handle_review(
  835. int(change.data['number']), message, True, labels)
  836. self.send_response(200)
  837. self.end_headers()
  838. def update_checks(self, change_id, revision, checker, data):
  839. self.log.debug("Update checks %s %s %s",
  840. change_id, revision, checker)
  841. change = self._get_change(change_id)
  842. if not change:
  843. return self._404()
  844. change.setCheck(checker, **data)
  845. self.send_response(200)
  846. # TODO: return the real data structure, but zuul
  847. # ignores this now.
  848. self.end_headers()
  849. def get_pending_checks(self, checker, state):
  850. self.log.debug("Get pending checks %s %s", checker, state)
  851. ret = []
  852. for c in fake_gerrit.changes.values():
  853. if checker not in c.checks:
  854. continue
  855. patchset_pending_checks = {}
  856. if c.checks[checker]['state'] == state:
  857. patchset_pending_checks[checker] = {
  858. 'state': c.checks[checker]['state'],
  859. }
  860. if patchset_pending_checks:
  861. ret.append({
  862. 'patch_set': {
  863. 'repository': c.project,
  864. 'change_number': c.number,
  865. 'patch_set_id': c.latest_patchset,
  866. },
  867. 'pending_checks': patchset_pending_checks,
  868. })
  869. self.send_data(ret)
  870. def list_checkers(self):
  871. self.log.debug("Get checkers")
  872. self.send_data(fake_gerrit.fake_checkers)
  873. def get_change(self, number):
  874. change = fake_gerrit.changes.get(int(number))
  875. if not change:
  876. return self._404()
  877. self.send_data(change.queryHTTP())
  878. self.end_headers()
  879. def get_related(self, number, revision):
  880. change = fake_gerrit.changes.get(int(number))
  881. if not change:
  882. return self._404()
  883. data = change.queryRevisionHTTP(revision)
  884. if data is None:
  885. return self._404()
  886. self.send_data(data)
  887. self.end_headers()
  888. def get_changes(self, query):
  889. self.log.debug("simpleQueryHTTP: %s", query)
  890. query = urllib.parse.unquote(query)
  891. fake_gerrit.queries.append(query)
  892. results = []
  893. if query.startswith('(') and 'OR' in query:
  894. query = query[1:-1]
  895. for q in query.split(' OR '):
  896. for r in fake_gerrit._simpleQuery(q, http=True):
  897. if r not in results:
  898. results.append(r)
  899. else:
  900. results = fake_gerrit._simpleQuery(query, http=True)
  901. self.send_data(results)
  902. self.end_headers()
  903. def version(self):
  904. self.send_data('3.0.0-some-stuff')
  905. self.end_headers()
  906. def send_data(self, data):
  907. data = json.dumps(data).encode('utf-8')
  908. data = b")]}'\n" + data
  909. self.send_response(200)
  910. self.send_header('Content-Type', 'application/json')
  911. self.send_header('Content-Length', len(data))
  912. self.end_headers()
  913. self.wfile.write(data)
def log_message(self, fmt, *args):
    # Route http.server's per-request logging to the test logger
    # instead of the default stderr output.
    self.log.debug(fmt, *args)
  916. self.httpd = socketserver.ThreadingTCPServer(('', 0), Server)
  917. self.port = self.httpd.socket.getsockname()[1]
  918. self.thread = threading.Thread(name='GerritWebServer',
  919. target=self.httpd.serve_forever)
  920. self.thread.daemon = True
  921. self.thread.start()
def stop(self):
    """Shut down the HTTP server and wait for its serving thread to exit."""
    self.httpd.shutdown()
    self.thread.join()
  925. class FakeGerritPoller(gerritconnection.GerritPoller):
  926. """A Fake Gerrit poller for use in tests.
  927. This subclasses
  928. :py:class:`~zuul.connection.gerrit.GerritPoller`.
  929. """
  930. poll_interval = 1
  931. def _run(self, *args, **kw):
  932. r = super(FakeGerritPoller, self)._run(*args, **kw)
  933. # Set the event so tests can confirm that the poller has run
  934. # after they changed something.
  935. self.connection._poller_event.set()
  936. return r
  937. class FakeGerritRefWatcher(gitwatcher.GitWatcher):
  938. """A Fake Gerrit ref watcher.
  939. This subclasses
  940. :py:class:`~zuul.connection.git.GitWatcher`.
  941. """
  942. def __init__(self, *args, **kw):
  943. super(FakeGerritRefWatcher, self).__init__(*args, **kw)
  944. self.baseurl = self.connection.upstream_root
  945. self.poll_delay = 1
  946. def _run(self, *args, **kw):
  947. r = super(FakeGerritRefWatcher, self)._run(*args, **kw)
  948. # Set the event so tests can confirm that the watcher has run
  949. # after they changed something.
  950. self.connection._ref_watcher_event.set()
  951. return r
class FakeGerritConnection(gerritconnection.GerritConnection):
    """A Fake Gerrit connection for use in tests.

    This subclasses
    :py:class:`~zuul.connection.gerrit.GerritConnection` to add the
    ability for tests to add changes to the fake Gerrit it represents.
    """

    log = logging.getLogger("zuul.test.FakeGerritConnection")
    # Use the fake poller/watcher so tests can synchronize on their runs.
    _poller_class = FakeGerritPoller
    _ref_watcher_class = FakeGerritRefWatcher

    def __init__(self, driver, connection_name, connection_config,
                 changes_db=None, upstream_root=None, poller_event=None,
                 ref_watcher_event=None):
        # When a password is configured, the connection exercises the
        # HTTP API: start a local fake Gerrit web server and point the
        # baseurl at it.  Otherwise only the SSH code paths are used.
        if connection_config.get('password'):
            self.web_server = GerritWebServer(self)
            self.web_server.start()
            url = 'http://localhost:%s' % self.web_server.port
            connection_config['baseurl'] = url
        else:
            self.web_server = None
        super(FakeGerritConnection, self).__init__(driver, connection_name,
                                                   connection_config)
        self.event_queue = queue.Queue()
        self.fixture_dir = os.path.join(FIXTURE_DIR, 'gerrit')
        # Monotonically increasing fake change number.
        self.change_number = 0
        # changes_db is shared so scheduler restarts see the same changes.
        self.changes = changes_db
        # Record of every query issued, for test assertions.
        self.queries = []
        self.upstream_root = upstream_root
        self.fake_checkers = []
        # Events set by the fake poller/watcher after each run.
        self._poller_event = poller_event
        self._ref_watcher_event = ref_watcher_event

    def addFakeChecker(self, **kw):
        # Register a fake "checks" plugin checker record (returned
        # verbatim by the web server's checkers endpoint).
        self.fake_checkers.append(kw)

    def addFakeChange(self, project, branch, subject, status='NEW',
                      files=None, parent=None):
        """Add a change to the fake Gerrit."""
        self.change_number += 1
        c = FakeGerritChange(self, self.change_number, project, branch,
                             subject, upstream_root=self.upstream_root,
                             status=status, files=files, parent=parent)
        self.changes[self.change_number] = c
        return c

    def addFakeTag(self, project, branch, tag):
        """Create *tag* at the head of *branch* in the upstream repo and
        return the corresponding ref-updated event dict."""
        path = os.path.join(self.upstream_root, project)
        repo = git.Repo(path)
        commit = repo.heads[branch].commit
        newrev = commit.hexsha
        ref = 'refs/tags/' + tag
        git.Tag.create(repo, tag, commit)
        event = {
            "type": "ref-updated",
            "submitter": {
                "name": "User Name",
            },
            "refUpdate": {
                "oldRev": 40 * '0',
                "newRev": newrev,
                "refName": ref,
                "project": project,
            }
        }
        return event

    def getFakeBranchCreatedEvent(self, project, branch):
        """Return a ref-updated event announcing creation of *branch*
        (oldRev is all zeros)."""
        path = os.path.join(self.upstream_root, project)
        repo = git.Repo(path)
        oldrev = 40 * '0'
        event = {
            "type": "ref-updated",
            "submitter": {
                "name": "User Name",
            },
            "refUpdate": {
                "oldRev": oldrev,
                "newRev": repo.heads[branch].commit.hexsha,
                "refName": 'refs/heads/' + branch,
                "project": project,
            }
        }
        return event

    def getFakeBranchDeletedEvent(self, project, branch):
        """Return a ref-updated event announcing deletion of *branch*
        (newRev is all zeros; oldRev is an arbitrary fixed sha)."""
        oldrev = '4abd38457c2da2a72d4d030219ab180ecdb04bf0'
        newrev = 40 * '0'
        event = {
            "type": "ref-updated",
            "submitter": {
                "name": "User Name",
            },
            "refUpdate": {
                "oldRev": oldrev,
                "newRev": newrev,
                "refName": 'refs/heads/' + branch,
                "project": project,
            }
        }
        return event

    def review(self, item, message, submit, labels, checks_api, file_comments,
               zuul_event_id=None):
        # With a web server running, exercise the real HTTP review path;
        # otherwise short-circuit straight to the test handler.
        if self.web_server:
            return super(FakeGerritConnection, self).review(
                item, message, submit, labels, checks_api, file_comments,
                zuul_event_id)
        self._test_handle_review(int(item.change.number), message, submit,
                                 labels)

    def _test_handle_review(self, change_number, message, submit, labels,
                            file_comments=None, tag=None):
        # Handle a review action from a test
        change = self.changes[change_number]
        # Add the approval back onto the change (ie simulate what gerrit would
        # do).
        # Usually when zuul leaves a review it'll create a feedback loop where
        # zuul's review enters another gerrit event (which is then picked up by
        # zuul). However, we can't mimic this behaviour (by adding this
        # approval event into the queue) as it stops jobs from checking what
        # happens before this event is triggered. If a job needs to see what
        # happens they can add their own verified event into the queue.
        # Nevertheless, we can update change with the new review in gerrit.
        for cat in labels:
            change.addApproval(cat, labels[cat], username=self.user,
                               tag=tag)
        if message:
            change.messages.append(message)
        if file_comments:
            for filename, commentlist in file_comments.items():
                for comment in commentlist:
                    change.addComment(filename, comment['line'],
                                      comment['message'], 'Zuul',
                                      'zuul@example.com', self.user,
                                      comment.get('range'))
        if submit:
            change.setMerged()
        if message:
            change.setReported()

    def queryChangeSSH(self, number, event=None):
        """Return the SSH-query dict for change *number*, or {}."""
        self.log.debug("Query change SSH: %s", number)
        change = self.changes.get(int(number))
        if change:
            return change.query()
        return {}

    def _simpleQuery(self, query, http=False):
        """Evaluate one (non-OR) Gerrit query against the fake changes.

        Supports 'change:<id-or-number>' and 'message:<text>'; any other
        query returns all changes.  Results use the HTTP or SSH query
        representation depending on *http*.
        """
        if http:
            def queryMethod(change):
                return change.queryHTTP()
        else:
            def queryMethod(change):
                return change.query()
        # the query can be in parenthesis so strip them if needed
        if query.startswith('('):
            query = query[1:-1]
        if query.startswith('change:'):
            # Query a specific changeid
            changeid = query[len('change:'):]
            l = [queryMethod(change) for change in self.changes.values()
                 if (change.data['id'] == changeid or
                     change.data['number'] == changeid)]
        elif query.startswith('message:'):
            # Query the content of a commit message
            msg = query[len('message:'):].strip()
            # Remove quoting if it is there
            if msg.startswith('{') and msg.endswith('}'):
                msg = msg[1:-1]
            l = [queryMethod(change) for change in self.changes.values()
                 if msg in change.data['commitMessage']]
        else:
            # Query all open changes
            l = [queryMethod(change) for change in self.changes.values()]
        return l

    def simpleQuerySSH(self, query, event=None):
        """Evaluate a possibly OR-composed query over SSH, recording it
        in self.queries and de-duplicating the merged results."""
        log = get_annotated_logger(self.log, event)
        log.debug("simpleQuerySSH: %s", query)
        self.queries.append(query)
        results = []
        if query.startswith('(') and 'OR' in query:
            query = query[1:-1]
            for q in query.split(' OR '):
                for r in self._simpleQuery(q):
                    if r not in results:
                        results.append(r)
        else:
            results = self._simpleQuery(query)
        return results

    def _start_watcher_thread(self, *args, **kw):
        # The real SSH event stream watcher is not used in tests.
        pass

    def _uploadPack(self, project):
        """Return a git upload-pack advertisement listing the refs of
        the upstream test repository (pkt-line framed)."""
        ret = ('00a31270149696713ba7e06f1beb760f20d359c4abed HEAD\x00'
               'multi_ack thin-pack side-band side-band-64k ofs-delta '
               'shallow no-progress include-tag multi_ack_detailed no-done\n')
        path = os.path.join(self.upstream_root, project.name)
        repo = git.Repo(path)
        for ref in repo.refs:
            if ref.path.endswith('.lock'):
                # don't treat lockfiles as ref
                continue
            r = ref.object.hexsha + ' ' + ref.path + '\n'
            # pkt-line: 4-hex-digit length (including the 4 length bytes)
            # followed by the payload.
            ret += '%04x%s' % (len(r) + 4, r)
        ret += '0000'
        return ret

    def getGitUrl(self, project):
        # Clone directly from the local upstream repository.
        return 'file://' + os.path.join(self.upstream_root, project.name)
class PagureChangeReference(git.Reference):
    # Git reference type used to store fake pull-request heads under
    # refs/pull/<n>/head in the upstream test repositories.
    _common_path_default = "refs/pull"
    _points_to_commits_only = True
  1152. class FakePagurePullRequest(object):
  1153. log = logging.getLogger("zuul.test.FakePagurePullRequest")
  1154. def __init__(self, pagure, number, project, branch,
  1155. subject, upstream_root, files={}, number_of_commits=1,
  1156. initial_comment=None):
  1157. self.pagure = pagure
  1158. self.source = pagure
  1159. self.number = number
  1160. self.project = project
  1161. self.branch = branch
  1162. self.subject = subject
  1163. self.upstream_root = upstream_root
  1164. self.number_of_commits = 0
  1165. self.status = 'Open'
  1166. self.initial_comment = initial_comment
  1167. self.uuid = uuid.uuid4().hex
  1168. self.comments = []
  1169. self.flags = []
  1170. self.files = {}
  1171. self.tags = []
  1172. self.cached_merge_status = ''
  1173. self.threshold_reached = False
  1174. self.commit_stop = None
  1175. self.commit_start = None
  1176. self.threshold_reached = False
  1177. self.upstream_root = upstream_root
  1178. self.cached_merge_status = 'MERGE'
  1179. self.url = "https://%s/%s/pull-request/%s" % (
  1180. self.pagure.server, self.project, self.number)
  1181. self.is_merged = False
  1182. self.pr_ref = self._createPRRef()
  1183. self._addCommitInPR(files=files)
  1184. self._updateTimeStamp()
  1185. def _getPullRequestEvent(self, action, pull_data_field='pullrequest'):
  1186. name = 'pg_pull_request'
  1187. data = {
  1188. 'msg': {
  1189. pull_data_field: {
  1190. 'branch': self.branch,
  1191. 'comments': self.comments,
  1192. 'commit_start': self.commit_start,
  1193. 'commit_stop': self.commit_stop,
  1194. 'date_created': '0',
  1195. 'tags': self.tags,
  1196. 'initial_comment': self.initial_comment,
  1197. 'id': self.number,
  1198. 'project': {
  1199. 'fullname': self.project,
  1200. },
  1201. 'status': self.status,
  1202. 'subject': self.subject,
  1203. 'uid': self.uuid,
  1204. }
  1205. },
  1206. 'msg_id': str(uuid.uuid4()),
  1207. 'timestamp': 1427459070,
  1208. 'topic': action
  1209. }
  1210. if action == 'pull-request.flag.added':
  1211. data['msg']['flag'] = self.flags[0]
  1212. if action == 'pull-request.tag.added':
  1213. data['msg']['tags'] = self.tags
  1214. return (name, data)
  1215. def getPullRequestOpenedEvent(self):
  1216. return self._getPullRequestEvent('pull-request.new')
  1217. def getPullRequestClosedEvent(self, merged=True):
  1218. if merged:
  1219. self.is_merged = True
  1220. self.status = 'Merged'
  1221. else:
  1222. self.is_merged = False
  1223. self.status = 'Closed'
  1224. return self._getPullRequestEvent('pull-request.closed')
  1225. def getPullRequestUpdatedEvent(self):
  1226. self._addCommitInPR()
  1227. self.addComment(
  1228. "**1 new commit added**\n\n * ``Bump``\n",
  1229. True)
  1230. return self._getPullRequestEvent('pull-request.comment.added')
  1231. def getPullRequestCommentedEvent(self, message):
  1232. self.addComment(message)
  1233. return self._getPullRequestEvent('pull-request.comment.added')
  1234. def getPullRequestInitialCommentEvent(self, message):
  1235. self.initial_comment = message
  1236. self._updateTimeStamp()
  1237. return self._getPullRequestEvent('pull-request.initial_comment.edited')
  1238. def getPullRequestTagAddedEvent(self, tags, reset=True):
  1239. if reset:
  1240. self.tags = []
  1241. _tags = set(self.tags)
  1242. _tags.update(set(tags))
  1243. self.tags = list(_tags)
  1244. self.addComment(
  1245. "**Metadata Update from @pingou**:\n- " +
  1246. "Pull-request tagged with: %s" % ', '.join(tags),
  1247. True)
  1248. self._updateTimeStamp()
  1249. return self._getPullRequestEvent(
  1250. 'pull-request.tag.added', pull_data_field='pull_request')
  1251. def getPullRequestStatusSetEvent(self, status, username="zuul"):
  1252. self.addFlag(
  1253. status, "https://url", "Build %s" % status, username)
  1254. return self._getPullRequestEvent('pull-request.flag.added')
  1255. def insertFlag(self, flag):
  1256. to_pop = None
  1257. for i, _flag in enumerate(self.flags):
  1258. if _flag['uid'] == flag['uid']:
  1259. to_pop = i
  1260. if to_pop is not None:
  1261. self.flags.pop(to_pop)
  1262. self.flags.insert(0, flag)
  1263. def addFlag(self, status, url, comment, username="zuul"):
  1264. flag_uid = "%s-%s-%s" % (username, self.number, self.project)
  1265. flag = {
  1266. "username": "Zuul CI",
  1267. "user": {
  1268. "name": username
  1269. },
  1270. "uid": flag_uid[:32],
  1271. "comment": comment,
  1272. "status": status,
  1273. "url": url
  1274. }
  1275. self.insertFlag(flag)
  1276. self._updateTimeStamp()
  1277. def editInitialComment(self, initial_comment):
  1278. self.initial_comment = initial_comment
  1279. self._updateTimeStamp()
  1280. def addComment(self, message, notification=False, fullname=None):
  1281. self.comments.append({
  1282. 'comment': message,
  1283. 'notification': notification,
  1284. 'date_created': str(int(time.time())),
  1285. 'user': {
  1286. 'fullname': fullname or 'Pingou'
  1287. }}
  1288. )
  1289. self._updateTimeStamp()
  1290. def getPRReference(self):
  1291. return '%s/head' % self.number
  1292. def _getRepo(self):
  1293. repo_path = os.path.join(self.upstream_root, self.project)
  1294. return git.Repo(repo_path)
  1295. def _createPRRef(self):
  1296. repo = self._getRepo()
  1297. return PagureChangeReference.create(
  1298. repo, self.getPRReference(), 'refs/tags/init')
  1299. def addCommit(self, files={}):
  1300. """Adds a commit on top of the actual PR head."""
  1301. self._addCommitInPR(files=files)
  1302. self._updateTimeStamp()
  1303. def forcePush(self, files={}):
  1304. """Clears actual commits and add a commit on top of the base."""
  1305. self._addCommitInPR(files=files, reset=True)
  1306. self._updateTimeStamp()
  1307. def _addCommitInPR(self, files={}, reset=False):
  1308. repo = self._getRepo()
  1309. ref = repo.references[self.getPRReference()]
  1310. if reset:
  1311. self.number_of_commits = 0
  1312. ref.set_object('refs/tags/init')
  1313. self.number_of_commits += 1
  1314. repo.head.reference = ref
  1315. repo.git.clean('-x', '-f', '-d')
  1316. if files:
  1317. self.files = files
  1318. else:
  1319. fn = '%s-%s' % (self.branch.replace('/', '_'), self.number)
  1320. self.files = {fn: "test %s %s\n" % (self.branch, self.number)}
  1321. msg = self.subject + '-' + str(self.number_of_commits)
  1322. for fn, content in self.files.items():
  1323. fn = os.path.join(repo.working_dir, fn)
  1324. with open(fn, 'w') as f:
  1325. f.write(content)
  1326. repo.index.add([fn])
  1327. self.commit_stop = repo.index.commit(msg).hexsha
  1328. if not self.commit_start:
  1329. self.commit_start = self.commit_stop
  1330. repo.create_head(self.getPRReference(), self.commit_stop, force=True)
  1331. self.pr_ref.set_commit(self.commit_stop)
  1332. repo.head.reference = 'master'
  1333. repo.git.clean('-x', '-f', '-d')
  1334. repo.heads['master'].checkout()
  1335. def _updateTimeStamp(self):
  1336. self.last_updated = str(int(time.time()))
  1337. class FakePagureAPIClient(pagureconnection.PagureAPIClient):
  1338. log = logging.getLogger("zuul.test.FakePagureAPIClient")
  1339. def __init__(self, baseurl, api_token, project,
  1340. pull_requests_db={}):
  1341. super(FakePagureAPIClient, self).__init__(
  1342. baseurl, api_token, project)
  1343. self.session = None
  1344. self.pull_requests = pull_requests_db
  1345. self.return_post_error = None
  1346. def gen_error(self, verb, custom_only=False):
  1347. if verb == 'POST' and self.return_post_error:
  1348. return {
  1349. 'error': self.return_post_error['error'],
  1350. 'error_code': self.return_post_error['error_code']
  1351. }, 401, "", 'POST'
  1352. self.return_post_error = None
  1353. if not custom_only:
  1354. return {
  1355. 'error': 'some error',
  1356. 'error_code': 'some error code'
  1357. }, 503, "", verb
  1358. def _get_pr(self, match):
  1359. project, number = match.groups()
  1360. pr = self.pull_requests.get(project, {}).get(number)
  1361. if not pr:
  1362. return self.gen_error("GET")
  1363. return pr
  1364. def get(self, url):
  1365. self.log.debug("Getting resource %s ..." % url)
  1366. match = re.match(r'.+/api/0/(.+)/pull-request/(\d+)$', url)
  1367. if match:
  1368. pr = self._get_pr(match)
  1369. return {
  1370. 'branch': pr.branch,
  1371. 'subject': pr.subject,
  1372. 'status': pr.status,
  1373. 'initial_comment': pr.initial_comment,
  1374. 'last_updated': pr.last_updated,
  1375. 'comments': pr.comments,
  1376. 'commit_stop': pr.commit_stop,
  1377. 'threshold_reached': pr.threshold_reached,
  1378. 'cached_merge_status': pr.cached_merge_status,
  1379. 'tags': pr.tags,
  1380. }, 200, "", "GET"
  1381. match = re.match(r'.+/api/0/(.+)/pull-request/(\d+)/flag$', url)
  1382. if match:
  1383. pr = self._get_pr(match)
  1384. return {'flags': pr.flags}, 200, "", "GET"
  1385. match = re.match('.+/api/0/(.+)/git/branches$', url)
  1386. if match:
  1387. # project = match.groups()[0]
  1388. return {'branches': ['master']}, 200, "", "GET"
  1389. match = re.match(r'.+/api/0/(.+)/pull-request/(\d+)/diffstats$', url)
  1390. if match:
  1391. pr = self._get_pr(match)
  1392. return pr.files, 200, "", "GET"
  1393. def post(self, url, params=None):
  1394. self.log.info(
  1395. "Posting on resource %s, params (%s) ..." % (url, params))
  1396. # Will only match if return_post_error is set
  1397. err = self.gen_error("POST", custom_only=True)
  1398. if err:
  1399. return err
  1400. match = re.match(r'.+/api/0/(.+)/pull-request/(\d+)/merge$', url)
  1401. if match:
  1402. pr = self._get_pr(match)
  1403. pr.status = 'Merged'
  1404. pr.is_merged = True
  1405. return {}, 200, "", "POST"
  1406. match = re.match(r'.+/api/0/-/whoami$', url)
  1407. if match:
  1408. return {"username": "zuul"}, 200, "", "POST"
  1409. if not params:
  1410. return self.gen_error("POST")
  1411. match = re.match(r'.+/api/0/(.+)/pull-request/(\d+)/flag$', url)
  1412. if match:
  1413. pr = self._get_pr(match)
  1414. params['user'] = {"name": "zuul"}
  1415. pr.insertFlag(params)
  1416. match = re.match(r'.+/api/0/(.+)/pull-request/(\d+)/comment$', url)
  1417. if match:
  1418. pr = self._get_pr(match)
  1419. pr.addComment(params['comment'])
  1420. return {}, 200, "", "POST"
class FakePagureConnection(pagureconnection.PagureConnection):
    """A fake Pagure connection for use in tests.

    Wraps the real connection but serves pull requests from the shared
    in-memory changes_db and emits webhook events either directly via
    the RPC client or through zuul-web.
    """

    log = logging.getLogger("zuul.test.FakePagureConnection")

    def __init__(self, driver, connection_name, connection_config, rpcclient,
                 changes_db=None, upstream_root=None):
        super(FakePagureConnection, self).__init__(driver, connection_name,
                                                   connection_config)
        self.connection_name = connection_name
        # Monotonically increasing fake PR number.
        self.pr_number = 0
        # changes_db is shared so scheduler restarts see the same PRs.
        self.pull_requests = changes_db
        self.statuses = {}
        self.upstream_root = upstream_root
        self.reports = []
        self.rpcclient = rpcclient
        # Clone directly from the local upstream repositories.
        self.cloneurl = self.upstream_root

    def get_project_api_client(self, project):
        """Return a fake API client bound to the shared PR database."""
        client = FakePagureAPIClient(
            self.baseurl, None, project,
            pull_requests_db=self.pull_requests)
        if not self.username:
            self.set_my_username(client)
        return client

    def get_project_webhook_token(self, project):
        # Deterministic per-project token, matched in emitEvent().
        return 'fake_webhook_token-%s' % project

    def emitEvent(self, event, use_zuulweb=False, project=None,
                  wrong_token=False):
        """Deliver a fake webhook event.

        With use_zuulweb=True the event is POSTed to the zuul-web
        endpoint (signed, unless wrong_token); otherwise it is injected
        through the Gearman RPC client.
        """
        name, payload = event
        if use_zuulweb:
            if not wrong_token:
                secret = 'fake_webhook_token-%s' % project
            else:
                secret = ''
            payload = json.dumps(payload).encode('utf-8')
            signature, _ = pagureconnection._sign_request(payload, secret)
            headers = {'x-pagure-signature': signature,
                       'x-pagure-project': project}
            return requests.post(
                'http://127.0.0.1:%s/api/connection/%s/payload'
                % (self.zuul_web_port, self.connection_name),
                data=payload, headers=headers)
        else:
            job = self.rpcclient.submitJob(
                'pagure:%s:payload' % self.connection_name,
                {'payload': payload})
            return json.loads(job.data[0])

    def openFakePullRequest(self, project, branch, subject, files=[],
                            initial_comment=None):
        """Create a fake PR and register it in the shared database.

        NOTE(review): ``files=[]`` is a mutable default; it is never
        mutated here, so behavior is unaffected.
        """
        self.pr_number += 1
        pull_request = FakePagurePullRequest(
            self, self.pr_number, project, branch, subject, self.upstream_root,
            files=files, initial_comment=initial_comment)
        self.pull_requests.setdefault(
            project, {})[str(self.pr_number)] = pull_request
        return pull_request

    def getGitReceiveEvent(self, project):
        """Return a git.receive event for the current head of master."""
        name = 'pg_push'
        repo_path = os.path.join(self.upstream_root, project)
        repo = git.Repo(repo_path)
        headsha = repo.head.commit.hexsha
        data = {
            'msg': {
                'project_fullname': project,
                'branch': 'master',
                'end_commit': headsha,
                'old_commit': '1' * 40,
            },
            'msg_id': str(uuid.uuid4()),
            'timestamp': 1427459070,
            'topic': 'git.receive',
        }
        return (name, data)

    def getGitTagCreatedEvent(self, project, tag, rev):
        """Return a git.tag.creation event for *tag* at *rev*."""
        name = 'pg_push'
        data = {
            'msg': {
                'project_fullname': project,
                'tag': tag,
                'rev': rev
            },
            'msg_id': str(uuid.uuid4()),
            'timestamp': 1427459070,
            'topic': 'git.tag.creation',
        }
        return (name, data)

    def getGitBranchEvent(self, project, branch, type, rev):
        """Return a git.branch.<type> event (e.g. creation/deletion)."""
        name = 'pg_push'
        data = {
            'msg': {
                'project_fullname': project,
                'branch': branch,
                'rev': rev,
            },
            'msg_id': str(uuid.uuid4()),
            'timestamp': 1427459070,
            'topic': 'git.branch.%s' % type,
        }
        return (name, data)

    def setZuulWebPort(self, port):
        # Recorded by the test fixture so emitEvent() can reach zuul-web.
        self.zuul_web_port = port
class FakeGitlabConnection(gitlabconnection.GitlabConnection):
    """A fake Gitlab connection for use in tests.

    Serves merge requests from the shared in-memory changes_db and
    emits webhook events either through the RPC client or zuul-web.
    """

    log = logging.getLogger("zuul.test.FakeGitlabConnection")

    def __init__(self, driver, connection_name, connection_config, rpcclient,
                 changes_db=None, upstream_root=None):
        super(FakeGitlabConnection, self).__init__(driver, connection_name,
                                                   connection_config)
        # changes_db is shared so scheduler restarts see the same MRs.
        self.merge_requests = changes_db
        self.gl_client = FakeGitlabAPIClient(
            self.baseurl, self.api_token, merge_requests_db=changes_db)
        self.rpcclient = rpcclient
        self.upstream_root = upstream_root
        # Monotonically increasing fake MR number.
        self.mr_number = 0

    def getGitUrl(self, project):
        # Clone directly from the local upstream repository.
        return 'file://' + os.path.join(self.upstream_root, project.name)

    def openFakeMergeRequest(self, project,
                             branch, title, description='', files=[]):
        """Create a fake MR and register it in the shared database.

        NOTE(review): ``files=[]`` is a mutable default; it is never
        mutated here, so behavior is unaffected.
        """
        self.mr_number += 1
        merge_request = FakeGitlabMergeRequest(
            self, self.mr_number, project, branch, title, self.upstream_root,
            files=files, description=description)
        self.merge_requests.setdefault(
            project, {})[str(self.mr_number)] = merge_request
        return merge_request

    def emitEvent(self, event, use_zuulweb=False, project=None):
        """Deliver a fake webhook event via zuul-web or the RPC client."""
        name, payload = event
        if use_zuulweb:
            payload = json.dumps(payload).encode('utf-8')
            headers = {'x-gitlab-token': self.webhook_token}
            return requests.post(
                'http://127.0.0.1:%s/api/connection/%s/payload'
                % (self.zuul_web_port, self.connection_name),
                data=payload, headers=headers)
        else:
            job = self.rpcclient.submitJob(
                'gitlab:%s:payload' % self.connection_name,
                {'payload': payload})
            return json.loads(job.data[0])

    def setZuulWebPort(self, port):
        # Recorded by the test fixture so emitEvent() can reach zuul-web.
        self.zuul_web_port = port

    def getPushEvent(
            self, project, before=None, after=None,
            branch='refs/heads/master'):
        """Return a push event; *after* defaults to the repo head sha."""
        name = 'gl_push'
        if not after:
            repo_path = os.path.join(self.upstream_root, project)
            repo = git.Repo(repo_path)
            after = repo.head.commit.hexsha
        data = {
            'object_kind': 'push',
            'before': before or '1' * 40,
            'after': after,
            'ref': branch,
            'project': {
                'path_with_namespace': project
            },
        }
        return (name, data)

    def getGitTagEvent(self, project, tag, sha):
        """Return a tag_push event for *tag* at *sha*."""
        name = 'gl_push'
        data = {
            'object_kind': 'tag_push',
            'before': '0' * 40,
            'after': sha,
            'ref': 'refs/tags/%s' % tag,
            'project': {
                'path_with_namespace': project
            },
        }
        return (name, data)

    @contextmanager
    def enable_community_edition(self):
        # Temporarily make the fake API behave like Gitlab CE (which
        # reports approvals differently from EE).
        self.gl_client.community_edition = True
        yield
        self.gl_client.community_edition = False
  1593. class FakeGitlabAPIClient(gitlabconnection.GitlabAPIClient):
  1594. log = logging.getLogger("zuul.test.FakeGitlabAPIClient")
  1595. def __init__(self, baseurl, api_token, merge_requests_db={}):
  1596. super(FakeGitlabAPIClient, self).__init__(baseurl, api_token)
  1597. self.merge_requests = merge_requests_db
  1598. self.community_edition = False
  1599. def gen_error(self, verb):
  1600. return {
  1601. 'message': 'some error',
  1602. }, 503, "", verb
  1603. def _get_mr(self, match):
  1604. project, number = match.groups()
  1605. project = urllib.parse.unquote(project)
  1606. mr = self.merge_requests.get(project, {}).get(number)
  1607. if not mr:
  1608. return self.gen_error("GET")
  1609. return mr
  1610. def get(self, url, zuul_event_id=None):
  1611. log = get_annotated_logger(self.log, zuul_event_id)
  1612. log.debug("Getting resource %s ..." % url)
  1613. match = re.match(r'.+/projects/(.+)/merge_requests/(\d+)$', url)
  1614. if match:
  1615. mr = self._get_mr(match)
  1616. return {
  1617. 'target_branch': mr.branch,
  1618. 'title': mr.subject,
  1619. 'state': mr.state,
  1620. 'description': mr.description,
  1621. 'author': {
  1622. 'name': 'Administrator',
  1623. 'username': 'admin'
  1624. },
  1625. 'updated_at': mr.updated_at.strftime('%Y-%m-%dT%H:%M:%S.%fZ'),
  1626. 'sha': mr.sha,
  1627. 'labels': mr.labels,
  1628. 'merged_at': mr.merged_at,
  1629. 'diff_refs': {
  1630. 'base_sha': 'c380d3acebd181f13629a25d2e2acca46ffe1e00',
  1631. 'head_sha': '2be7ddb704c7b6b83732fdd5b9f09d5a397b5f8f',
  1632. 'start_sha': 'c380d3acebd181f13629a25d2e2acca46ffe1e00'
  1633. },
  1634. 'merge_status': mr.merge_status,
  1635. }, 200, "", "GET"
  1636. match = re.match('.+/projects/(.+)/repository/branches$', url)
  1637. if match:
  1638. return [{'name': 'master'}], 200, "", "GET"
  1639. match = re.match(
  1640. r'.+/projects/(.+)/merge_requests/(\d+)/approvals$', url)
  1641. if match:
  1642. mr = self._get_mr(match)
  1643. if not self.community_edition:
  1644. return {
  1645. 'approvals_left': 0 if mr.approved else 1,
  1646. }, 200, "", "GET"
  1647. else:
  1648. return {
  1649. 'approved': mr.approved,
  1650. }, 200, "", "GET"
  1651. def post(self, url, params=None, zuul_event_id=None):
  1652. self.log.info(
  1653. "Posting on resource %s, params (%s) ..." % (url, params))
  1654. match = re.match(r'.+/projects/(.+)/merge_requests/(\d+)/notes$', url)
  1655. if match:
  1656. mr = self._get_mr(match)
  1657. mr.addNote(params['body'])
  1658. match = re.match(
  1659. r'.+/projects/(.+)/merge_requests/(\d+)/approve$', url)
  1660. if match:
  1661. assert 'sha' in params
  1662. mr = self._get_mr(match)
  1663. if params['sha'] != mr.sha:
  1664. return {'message': 'SHA does not match HEAD of source '
  1665. 'branch: <new_sha>'}, 409, "", "POST"
  1666. mr.approved = True
  1667. match = re.match(
  1668. r'.+/projects/(.+)/merge_requests/(\d+)/unapprove$', url)
  1669. if match:
  1670. mr = self._get_mr(match)
  1671. mr.approved = False
  1672. return {}, 200, "", "POST"
  1673. def put(self, url, params=None, zuul_event_id=None):
  1674. self.log.info(
  1675. "Put on resource %s, params (%s) ..." % (url, params))
  1676. match = re.match(r'.+/projects/(.+)/merge_requests/(\d+)/merge$', url)
  1677. if match:
  1678. mr = self._get_mr(match)
  1679. mr.mergeMergeRequest()
  1680. return {}, 200, "", "PUT"
class GitlabChangeReference(git.Reference):
    """Git reference type for fake Gitlab merge requests.

    MR heads are stored under ``refs/merge-requests/<number>/head`` in
    the upstream test repository, mirroring Gitlab's real ref layout.
    """
    _common_path_default = "refs/merge-requests"
    _points_to_commits_only = True
  1684. class FakeGitlabMergeRequest(object):
  1685. log = logging.getLogger("zuul.test.FakeGitlabMergeRequest")
  1686. def __init__(self, gitlab, number, project, branch,
  1687. subject, upstream_root, files=[], description=''):
  1688. self.gitlab = gitlab
  1689. self.source = gitlab
  1690. self.number = number
  1691. self.project = project
  1692. self.branch = branch
  1693. self.subject = subject
  1694. self.description = description
  1695. self.upstream_root = upstream_root
  1696. self.number_of_commits = 0
  1697. self.created_at = datetime.datetime.now()
  1698. self.updated_at = self.created_at
  1699. self.merged_at = None
  1700. self.sha = None
  1701. self.state = 'opened'
  1702. self.is_merged = False
  1703. self.merge_status = 'can_be_merged'
  1704. self.labels = []
  1705. self.notes = []
  1706. self.url = "https://%s/%s/merge_requests/%s" % (
  1707. self.gitlab.server, self.project, self.number)
  1708. self.approved = False
  1709. self.mr_ref = self._createMRRef()
  1710. self._addCommitInMR(files=files)
  1711. def _getRepo(self):
  1712. repo_path = os.path.join(self.upstream_root, self.project)
  1713. return git.Repo(repo_path)
  1714. def _createMRRef(self):
  1715. repo = self._getRepo()
  1716. return GitlabChangeReference.create(
  1717. repo, self.getMRReference(), 'refs/tags/init')
  1718. def getMRReference(self):
  1719. return '%s/head' % self.number
  1720. def addNote(self, body):
  1721. self.notes.append(
  1722. {
  1723. "body": body,
  1724. "created_at": datetime.datetime.now(),
  1725. }
  1726. )
  1727. def addCommit(self, files=[]):
  1728. self._addCommitInMR(files=files)
  1729. self._updateTimeStamp()
  1730. def closeMergeRequest(self):
  1731. self.state = 'closed'
  1732. self._updateTimeStamp()
  1733. def mergeMergeRequest(self):
  1734. self.state = 'merged'
  1735. self.is_merged = True
  1736. self._updateTimeStamp()
  1737. self.merged_at = self.updated_at
  1738. def reopenMergeRequest(self):
  1739. self.state = 'opened'
  1740. self._updateTimeStamp()
  1741. self.merged_at = None
  1742. def _addCommitInMR(self, files=[], reset=False):
  1743. repo = self._getRepo()
  1744. ref = repo.references[self.getMRReference()]
  1745. if reset:
  1746. self.number_of_commits = 0
  1747. ref.set_object('refs/tags/init')
  1748. self.number_of_commits += 1
  1749. repo.head.reference = ref
  1750. repo.git.clean('-x', '-f', '-d')
  1751. if files:
  1752. self.files = files
  1753. else:
  1754. fn = '%s-%s' % (self.branch.replace('/', '_'), self.number)
  1755. self.files = {fn: "test %s %s\n" % (self.branch, self.number)}
  1756. msg = self.subject + '-' + str(self.number_of_commits)
  1757. for fn, content in self.files.items():
  1758. fn = os.path.join(repo.working_dir, fn)
  1759. with open(fn, 'w') as f:
  1760. f.write(content)
  1761. repo.index.add([fn])
  1762. self.sha = repo.index.commit(msg).hexsha
  1763. repo.create_head(self.getMRReference(), self.sha, force=True)
  1764. self.mr_ref.set_commit(self.sha)
  1765. repo.head.reference = 'master'
  1766. repo.git.clean('-x', '-f', '-d')
  1767. repo.heads['master'].checkout()
  1768. def _updateTimeStamp(self):
  1769. self.updated_at = datetime.datetime.now()
  1770. def getMergeRequestEvent(self, action, include_labels=False):
  1771. name = 'gl_merge_request'
  1772. data = {
  1773. 'object_kind': 'merge_request',
  1774. 'project': {
  1775. 'path_with_namespace': self.project
  1776. },
  1777. 'object_attributes': {
  1778. 'title': self.subject,
  1779. 'created_at': self.created_at.strftime(
  1780. '%Y-%m-%d %H:%M:%S UTC'),
  1781. 'updated_at': self.updated_at.strftime(
  1782. '%Y-%m-%d %H:%M:%S UTC'),
  1783. 'iid': self.number,
  1784. 'target_branch': self.branch,
  1785. 'last_commit': {'id': self.sha},
  1786. 'action': action
  1787. },
  1788. }
  1789. data['labels'] = [{'title': label} for label in self.labels]
  1790. data['changes'] = {}
  1791. if include_labels:
  1792. data['changes']['labels'] = {
  1793. 'previous': [],
  1794. 'current': data['labels']
  1795. }
  1796. return (name, data)
  1797. def getMergeRequestOpenedEvent(self):
  1798. return self.getMergeRequestEvent(action='open')
  1799. def getMergeRequestUpdatedEvent(self):
  1800. self.addCommit()
  1801. return self.getMergeRequestEvent(action='update')
  1802. def getMergeRequestMergedEvent(self):
  1803. self.mergeMergeRequest()
  1804. return self.getMergeRequestEvent(action='merge')
  1805. def getMergeRequestApprovedEvent(self):
  1806. self.approved = True
  1807. return self.getMergeRequestEvent(action='approved')
  1808. def getMergeRequestUnapprovedEvent(self):
  1809. self.approved = False
  1810. return self.getMergeRequestEvent(action='unapproved')
  1811. def getMergeRequestLabeledEvent(self, labels):
  1812. self.labels = labels
  1813. return self.getMergeRequestEvent(action='update', include_labels=True)
  1814. def getMergeRequestCommentedEvent(self, note):
  1815. self.addNote(note)
  1816. note_date = self.notes[-1]['created_at'].strftime(
  1817. '%Y-%m-%d %H:%M:%S UTC')
  1818. name = 'gl_merge_request'
  1819. data = {
  1820. 'object_kind': 'note',
  1821. 'project': {
  1822. 'path_with_namespace': self.project
  1823. },
  1824. 'merge_request': {
  1825. 'title': self.subject,
  1826. 'iid': self.number,
  1827. 'target_branch': self.branch,
  1828. 'last_commit': {'id': self.sha}
  1829. },
  1830. 'object_attributes': {
  1831. 'created_at': note_date,
  1832. 'updated_at': note_date,
  1833. 'note': self.notes[-1]['body'],
  1834. },
  1835. }
  1836. return (name, data)
class GithubChangeReference(git.Reference):
    """Git reference type for fake Github pull requests.

    PR heads are stored under ``refs/pull/<number>/head`` in the
    upstream test repository, mirroring Github's real ref layout.
    """
    _common_path_default = "refs/pull"
    _points_to_commits_only = True
  1840. class FakeGithubPullRequest(object):
  1841. def __init__(self, github, number, project, branch,
  1842. subject, upstream_root, files=[], number_of_commits=1,
  1843. writers=[], body=None, body_text=None, draft=False,
  1844. base_sha=None):
  1845. """Creates a new PR with several commits.
  1846. Sends an event about opened PR."""
  1847. self.github = github
  1848. self.source = github
  1849. self.number = number
  1850. self.project = project
  1851. self.branch = branch
  1852. self.subject = subject
  1853. self.body = body
  1854. self.body_text = body_text
  1855. self.draft = draft
  1856. self.number_of_commits = 0
  1857. self.upstream_root = upstream_root
  1858. self.files = []
  1859. self.comments = []
  1860. self.labels = []
  1861. self.statuses = {}
  1862. self.reviews = []
  1863. self.writers = []
  1864. self.admins = []
  1865. self.updated_at = None
  1866. self.head_sha = None
  1867. self.is_merged = False
  1868. self.merge_message = None
  1869. self.state = 'open'
  1870. self.url = 'https://%s/%s/pull/%s' % (github.server, project, number)
  1871. self._createPRRef(base_sha=base_sha)
  1872. self._addCommitToRepo(files=files)
  1873. self._updateTimeStamp()
  1874. def addCommit(self, files=[]):
  1875. """Adds a commit on top of the actual PR head."""
  1876. self._addCommitToRepo(files=files)
  1877. self._updateTimeStamp()
  1878. def forcePush(self, files=[]):
  1879. """Clears actual commits and add a commit on top of the base."""
  1880. self._addCommitToRepo(files=files, reset=True)
  1881. self._updateTimeStamp()
  1882. def getPullRequestOpenedEvent(self):
  1883. return self._getPullRequestEvent('opened')
  1884. def getPullRequestSynchronizeEvent(self):
  1885. return self._getPullRequestEvent('synchronize')
  1886. def getPullRequestReopenedEvent(self):
  1887. return self._getPullRequestEvent('reopened')
  1888. def getPullRequestClosedEvent(self):
  1889. return self._getPullRequestEvent('closed')
  1890. def getPullRequestEditedEvent(self):
  1891. return self._getPullRequestEvent('edited')
  1892. def addComment(self, message):
  1893. self.comments.append(message)
  1894. self._updateTimeStamp()
  1895. def getIssueCommentAddedEvent(self, text):
  1896. name = 'issue_comment'
  1897. data = {
  1898. 'action': 'created',
  1899. 'issue': {
  1900. 'number': self.number
  1901. },
  1902. 'comment': {
  1903. 'body': text
  1904. },
  1905. 'repository': {
  1906. 'full_name': self.project
  1907. },
  1908. 'sender': {
  1909. 'login': 'ghuser'
  1910. }
  1911. }
  1912. return (name, data)
  1913. def getCommentAddedEvent(self, text):
  1914. name, data = self.getIssueCommentAddedEvent(text)
  1915. # A PR comment has an additional 'pull_request' key in the issue data
  1916. data['issue']['pull_request'] = {
  1917. 'url': 'http://%s/api/v3/repos/%s/pull/%s' % (
  1918. self.github.server, self.project, self.number)
  1919. }
  1920. return (name, data)
  1921. def getReviewAddedEvent(self, review):
  1922. name = 'pull_request_review'
  1923. data = {
  1924. 'action': 'submitted',
  1925. 'pull_request': {
  1926. 'number': self.number,
  1927. 'title': self.subject,
  1928. 'updated_at': self.updated_at,
  1929. 'base': {
  1930. 'ref': self.branch,
  1931. 'repo': {
  1932. 'full_name': self.project
  1933. }
  1934. },
  1935. 'head': {
  1936. 'sha': self.head_sha
  1937. }
  1938. },
  1939. 'review': {
  1940. 'state': review
  1941. },
  1942. 'repository': {
  1943. 'full_name': self.project
  1944. },
  1945. 'sender': {
  1946. 'login': 'ghuser'
  1947. }
  1948. }
  1949. return (name, data)
  1950. def addLabel(self, name):
  1951. if name not in self.labels:
  1952. self.labels.append(name)
  1953. self._updateTimeStamp()
  1954. return self._getLabelEvent(name)
  1955. def removeLabel(self, name):
  1956. if name in self.labels:
  1957. self.labels.remove(name)
  1958. self._updateTimeStamp()
  1959. return self._getUnlabelEvent(name)
  1960. def _getLabelEvent(self, label):
  1961. name = 'pull_request'
  1962. data = {
  1963. 'action': 'labeled',
  1964. 'pull_request': {
  1965. 'number': self.number,
  1966. 'updated_at': self.updated_at,
  1967. 'base': {
  1968. 'ref': self.branch,
  1969. 'repo': {
  1970. 'full_name': self.project
  1971. }
  1972. },
  1973. 'head': {
  1974. 'sha': self.head_sha
  1975. }
  1976. },
  1977. 'label': {
  1978. 'name': label
  1979. },
  1980. 'sender': {
  1981. 'login': 'ghuser'
  1982. }
  1983. }
  1984. return (name, data)
  1985. def _getUnlabelEvent(self, label):
  1986. name = 'pull_request'
  1987. data = {
  1988. 'action': 'unlabeled',
  1989. 'pull_request': {
  1990. 'number': self.number,
  1991. 'title': self.subject,
  1992. 'updated_at': self.updated_at,
  1993. 'base': {
  1994. 'ref': self.branch,
  1995. 'repo': {
  1996. 'full_name': self.project
  1997. }
  1998. },
  1999. 'head': {
  2000. 'sha': self.head_sha,
  2001. 'repo': {
  2002. 'full_name': self.project
  2003. }
  2004. }
  2005. },
  2006. 'label': {
  2007. 'name': label
  2008. },
  2009. 'sender': {
  2010. 'login': 'ghuser'
  2011. }
  2012. }
  2013. return (name, data)
  2014. def editBody(self, body):
  2015. self.body = body
  2016. self._updateTimeStamp()
  2017. def _getRepo(self):
  2018. repo_path = os.path.join(self.upstream_root, self.project)
  2019. return git.Repo(repo_path)
  2020. def _createPRRef(self, base_sha=None):
  2021. base_sha = base_sha or 'refs/tags/init'
  2022. repo = self._getRepo()
  2023. GithubChangeReference.create(
  2024. repo, self.getPRReference(), base_sha)
  2025. def _addCommitToRepo(self, files=[], reset=False):
  2026. repo = self._getRepo()
  2027. ref = repo.references[self.getPRReference()]
  2028. if reset:
  2029. self.number_of_commits = 0
  2030. ref.set_object('refs/tags/init')
  2031. self.number_of_commits += 1
  2032. repo.head.reference = ref
  2033. zuul.merger.merger.reset_repo_to_head(repo)
  2034. repo.git.clean('-x', '-f', '-d')
  2035. if files:
  2036. self.files = files
  2037. else:
  2038. fn = '%s-%s' % (self.branch.replace('/', '_'), self.number)
  2039. self.files = {fn: "test %s %s\n" % (self.branch, self.number)}
  2040. msg = self.subject + '-' + str(self.number_of_commits)
  2041. for fn, content in self.files.items():
  2042. fn = os.path.join(repo.working_dir, fn)
  2043. with open(fn, 'w') as f:
  2044. f.write(content)
  2045. repo.index.add([fn])
  2046. self.head_sha = repo.index.commit(msg).hexsha
  2047. repo.create_head(self.getPRReference(), self.head_sha, force=True)
  2048. # Create an empty set of statuses for the given sha,
  2049. # each sha on a PR may have a status set on it
  2050. self.statuses[self.head_sha] = []
  2051. repo.head.reference = 'master'
  2052. zuul.merger.merger.reset_repo_to_head(repo)
  2053. repo.git.clean('-x', '-f', '-d')
  2054. repo.heads['master'].checkout()
  2055. def _updateTimeStamp(self):
  2056. self.updated_at = time.strftime('%Y-%m-%dT%H:%M:%SZ', time.localtime())
  2057. def getPRHeadSha(self):
  2058. repo = self._getRepo()
  2059. return repo.references[self.getPRReference()].commit.hexsha
  2060. def addReview(self, user, state, granted_on=None):
  2061. gh_time_format = '%Y-%m-%dT%H:%M:%SZ'
  2062. # convert the timestamp to a str format that would be returned
  2063. # from github as 'submitted_at' in the API response
  2064. if granted_on:
  2065. granted_on = datetime.datetime.utcfromtimestamp(granted_on)
  2066. submitted_at = time.strftime(
  2067. gh_time_format, granted_on.timetuple())
  2068. else:
  2069. # github timestamps only down to the second, so we need to make
  2070. # sure reviews that tests add appear to be added over a period of
  2071. # time in the past and not all at once.
  2072. if not self.reviews:
  2073. # the first review happens 10 mins ago
  2074. offset = 600
  2075. else:
  2076. # subsequent reviews happen 1 minute closer to now
  2077. offset = 600 - (len(self.reviews) * 60)
  2078. granted_on = datetime.datetime.utcfromtimestamp(
  2079. time.time() - offset)
  2080. submitted_at = time.strftime(
  2081. gh_time_format, granted_on.timetuple())
  2082. self.reviews.append(tests.fakegithub.FakeGHReview({
  2083. 'state': state,
  2084. 'user': {
  2085. 'login': user,
  2086. 'email': user + "@example.com",
  2087. },
  2088. 'submitted_at': submitted_at,
  2089. }))
  2090. def getPRReference(self):
  2091. return '%s/head' % self.number
  2092. def _getPullRequestEvent(self, action):
  2093. name = 'pull_request'
  2094. data = {
  2095. 'action': action,
  2096. 'number': self.number,
  2097. 'pull_request': {
  2098. 'number': self.number,
  2099. 'title': self.subject,
  2100. 'updated_at': self.updated_at,
  2101. 'base': {
  2102. 'ref': self.branch,
  2103. 'repo': {
  2104. 'full_name': self.project
  2105. }
  2106. },
  2107. 'head': {
  2108. 'sha': self.head_sha,
  2109. 'repo': {
  2110. 'full_name': self.project
  2111. }
  2112. },
  2113. 'body': self.body
  2114. },
  2115. 'sender': {
  2116. 'login': 'ghuser'
  2117. },
  2118. 'repository': {
  2119. 'full_name': self.project,
  2120. },
  2121. 'installation': {
  2122. 'id': 123,
  2123. },
  2124. 'labels': [{'name': l} for l in self.labels]
  2125. }
  2126. return (name, data)
  2127. def getCommitStatusEvent(self, context, state='success', user='zuul'):
  2128. name = 'status'
  2129. data = {
  2130. 'state': state,
  2131. 'sha': self.head_sha,
  2132. 'name': self.project,
  2133. 'description': 'Test results for %s: %s' % (self.head_sha, state),
  2134. 'target_url': 'http://zuul/%s' % self.head_sha,
  2135. 'branches': [],
  2136. 'context': context,
  2137. 'sender': {
  2138. 'login': user
  2139. }
  2140. }
  2141. return (name, data)
  2142. def getCheckRunRequestedEvent(self, cr_name, app="zuul"):
  2143. name = "check_run"
  2144. data = {
  2145. "action": "rerequested",
  2146. "check_run": {
  2147. "head_sha": self.head_sha,
  2148. "name": cr_name,
  2149. "app": {
  2150. "slug": app,
  2151. },
  2152. },
  2153. "repository": {
  2154. "full_name": self.project,
  2155. },
  2156. }
  2157. return (name, data)
  2158. def getCheckRunAbortEvent(self, check_run):
  2159. # A check run aborted event can only be created from a FakeCheckRun as
  2160. # we need some information like external_id which is "calculated"
  2161. # during the creation of the check run.
  2162. name = "check_run"
  2163. data = {
  2164. "action": "requested_action",
  2165. "requested_action": {
  2166. "identifier": "abort",
  2167. },
  2168. "check_run": {
  2169. "head_sha": self.head_sha,
  2170. "name": check_run["name"],
  2171. "app": {
  2172. "slug": check_run["app"]
  2173. },
  2174. "external_id": check_run["external_id"],
  2175. },
  2176. "repository": {
  2177. "full_name": self.project,
  2178. },
  2179. }
  2180. return (name, data)
  2181. def setMerged(self, commit_message):
  2182. self.is_merged = True
  2183. self.merge_message = commit_message
  2184. repo = self._getRepo()
  2185. repo.heads[self.branch].commit = repo.commit(self.head_sha)
class FakeGithubClientManager(GithubClientManager):
    """Client manager that hands out fake Github clients.

    Clients are backed by the shared FakeGithubData store and can
    optionally be recorded for later inspection by tests.
    """
    github_class = tests.fakegithub.FakeGithubClient
    github_enterprise_class = tests.fakegithub.FakeGithubEnterpriseClient

    def __init__(self, connection_config):
        super().__init__(connection_config)
        # When True, every client handed out is kept in recorded_clients.
        self.record_clients = False
        self.recorded_clients = []
        # Shared fake backing store; assigned by FakeGithubConnection.
        self.github_data = None

    def getGithubClient(self,
                        project_name=None,
                        zuul_event_id=None):
        """Return a fake Github client wired up with the test data."""
        client = super().getGithubClient(
            project_name=project_name,
            zuul_event_id=zuul_event_id)
        # Some tests expect the installation id as part of the client,
        # so attach it when running in app-auth mode.
        if self.app_id:
            inst_id = self.installation_map.get(project_name)
            client.setInstId(inst_id)
        # The super method creates a fake github client with empty data so
        # add it here.
        client.setData(self.github_data)
        if self.record_clients:
            self.recorded_clients.append(client)
        return client

    def _prime_installation_map(self):
        # Pretend each Github org hosts exactly one app installation;
        # map every project to its org's installation id.
        if not self.app_id:
            return
        # simulate one installation per org
        orgs = {}
        latest_inst_id = 0
        for repo in self.github_data.repos:
            inst_id = orgs.get(repo[0])
            if not inst_id:
                latest_inst_id += 1
                inst_id = latest_inst_id
                orgs[repo[0]] = inst_id
            self.installation_map['/'.join(repo)] = inst_id
  2223. class FakeGithubConnection(githubconnection.GithubConnection):
  2224. log = logging.getLogger("zuul.test.FakeGithubConnection")
  2225. client_manager_class = FakeGithubClientManager
  2226. def __init__(self, driver, connection_name, connection_config, rpcclient,
  2227. changes_db=None, upstream_root=None, git_url_with_auth=False):
  2228. super(FakeGithubConnection, self).__init__(driver, connection_name,
  2229. connection_config)
  2230. self.connection_name = connection_name
  2231. self.pr_number = 0
  2232. self.pull_requests = changes_db
  2233. self.statuses = {}
  2234. self.upstream_root = upstream_root
  2235. self.merge_failure = False
  2236. self.merge_not_allowed_count = 0
  2237. self.github_data = tests.fakegithub.FakeGithubData(changes_db)
  2238. self._github_client_manager.github_data = self.github_data
  2239. self.git_url_with_auth = git_url_with_auth
  2240. self.rpcclient = rpcclient
  2241. def setZuulWebPort(self, port):
  2242. self.zuul_web_port = port
  2243. def openFakePullRequest(self, project, branch, subject, files=[],
  2244. body=None, body_text=None, draft=False,
  2245. base_sha=None):
  2246. self.pr_number += 1
  2247. pull_request = FakeGithubPullRequest(
  2248. self, self.pr_number, project, branch, subject, self.upstream_root,
  2249. files=files, body=body, body_text=body_text, draft=draft,
  2250. base_sha=base_sha)
  2251. self.pull_requests[self.pr_number] = pull_request
  2252. return pull_request
  2253. def getPushEvent(self, project, ref, old_rev=None, new_rev=None,
  2254. added_files=None, removed_files=None,
  2255. modified_files=None):
  2256. if added_files is None:
  2257. added_files = []
  2258. if removed_files is None:
  2259. removed_files = []
  2260. if modified_files is None:
  2261. modified_files = []
  2262. if not old_rev:
  2263. old_rev = '0' * 40
  2264. if not new_rev:
  2265. new_rev = random_sha1()
  2266. name = 'push'
  2267. data = {
  2268. 'ref': ref,
  2269. 'before': old_rev,
  2270. 'after': new_rev,
  2271. 'repository': {
  2272. 'full_name': project
  2273. },
  2274. 'commits': [
  2275. {
  2276. 'added': added_files,
  2277. 'removed': removed_files,
  2278. 'modified': modified_files
  2279. }
  2280. ]
  2281. }
  2282. return (name, data)
  2283. def emitEvent(self, event, use_zuulweb=False):
  2284. """Emulates sending the GitHub webhook event to the connection."""
  2285. name, data = event
  2286. payload = json.dumps(data).encode('utf8')
  2287. secret = self.connection_config['webhook_token']
  2288. signature = githubconnection._sign_request(payload, secret)
  2289. headers = {'x-github-event': name,
  2290. 'x-hub-signature': signature,
  2291. 'x-github-delivery': str(uuid.uuid4())}
  2292. if use_zuulweb:
  2293. return requests.post(
  2294. 'http://127.0.0.1:%s/api/connection/%s/payload'
  2295. % (self.zuul_web_port, self.connection_name),
  2296. json=data, headers=headers)
  2297. else:
  2298. job = self.rpcclient.submitJob(
  2299. 'github:%s:payload' % self.connection_name,
  2300. {'headers': headers, 'body': data})
  2301. return json.loads(job.data[0])
  2302. def addProject(self, project):
  2303. # use the original method here and additionally register it in the
  2304. # fake github
  2305. super(FakeGithubConnection, self).addProject(project)
  2306. self.getGithubClient(project.name).addProject(project)
  2307. def getGitUrl(self, project):
  2308. if self.git_url_with_auth:
  2309. auth_token = ''.join(
  2310. random.choice(string.ascii_lowercase) for x in range(8))
  2311. prefix = 'file://x-access-token:%s@' % auth_token
  2312. else:
  2313. prefix = ''
  2314. return prefix + os.path.join(self.upstream_root, str(project))
  2315. def real_getGitUrl(self, project):
  2316. return super(FakeGithubConnection, self).getGitUrl(project)
  2317. def setCommitStatus(self, project, sha, state, url='', description='',
  2318. context='default', user='zuul', zuul_event_id=None):
  2319. # record that this got reported and call original method
  2320. self.github_data.reports.append(
  2321. (project, sha, 'status', (user, context, state)))
  2322. super(FakeGithubConnection, self).setCommitStatus(
  2323. project, sha, state,
  2324. url=url, description=description, context=context)
  2325. def labelPull(self, project, pr_number, label, zuul_event_id=None):
  2326. # record that this got reported
  2327. self.github_data.reports.append((project, pr_number, 'label', label))
  2328. pull_request = self.pull_requests[int(pr_number)]
  2329. pull_request.addLabel(label)
  2330. def unlabelPull(self, project, pr_number, label, zuul_event_id=None):
  2331. # record that this got reported
  2332. self.github_data.reports.append((project, pr_number, 'unlabel', label))
  2333. pull_request = self.pull_requests[pr_number]
  2334. pull_request.removeLabel(label)
  2335. class BuildHistory(object):
  2336. def __init__(self, **kw):
  2337. self.__dict__.update(kw)
  2338. def __repr__(self):
  2339. return ("<Completed build, result: %s name: %s uuid: %s "
  2340. "changes: %s ref: %s>" %
  2341. (self.result, self.name, self.uuid,
  2342. self.changes, self.ref))
  2343. class FakeStatsd(threading.Thread):
  2344. log = logging.getLogger("zuul.test.FakeStatsd")
  2345. def __init__(self):
  2346. threading.Thread.__init__(self)
  2347. self.daemon = True
  2348. self.sock = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM)
  2349. self.sock.bind(('', 0))
  2350. self.port = self.sock.getsockname()[1]
  2351. self.wake_read, self.wake_write = os.pipe()
  2352. self.stats = []
  2353. def run(self):
  2354. while True:
  2355. poll = select.poll()
  2356. poll.register(self.sock, select.POLLIN)
  2357. poll.register(self.wake_read, select.POLLIN)
  2358. ret = poll.poll()
  2359. for (fd, event) in ret:
  2360. if fd == self.sock.fileno():
  2361. data = self.sock.recvfrom(1024)
  2362. if not data:
  2363. return
  2364. self.log.debug("Appending: %s" % data[0])
  2365. self.stats.append(data[0])
  2366. if fd == self.wake_read:
  2367. return
  2368. def stop(self):
  2369. os.write(self.wake_write, b'1\n')
  2370. class FakeBuild(object):
  2371. log = logging.getLogger("zuul.test")
  2372. def __init__(self, executor_server, job):
  2373. self.daemon = True
  2374. self.executor_server = executor_server
  2375. self.job = job
  2376. self.jobdir = None
  2377. self.uuid = job.unique
  2378. self.parameters = json.loads(job.arguments)
  2379. # TODOv3(jeblair): self.node is really "the label of the node
  2380. # assigned". We should rename it (self.node_label?) if we
  2381. # keep using it like this, or we may end up exposing more of
  2382. # the complexity around multi-node jobs here
  2383. # (self.nodes[0].label?)
  2384. self.node = None
  2385. if len(self.parameters.get('nodes')) == 1:
  2386. self.node = self.parameters['nodes'][0]['label']
  2387. self.unique = self.parameters['zuul']['build']
  2388. self.pipeline = self.parameters['zuul']['pipeline']
  2389. self.project = self.parameters['zuul']['project']['name']
  2390. self.name = self.parameters['job']
  2391. self.wait_condition = threading.Condition()
  2392. self.waiting = False
  2393. self.paused = False
  2394. self.aborted = False
  2395. self.requeue = False
  2396. self.created = time.time()
  2397. self.changes = None
  2398. items = self.parameters['zuul']['items']
  2399. self.changes = ' '.join(['%s,%s' % (x['change'], x['patchset'])
  2400. for x in items if 'change' in x])
  2401. if 'change' in items[-1]:
  2402. self.change = ' '.join((items[-1]['change'],
  2403. items[-1]['patchset']))
  2404. else:
  2405. self.change = None
  2406. def __repr__(self):
  2407. waiting = ''
  2408. if self.waiting:
  2409. waiting = ' [waiting]'
  2410. return '<FakeBuild %s:%s %s%s>' % (self.pipeline, self.name,
  2411. self.changes, waiting)
  2412. def release(self):
  2413. """Release this build."""
  2414. self.wait_condition.acquire()
  2415. self.wait_condition.notify()
  2416. self.waiting = False
  2417. self.log.debug("Build %s released" % self.unique)
  2418. self.wait_condition.release()
  2419. def isWaiting(self):
  2420. """Return whether this build is being held.
  2421. :returns: Whether the build is being held.
  2422. :rtype: bool
  2423. """
  2424. self.wait_condition.acquire()
  2425. if self.waiting:
  2426. ret = True
  2427. else:
  2428. ret = False
  2429. self.wait_condition.release()
  2430. return ret
  2431. def _wait(self):
  2432. self.wait_condition.acquire()
  2433. self.waiting = True
  2434. self.log.debug("Build %s waiting" % self.unique)
  2435. self.wait_condition.wait()
  2436. self.wait_condition.release()
  2437. def run(self):
  2438. self.log.debug('Running build %s' % self.unique)
  2439. if self.executor_server.hold_jobs_in_build:
  2440. self.log.debug('Holding build %s' % self.unique)
  2441. self._wait()
  2442. self.log.debug("Build %s continuing" % self.unique)
  2443. self.writeReturnData()
  2444. result = (RecordingAnsibleJob.RESULT_NORMAL, 0) # Success
  2445. if self.shouldFail():
  2446. result = (RecordingAnsibleJob.RESULT_NORMAL, 1) # Failure
  2447. if self.aborted:
  2448. result = (RecordingAnsibleJob.RESULT_ABORTED, None)
  2449. if self.requeue:
  2450. result = (RecordingAnsibleJob.RESULT_UNREACHABLE, None)
  2451. return result
  2452. def shouldFail(self):
  2453. changes = self.executor_server.fail_tests.get(self.name, [])
  2454. for change in changes:
  2455. if self.hasChanges(change):
  2456. return True
  2457. return False
  2458. def writeReturnData(self):
  2459. changes = self.executor_server.return_data.get(self.name, {})
  2460. data = changes.get(self.change)
  2461. if data is None:
  2462. return
  2463. with open(self.jobdir.result_data_file, 'w') as f:
  2464. f.write(json.dumps(data))
  2465. def hasChanges(self, *changes):
  2466. """Return whether this build has certain changes in its git repos.
  2467. :arg FakeChange changes: One or more changes (varargs) that
  2468. are expected to be present (in order) in the git repository of
  2469. the active project.
  2470. :returns: Whether the build has the indicated changes.
  2471. :rtype: bool
  2472. """
  2473. for change in changes:
  2474. hostname = change.source.canonical_hostname
  2475. path = os.path.join(self.jobdir.src_root, hostname, change.project)
  2476. try:
  2477. repo = git.Repo(path)
  2478. except NoSuchPathError as e:
  2479. self.log.debug('%s' % e)
  2480. return False
  2481. repo_messages = [c.message.strip() for c in repo.iter_commits()]
  2482. commit_message = '%s-1' % change.subject
  2483. self.log.debug("Checking if build %s has changes; commit_message "
  2484. "%s; repo_messages %s" % (self, commit_message,
  2485. repo_messages))
  2486. if commit_message not in repo_messages:
  2487. self.log.debug(" messages do not match")
  2488. return False
  2489. self.log.debug(" OK")
  2490. return True
  2491. def getWorkspaceRepos(self, projects):
  2492. """Return workspace git repo objects for the listed projects
  2493. :arg list projects: A list of strings, each the canonical name
  2494. of a project.
  2495. :returns: A dictionary of {name: repo} for every listed
  2496. project.
  2497. :rtype: dict
  2498. """
  2499. repos = {}
  2500. for project in projects:
  2501. path = os.path.join(self.jobdir.src_root, project)
  2502. repo = git.Repo(path)
  2503. repos[project] = repo
  2504. return repos
class RecordingAnsibleJob(zuul.executor.server.AnsibleJob):
    """An AnsibleJob that records build results into the executor's
    build history instead of only reporting them.

    Works together with the recording executor server and FakeBuild so
    tests can hold, release and inspect builds.
    """
    # Result of the last runPlaybooks invocation (None until it ran).
    result = None

    def doMergeChanges(self, merger, items, repo_state):
        """Merge the job's changes, recording MERGER_FAILURE on conflict.

        Also allows tests to hold jobs between start and run via
        executor_server.hold_jobs_in_start.
        """
        # Get a merger in order to update the repos involved in this job.
        commit = super(RecordingAnsibleJob, self).doMergeChanges(
            merger, items, repo_state)
        if not commit:  # merge conflict
            self.recordResult('MERGER_FAILURE')
        for _ in iterate_timeout(60, 'wait for merge'):
            if not self.executor_server.hold_jobs_in_start:
                break
            time.sleep(1)
        return commit

    def recordResult(self, result):
        """Append a BuildHistory entry for this job exactly once.

        The executor lock guards build_history/running_builds/job_builds;
        a build missing from job_builds means it was already recorded.
        """
        self.executor_server.lock.acquire()
        build = self.executor_server.job_builds.get(self.job.unique)
        if not build:
            self.executor_server.lock.release()
            # Already recorded
            return
        self.executor_server.build_history.append(
            BuildHistory(name=build.name, result=result, changes=build.changes,
                         node=build.node, uuid=build.unique,
                         ref=build.parameters['zuul']['ref'],
                         newrev=build.parameters['zuul'].get('newrev'),
                         parameters=build.parameters, jobdir=build.jobdir,
                         pipeline=build.parameters['zuul']['pipeline'])
        )
        self.executor_server.running_builds.remove(build)
        del self.executor_server.job_builds[self.job.unique]
        self.executor_server.lock.release()

    def runPlaybooks(self, args):
        """Run the playbooks, remembering the result for later recording."""
        build = self.executor_server.job_builds[self.job.unique]
        build.jobdir = self.jobdir
        self.result = super(RecordingAnsibleJob, self).runPlaybooks(args)
        if self.result is None:
            # Record result now because cleanup won't be performed
            self.recordResult(None)
        return self.result

    def runCleanupPlaybooks(self, success):
        # Record the result only after cleanup has run, mirroring the
        # real executor's ordering.
        super(RecordingAnsibleJob, self).runCleanupPlaybooks(success)
        if self.result is not None:
            self.recordResult(self.result)

    def runAnsible(self, cmd, timeout, playbook, ansible_version,
                   wrapped=True, cleanup=False):
        """Either run real Ansible or delegate to the FakeBuild."""
        build = self.executor_server.job_builds[self.job.unique]
        if self.executor_server._run_ansible:
            # Call run on the fake build omitting the result so we also can
            # hold real ansible jobs.
            if playbook.path:
                build.run()
            result = super(RecordingAnsibleJob, self).runAnsible(
                cmd, timeout, playbook, ansible_version, wrapped, cleanup)
        else:
            if playbook.path:
                result = build.run()
            else:
                result = (self.RESULT_NORMAL, 0)
        return result

    def getHostList(self, args):
        # Force a local connection for every host so test jobs run on
        # the executor itself.
        self.log.debug("hostlist")
        hosts = super(RecordingAnsibleJob, self).getHostList(args)
        for host in hosts:
            if not host['host_vars'].get('ansible_connection'):
                host['host_vars']['ansible_connection'] = 'local'
        return hosts

    def pause(self):
        build = self.executor_server.job_builds[self.job.unique]
        build.paused = True
        super().pause()

    def resume(self):
        build = self.executor_server.job_builds.get(self.job.unique)
        if build:
            build.paused = False
        super().resume()

    def _send_aborted(self):
        self.recordResult('ABORTED')
        super()._send_aborted()
  2583. class RecordingMergeClient(zuul.merger.client.MergeClient):
  2584. def __init__(self, config, sched):
  2585. super().__init__(config, sched)
  2586. self.history = {}
  2587. def submitJob(self, name, data, build_set,
  2588. precedence=zuul.model.PRECEDENCE_NORMAL, event=None):
  2589. self.history.setdefault(name, [])
  2590. self.history[name].append((data, build_set))
  2591. return super().submitJob(
  2592. name, data, build_set, precedence, event=event)
  2593. class RecordingExecutorServer(zuul.executor.server.ExecutorServer):
  2594. """An Ansible executor to be used in tests.
  2595. :ivar bool hold_jobs_in_build: If true, when jobs are executed
  2596. they will report that they have started but then pause until
  2597. released before reporting completion. This attribute may be
  2598. changed at any time and will take effect for subsequently
  2599. executed builds, but previously held builds will still need to
  2600. be explicitly released.
  2601. """
  2602. _job_class = RecordingAnsibleJob
  2603. def __init__(self, *args, **kw):
  2604. self._run_ansible = kw.pop('_run_ansible', False)
  2605. self._test_root = kw.pop('_test_root', False)
  2606. if self._run_ansible:
  2607. self._ansible_manager_class = zuul.lib.ansible.AnsibleManager
  2608. else:
  2609. self._ansible_manager_class = FakeAnsibleManager
  2610. super(RecordingExecutorServer, self).__init__(*args, **kw)
  2611. self.hold_jobs_in_build = False
  2612. self.hold_jobs_in_start = False
  2613. self.lock = threading.Lock()
  2614. self.running_builds = []
  2615. self.build_history = []
  2616. self.fail_tests = {}
  2617. self.return_data = {}
  2618. self.job_builds = {}
  2619. def failJob(self, name, change):
  2620. """Instruct the executor to report matching builds as failures.
  2621. :arg str name: The name of the job to fail.
  2622. :arg Change change: The :py:class:`~tests.base.FakeChange`
  2623. instance which should cause the job to fail. This job
  2624. will also fail for changes depending on this change.
  2625. """
  2626. l = self.fail_tests.get(name, [])
  2627. l.append(change)
  2628. self.fail_tests[name] = l
  2629. def returnData(self, name, change, data):
  2630. """Instruct the executor to return data for this build.
  2631. :arg str name: The name of the job to return data.
  2632. :arg Change change: The :py:class:`~tests.base.FakeChange`
  2633. instance which should cause the job to return data.
  2634. :arg dict data: The data to return
  2635. """
  2636. # TODO(clarkb) We are incredibly change focused here and in FakeBuild
  2637. # above. This makes it very difficult to test non change items with
  2638. # return data. We currently rely on the hack that None is used as a
  2639. # key for the changes dict, but we should improve that to look up
  2640. # refnames or similar.
  2641. changes = self.return_data.setdefault(name, {})
  2642. if hasattr(change, 'number'):
  2643. cid = ' '.join((str(change.number), str(change.latest_patchset)))
  2644. else:
  2645. # Not actually a change, but a ref update event for tags/etc
  2646. # In this case a key of None is used by writeReturnData
  2647. cid = None
  2648. changes[cid] = data
  2649. def release(self, regex=None):
  2650. """Release a held build.
  2651. :arg str regex: A regular expression which, if supplied, will
  2652. cause only builds with matching names to be released. If
  2653. not supplied, all builds will be released.
  2654. """
  2655. builds = self.running_builds[:]
  2656. if len(builds) == 0:
  2657. self.log.debug('No running builds to release')
  2658. return
  2659. self.log.debug("Releasing build %s (%s)" % (regex, len(builds)))
  2660. for build in builds:
  2661. if not regex or re.match(regex, build.name):
  2662. self.log.debug("Releasing build %s" %
  2663. (build.parameters['zuul']['build']))
  2664. build.release()
  2665. else:
  2666. self.log.debug("Not releasing build %s" %
  2667. (build.parameters['zuul']['build']))
  2668. self.log.debug("Done releasing builds %s (%s)" %
  2669. (regex, len(builds)))
  2670. def executeJob(self, job):
  2671. build = FakeBuild(self, job)
  2672. job.build = build
  2673. self.running_builds.append(build)
  2674. self.job_builds[job.unique] = build
  2675. args = json.loads(job.arguments)
  2676. args['zuul']['_test'] = dict(test_root=self._test_root)
  2677. job.arguments = json.dumps(args)
  2678. super(RecordingExecutorServer, self).executeJob(job)
  2679. def stopJob(self, job):
  2680. self.log.debug("handle stop")
  2681. parameters = json.loads(job.arguments)
  2682. uuid = parameters['uuid']
  2683. for build in self.running_builds:
  2684. if build.unique == uuid:
  2685. build.aborted = True
  2686. build.release()
  2687. super(RecordingExecutorServer, self).stopJob(job)
  2688. def stop(self):
  2689. for build in self.running_builds:
  2690. build.release()
  2691. super(RecordingExecutorServer, self).stop()
class FakeGearmanServer(gear.Server):
    """A Gearman server for use in tests.

    :ivar bool hold_jobs_in_queue: If true, submitted jobs will be
        added to the queue but will not be distributed to workers
        until released.  This attribute may be changed at any time and
        will take effect for subsequently enqueued jobs, but
        previously held jobs will still need to be explicitly
        released.
    """

    def __init__(self, use_ssl=False):
        self.hold_jobs_in_queue = False
        self.hold_merge_jobs_in_queue = False
        self.jobs_history = []
        if use_ssl:
            ssl_ca = os.path.join(FIXTURE_DIR, 'gearman/root-ca.pem')
            ssl_cert = os.path.join(FIXTURE_DIR, 'gearman/server.pem')
            ssl_key = os.path.join(FIXTURE_DIR, 'gearman/server.key')
        else:
            ssl_ca = None
            ssl_cert = None
            ssl_key = None
        # Port 0: let the OS pick a free port for the test server.
        super(FakeGearmanServer, self).__init__(0, ssl_key=ssl_key,
                                                ssl_cert=ssl_cert,
                                                ssl_ca=ssl_ca)

    def getJobForConnection(self, connection, peek=False):
        # Scan the queues in priority order, honoring the per-kind
        # "hold" flags so tests can keep jobs queued until released.
        for job_queue in [self.high_queue, self.normal_queue, self.low_queue]:
            for job in job_queue:
                # NOTE(review): history is appended on every scan,
                # including peeks, so a job can appear here repeatedly.
                self.jobs_history.append(job)
                if not hasattr(job, 'waiting'):
                    # First time we see this job: decide whether it is
                    # held, based on the current hold flags.
                    if job.name.startswith(b'executor:execute'):
                        job.waiting = self.hold_jobs_in_queue
                    elif job.name.startswith(b'merger:'):
                        job.waiting = self.hold_merge_jobs_in_queue
                    else:
                        job.waiting = False
                if job.waiting:
                    continue
                if job.name in connection.functions:
                    if not peek:
                        # Hand the job to this worker connection.
                        job_queue.remove(job)
                        connection.related_jobs[job.handle] = job
                        job.worker_connection = connection
                    job.running = True
                    return job
        return None

    def release(self, regex=None):
        """Release a held job.

        :arg str regex: A regular expression which, if supplied, will
            cause only jobs with matching names to be released.  If
            not supplied, all jobs will be released.
        """
        released = False
        qlen = (len(self.high_queue) + len(self.normal_queue) +
                len(self.low_queue))
        self.log.debug("releasing queued job %s (%s)" % (regex, qlen))
        for job in self.getQueue():
            match = False
            if job.name.startswith(b'executor:execute'):
                # The regex matches the zuul job name carried in the
                # gearman job arguments, not the gearman function name.
                parameters = json.loads(job.arguments.decode('utf8'))
                if not regex or re.match(regex, parameters.get('job')):
                    match = True
            if job.name.startswith(b'merger:'):
                # Merger jobs are only released as a group (no regex).
                if not regex:
                    match = True
            if match:
                self.log.debug("releasing queued job %s" %
                               job.unique)
                job.waiting = False
                released = True
            else:
                self.log.debug("not releasing queued job %s" %
                               job.unique)
        if released:
            # Wake workers so they re-poll for newly-released jobs.
            self.wakeConnections()
        qlen = (len(self.high_queue) + len(self.normal_queue) +
                len(self.low_queue))
        self.log.debug("done releasing queued jobs %s (%s)" % (regex, qlen))
  2769. class FakeSMTP(object):
  2770. log = logging.getLogger('zuul.FakeSMTP')
  2771. def __init__(self, messages, server, port):
  2772. self.server = server
  2773. self.port = port
  2774. self.messages = messages
  2775. def sendmail(self, from_email, to_email, msg):
  2776. self.log.info("Sending email from %s, to %s, with msg %s" % (
  2777. from_email, to_email, msg))
  2778. headers = msg.split('\n\n', 1)[0]
  2779. body = msg.split('\n\n', 1)[1]
  2780. self.messages.append(dict(
  2781. from_email=from_email,
  2782. to_email=to_email,
  2783. msg=msg,
  2784. headers=headers,
  2785. body=body,
  2786. ))
  2787. return True
  2788. def quit(self):
  2789. return True
class FakeNodepool(object):
    """Simulate a nodepool launcher backed by ZooKeeper.

    A background thread watches the node-request znodes and fulfills
    (or fails) them with fake node records so tests can run without a
    real nodepool.
    """
    REQUEST_ROOT = '/nodepool/requests'
    NODE_ROOT = '/nodepool/nodes'
    LAUNCHER_ROOT = '/nodepool/launchers'

    log = logging.getLogger("zuul.test.FakeNodepool")

    def __init__(self, host, port, chroot):
        # Set after every pass over outstanding requests; pause() waits
        # on it so it never interrupts a half-finished pass.
        self.complete_event = threading.Event()
        self.host_keys = None
        self.client = kazoo.client.KazooClient(
            hosts='%s:%s%s' % (host, port, chroot))
        self.client.start()
        self.registerLauncher()
        self._running = True
        self.paused = False
        self.thread = threading.Thread(target=self.run)
        self.thread.daemon = True
        self.thread.start()
        # Request oids that should be marked 'failed' instead of
        # 'fulfilled' (see addFailRequest).
        self.fail_requests = set()
        self.remote_ansible = False
        self.attributes = None
        self.resources = None
        self.python_path = 'auto'

    def stop(self):
        # Stop the polling thread and shut down the ZK client.
        self._running = False
        self.thread.join()
        self.client.stop()
        self.client.close()

    def pause(self):
        # Wait for the in-flight pass to finish before pausing.
        self.complete_event.wait()
        self.paused = True

    def unpause(self):
        self.paused = False

    def run(self):
        # Poll loop of the background thread; never raises.
        while self._running:
            self.complete_event.clear()
            try:
                self._run()
            except Exception:
                self.log.exception("Error in fake nodepool:")
            self.complete_event.set()
            time.sleep(0.1)

    def _run(self):
        if self.paused:
            return
        for req in self.getNodeRequests():
            self.fulfillRequest(req)

    def registerLauncher(self, labels=["label1"], id="FakeLauncher"):
        # NOTE(review): mutable default argument ('labels') and 'id'
        # shadowing the builtin -- harmless here since neither is
        # mutated, but worth cleaning up.
        path = os.path.join(self.LAUNCHER_ROOT, id)
        data = {'id': id, 'supported_labels': labels}
        self.client.create(
            path, json.dumps(data).encode('utf8'), makepath=True)

    def getNodeRequests(self):
        """Return all pending node requests, in nodepool priority order."""
        try:
            reqids = self.client.get_children(self.REQUEST_ROOT)
        except kazoo.exceptions.NoNodeError:
            return []
        reqs = []
        for oid in reqids:
            path = self.REQUEST_ROOT + '/' + oid
            try:
                data, stat = self.client.get(path)
                data = json.loads(data.decode('utf8'))
                data['_oid'] = oid
                reqs.append(data)
            except kazoo.exceptions.NoNodeError:
                # Request disappeared between listing and reading.
                pass
        # Sort by priority prefix, then relative priority, then the
        # sequence portion of the znode name.
        reqs.sort(key=lambda r: (r['_oid'].split('-')[0],
                                 r['relative_priority'],
                                 r['_oid'].split('-')[1]))
        return reqs

    def getNodes(self):
        """Return all node records, annotated with '_oid' and '_lock'."""
        try:
            nodeids = self.client.get_children(self.NODE_ROOT)
        except kazoo.exceptions.NoNodeError:
            return []
        nodes = []
        for oid in sorted(nodeids):
            path = self.NODE_ROOT + '/' + oid
            data, stat = self.client.get(path)
            data = json.loads(data.decode('utf8'))
            data['_oid'] = oid
            try:
                lockfiles = self.client.get_children(path + '/lock')
            except kazoo.exceptions.NoNodeError:
                lockfiles = []
            if lockfiles:
                data['_lock'] = True
            else:
                data['_lock'] = False
            nodes.append(data)
        return nodes

    def makeNode(self, request_id, node_type):
        """Create a fake 'ready' node znode and return its id."""
        now = time.time()
        path = '/nodepool/nodes/'
        remote_ip = os.environ.get('ZUUL_REMOTE_IPV4', '127.0.0.1')
        if self.remote_ansible and not self.host_keys:
            # Real-ansible mode: scan the remote host's actual SSH key.
            self.host_keys = self.keyscan(remote_ip)
        host_keys = self.host_keys or ["fake-key1", "fake-key2"]
        data = dict(type=node_type,
                    cloud='test-cloud',
                    provider='test-provider',
                    region='test-region',
                    az='test-az',
                    attributes=self.attributes,
                    host_id='test-host-id',
                    interface_ip=remote_ip,
                    public_ipv4=remote_ip,
                    private_ipv4=None,
                    public_ipv6=None,
                    python_path=self.python_path,
                    allocated_to=request_id,
                    state='ready',
                    state_time=now,
                    created_time=now,
                    updated_time=now,
                    image_id=None,
                    host_keys=host_keys,
                    executor='fake-nodepool',
                    hold_expiration=None)
        if self.resources:
            data['resources'] = self.resources
        if self.remote_ansible:
            data['connection_type'] = 'ssh'
        # Node-type name conventions select connection details so tests
        # can exercise the various ansible connection plugins.
        if 'fakeuser' in node_type:
            data['username'] = 'fakeuser'
        if 'windows' in node_type:
            data['connection_type'] = 'winrm'
        if 'network' in node_type:
            data['connection_type'] = 'network_cli'
        if 'kubernetes-namespace' in node_type or 'fedora-pod' in node_type:
            data['connection_type'] = 'namespace'
            data['connection_port'] = {
                'name': 'zuul-ci',
                'namespace': 'zuul-ci-abcdefg',
                'host': 'localhost',
                'skiptls': True,
                'token': 'FakeToken',
                'ca_crt': 'FakeCA',
                'user': 'zuul-worker',
            }
            if 'fedora-pod' in node_type:
                data['connection_type'] = 'kubectl'
                data['connection_port']['pod'] = 'fedora-abcdefg'
        data = json.dumps(data).encode('utf8')
        path = self.client.create(path, data,
                                  makepath=True,
                                  sequence=True)
        nodeid = path.split("/")[-1]
        return nodeid

    def removeNode(self, node):
        path = self.NODE_ROOT + '/' + node["_oid"]
        self.client.delete(path, recursive=True)

    def addFailRequest(self, request):
        # Subsequent fulfillment of this request will mark it 'failed'.
        self.fail_requests.add(request['_oid'])

    def fulfillRequest(self, request):
        if request['state'] != 'requested':
            return
        request = request.copy()
        oid = request['_oid']
        del request['_oid']

        if oid in self.fail_requests:
            request['state'] = 'failed'
        else:
            request['state'] = 'fulfilled'
            nodes = []
            for node in request['node_types']:
                nodeid = self.makeNode(oid, node)
                nodes.append(nodeid)
            request['nodes'] = nodes

        request['state_time'] = time.time()
        path = self.REQUEST_ROOT + '/' + oid
        data = json.dumps(request).encode('utf8')
        self.log.debug("Fulfilling node request: %s %s" % (oid, data))
        try:
            self.client.set(path, data)
        except kazoo.exceptions.NoNodeError:
            # The requester gave up while we were working.
            self.log.debug("Node request %s %s disappeared" % (oid, data))

    def keyscan(self, ip, port=22, timeout=60):
        '''
        Scan the IP address for public SSH keys.

        Keys are returned formatted as: "<type> <base64_string>"
        '''
        # NOTE(review): relies on 'paramiko' being imported elsewhere in
        # this module -- not visible in this chunk; confirm at file top.
        addrinfo = socket.getaddrinfo(ip, port)[0]
        family = addrinfo[0]
        sockaddr = addrinfo[4]

        keys = []
        key = None
        for count in iterate_timeout(timeout, "ssh access"):
            sock = None
            t = None
            try:
                sock = socket.socket(family, socket.SOCK_STREAM)
                sock.settimeout(timeout)
                sock.connect(sockaddr)
                t = paramiko.transport.Transport(sock)
                t.start_client(timeout=timeout)
                key = t.get_remote_server_key()
                break
            except socket.error as e:
                # Connection refused/unreachable just means "retry".
                if e.errno not in [
                        errno.ECONNREFUSED, errno.EHOSTUNREACH, None]:
                    self.log.exception(
                        'Exception with ssh access to %s:' % ip)
            except Exception as e:
                self.log.exception("ssh-keyscan failure: %s", e)
            finally:
                try:
                    if t:
                        t.close()
                except Exception as e:
                    self.log.exception('Exception closing paramiko: %s', e)
                try:
                    if sock:
                        sock.close()
                except Exception as e:
                    self.log.exception('Exception closing socket: %s', e)

        # Paramiko, at this time, seems to return only the ssh-rsa key, so
        # only the single key is placed into the list.
        if key:
            keys.append("%s %s" % (key.get_name(), key.get_base64()))

        return keys
  3011. class ChrootedKazooFixture(fixtures.Fixture):
  3012. def __init__(self, test_id):
  3013. super(ChrootedKazooFixture, self).__init__()
  3014. if 'ZOOKEEPER_2181_TCP' in os.environ:
  3015. # prevent any nasty hobbits^H^H^H suprises
  3016. if 'NODEPOOL_ZK_HOST' in os.environ:
  3017. raise Exception(
  3018. 'Looks like tox-docker is being used but you have also '
  3019. 'configured NODEPOOL_ZK_HOST. Either avoid using the '
  3020. 'docker environment or unset NODEPOOL_ZK_HOST.')
  3021. zk_host = 'localhost:' + os.environ['ZOOKEEPER_2181_TCP']
  3022. elif 'NODEPOOL_ZK_HOST' in os.environ:
  3023. zk_host = os.environ['NODEPOOL_ZK_HOST']
  3024. else:
  3025. zk_host = 'localhost'
  3026. if ':' in zk_host:
  3027. host, port = zk_host.split(':')
  3028. else:
  3029. host = zk_host
  3030. port = None
  3031. self.zookeeper_host = host
  3032. if not port:
  3033. self.zookeeper_port = 2181
  3034. else:
  3035. self.zookeeper_port = int(port)
  3036. self.test_id = test_id
  3037. def _setUp(self):
  3038. # Make sure the test chroot paths do not conflict
  3039. random_bits = ''.join(random.choice(string.ascii_lowercase +
  3040. string.ascii_uppercase)
  3041. for x in range(8))
  3042. rand_test_path = '%s_%s_%s' % (random_bits, os.getpid(), self.test_id)
  3043. self.zookeeper_chroot = "/nodepool_test/%s" % rand_test_path
  3044. self.addCleanup(self._cleanup)
  3045. # Ensure the chroot path exists and clean up any pre-existing znodes.
  3046. _tmp_client = kazoo.client.KazooClient(
  3047. hosts='%s:%s' % (self.zookeeper_host, self.zookeeper_port))
  3048. _tmp_client.start()
  3049. if _tmp_client.exists(self.zookeeper_chroot):
  3050. _tmp_client.delete(self.zookeeper_chroot, recursive=True)
  3051. _tmp_client.ensure_path(self.zookeeper_chroot)
  3052. _tmp_client.stop()
  3053. _tmp_client.close()
  3054. def _cleanup(self):
  3055. '''Remove the chroot path.'''
  3056. # Need a non-chroot'ed client to remove the chroot path
  3057. _tmp_client = kazoo.client.KazooClient(
  3058. hosts='%s:%s' % (self.zookeeper_host, self.zookeeper_port))
  3059. _tmp_client.start()
  3060. _tmp_client.delete(self.zookeeper_chroot, recursive=True)
  3061. _tmp_client.stop()
  3062. _tmp_client.close()
  3063. class WebProxyFixture(fixtures.Fixture):
  3064. def __init__(self, rules):
  3065. super(WebProxyFixture, self).__init__()
  3066. self.rules = rules
  3067. def _setUp(self):
  3068. rules = self.rules
  3069. class Proxy(http.server.SimpleHTTPRequestHandler):
  3070. log = logging.getLogger('zuul.WebProxyFixture.Proxy')
  3071. def do_GET(self):
  3072. path = self.path
  3073. for (pattern, replace) in rules:
  3074. path = re.sub(pattern, replace, path)
  3075. resp = requests.get(path)
  3076. self.send_response(resp.status_code)
  3077. if resp.status_code >= 300:
  3078. self.end_headers()
  3079. return
  3080. for key, val in resp.headers.items():
  3081. self.send_header(key, val)
  3082. self.end_headers()
  3083. self.wfile.write(resp.content)
  3084. def log_message(self, fmt, *args):
  3085. self.log.debug(fmt, *args)
  3086. self.httpd = socketserver.ThreadingTCPServer(('', 0), Proxy)
  3087. self.port = self.httpd.socket.getsockname()[1]
  3088. self.thread = threading.Thread(target=self.httpd.serve_forever)
  3089. self.thread.start()
  3090. self.addCleanup(self._cleanup)
  3091. def _cleanup(self):
  3092. self.httpd.shutdown()
  3093. self.thread.join()
class ZuulWebFixture(fixtures.Fixture):
    """Start a zuul-web instance wired to fake connections for tests."""

    def __init__(self, gearman_server_port,
                 changes: Dict[str, Dict[str, Change]], config: ConfigParser,
                 additional_event_queues, upstream_root: str,
                 rpcclient: RPCClient, poller_events, git_url_with_auth: bool,
                 add_cleanup: Callable[[Callable[[], None]], None],
                 test_root, info=None, zk_hosts=None):
        super(ZuulWebFixture, self).__init__()
        self.gearman_server_port = gearman_server_port

        # Use the fake connection registry so the web server talks to
        # the test drivers instead of real services.
        self.connections = TestConnectionRegistry(
            changes, config, additional_event_queues, upstream_root, rpcclient,
            poller_events, git_url_with_auth, add_cleanup)
        self.connections.configure(
            config,
            include_drivers=[zuul.driver.sql.SQLDriver,
                             GithubDriverMock,
                             GitlabDriverMock,
                             PagureDriverMock])

        self.authenticators = zuul.lib.auth.AuthenticatorRegistry()
        self.authenticators.configure(config)
        if info is None:
            self.info = zuul.model.WebInfo.fromConfig(config)
        else:
            self.info = info
        self.zk_hosts = zk_hosts
        self.test_root = test_root

    def _setUp(self):
        # Start the web server
        self.web = zuul.web.ZuulWeb(
            listen_address='::', listen_port=0,
            gear_server='127.0.0.1', gear_port=self.gearman_server_port,
            info=self.info,
            connections=self.connections,
            zk_hosts=self.zk_hosts,
            zk_timeout=10,
            command_socket=os.path.join(self.test_root, 'web.socket'),
            authenticators=self.authenticators)
        self.web.start()
        self.addCleanup(self.stop)

        self.host = 'localhost'
        # Wait until web server is started
        # NOTE(review): busy-wait with no sleep between attempts; the
        # failed connection attempt itself is the only back-pressure.
        while True:
            self.port = self.web.port
            try:
                with socket.create_connection((self.host, self.port)):
                    break
            except ConnectionRefusedError:
                pass

    def stop(self):
        self.web.stop()
        self.connections.stop()
  3145. class MySQLSchemaFixture(fixtures.Fixture):
  3146. def setUp(self):
  3147. super(MySQLSchemaFixture, self).setUp()
  3148. random_bits = ''.join(random.choice(string.ascii_lowercase +
  3149. string.ascii_uppercase)
  3150. for x in range(8))
  3151. self.name = '%s_%s' % (random_bits, os.getpid())
  3152. self.passwd = uuid.uuid4().hex
  3153. self.host = os.environ.get('ZUUL_MYSQL_HOST', '127.0.0.1')
  3154. db = pymysql.connect(host=self.host,
  3155. user="openstack_citest",
  3156. passwd="openstack_citest",
  3157. db="openstack_citest")
  3158. try:
  3159. with db.cursor() as cur:
  3160. cur.execute("create database %s" % self.name)
  3161. cur.execute(
  3162. "create user '{user}'@'' identified by '{passwd}'".format(
  3163. user=self.name, passwd=self.passwd))
  3164. cur.execute("grant all on {name}.* to '{name}'@''".format(
  3165. name=self.name))
  3166. cur.execute("flush privileges")
  3167. finally:
  3168. db.close()
  3169. self.dburi = 'mysql+pymysql://{name}:{passwd}@{host}/{name}'.format(
  3170. name=self.name, passwd=self.passwd, host=self.host)
  3171. self.addDetail('dburi', testtools.content.text_content(self.dburi))
  3172. self.addCleanup(self.cleanup)
  3173. def cleanup(self):
  3174. db = pymysql.connect(host=self.host,
  3175. user="openstack_citest",
  3176. passwd="openstack_citest",
  3177. db="openstack_citest")
  3178. try:
  3179. with db.cursor() as cur:
  3180. cur.execute("drop database %s" % self.name)
  3181. cur.execute("drop user '%s'@''" % self.name)
  3182. cur.execute("flush privileges")
  3183. finally:
  3184. db.close()
  3185. class PostgresqlSchemaFixture(fixtures.Fixture):
  3186. def setUp(self):
  3187. super(PostgresqlSchemaFixture, self).setUp()
  3188. # Postgres lowercases user and table names during creation but not
  3189. # during authentication. Thus only use lowercase chars.
  3190. random_bits = ''.join(random.choice(string.ascii_lowercase)
  3191. for x in range(8))
  3192. self.name = '%s_%s' % (random_bits, os.getpid())
  3193. self.passwd = uuid.uuid4().hex
  3194. self.host = os.environ.get('ZUUL_POSTGRES_HOST', '127.0.0.1')
  3195. db = psycopg2.connect(host=self.host,
  3196. user="openstack_citest",
  3197. password="openstack_citest",
  3198. database="openstack_citest")
  3199. db.autocommit = True
  3200. cur = db.cursor()
  3201. cur.execute("create role %s with login password '%s';" % (
  3202. self.name, self.passwd))
  3203. cur.execute("create database %s OWNER %s TEMPLATE template0 "
  3204. "ENCODING 'UTF8';" % (self.name, self.name))
  3205. self.dburi = 'postgresql://{name}:{passwd}@{host}/{name}'.format(
  3206. name=self.name, passwd=self.passwd, host=self.host)
  3207. self.addDetail('dburi', testtools.content.text_content(self.dburi))
  3208. self.addCleanup(self.cleanup)
  3209. def cleanup(self):
  3210. db = psycopg2.connect(host=self.host,
  3211. user="openstack_citest",
  3212. password="openstack_citest",
  3213. database="openstack_citest")
  3214. db.autocommit = True
  3215. cur = db.cursor()
  3216. cur.execute("drop database %s" % self.name)
  3217. cur.execute("drop user %s" % self.name)
  3218. class FakeCPUTimes:
  3219. def __init__(self):
  3220. self.user = 0
  3221. self.system = 0
  3222. self.children_user = 0
  3223. self.children_system = 0
  3224. def cpu_times(self):
  3225. return FakeCPUTimes()
class BaseTestCase(testtools.TestCase):
    """Common test-case base: timeouts, output capture, and logging setup."""

    log = logging.getLogger("zuul.test")
    # Default number of seconds helpers may wait for a condition.
    wait_timeout = 90

    def attachLogs(self, *args):
        # On failure, attach the captured log stream as a testtools detail.
        def reader():
            self._log_stream.seek(0)
            while True:
                x = self._log_stream.read(4096)
                if not x:
                    break
                yield x.encode('utf8')
        content = testtools.content.content_from_reader(
            reader,
            testtools.content_type.UTF8_TEXT,
            False)
        self.addDetail('logging', content)

    def shouldNeverCapture(self):
        # A test method may be decorated with __never_capture__ to opt
        # out of stdout/stderr/log capture entirely.
        test_name = self.id().split('.')[-1]
        test = getattr(self, test_name)
        if hasattr(test, '__never_capture__'):
            return getattr(test, '__never_capture__')
        return False

    def setUp(self):
        super(BaseTestCase, self).setUp()
        test_timeout = os.environ.get('OS_TEST_TIMEOUT', 0)
        try:
            test_timeout = int(test_timeout)
        except ValueError:
            # If timeout value is invalid do not set a timeout.
            test_timeout = 0
        if test_timeout > 0:
            # Try a gentle timeout first and as a safety net a hard timeout
            # later.
            self.useFixture(fixtures.Timeout(test_timeout, gentle=True))
            self.useFixture(fixtures.Timeout(test_timeout + 20, gentle=False))

        if not self.shouldNeverCapture():
            # stdout/stderr/log capture are each controlled by their own
            # environment variable ('True' or '1' enables).
            if (os.environ.get('OS_STDOUT_CAPTURE') == 'True' or
                os.environ.get('OS_STDOUT_CAPTURE') == '1'):
                stdout = self.useFixture(
                    fixtures.StringStream('stdout')).stream
                self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout))
            if (os.environ.get('OS_STDERR_CAPTURE') == 'True' or
                os.environ.get('OS_STDERR_CAPTURE') == '1'):
                stderr = self.useFixture(
                    fixtures.StringStream('stderr')).stream
                self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr))
            if (os.environ.get('OS_LOG_CAPTURE') == 'True' or
                os.environ.get('OS_LOG_CAPTURE') == '1'):
                self._log_stream = StringIO()
                self.addOnException(self.attachLogs)
            else:
                self._log_stream = sys.stdout
        else:
            self._log_stream = sys.stdout

        handler = logging.StreamHandler(self._log_stream)
        formatter = logging.Formatter('%(asctime)s %(name)-32s '
                                      '%(levelname)-8s %(message)s')
        handler.setFormatter(formatter)

        logger = logging.getLogger()
        logger.setLevel(logging.DEBUG)
        logger.addHandler(handler)

        # Make sure we don't carry old handlers around in process state
        # which slows down test runs
        self.addCleanup(logger.removeHandler, handler)

        # NOTE(notmorgan): Extract logging overrides for specific
        # libraries from the OS_LOG_DEFAULTS env and create loggers
        # for each. This is used to limit the output during test runs
        # from libraries that zuul depends on such as gear.
        log_defaults_from_env = os.environ.get(
            'OS_LOG_DEFAULTS',
            'git.cmd=INFO,kazoo.client=WARNING,gear=WARNING')

        if log_defaults_from_env:
            for default in log_defaults_from_env.split(','):
                try:
                    name, level_str = default.split('=', 1)
                    level = getattr(logging, level_str, logging.DEBUG)
                    logger = logging.getLogger(name)
                    logger.setLevel(level)
                    logger.addHandler(handler)
                    self.addCleanup(logger.removeHandler, handler)
                    logger.propagate = False
                except ValueError:
                    # NOTE(notmorgan): Invalid format of the log default,
                    # skip and don't try and apply a logger for the
                    # specified module
                    pass
        self.addCleanup(handler.close)
        self.addCleanup(handler.flush)

        if sys.platform == 'darwin':
            # Popen.cpu_times() is broken on darwin so patch it with a fake.
            Popen.cpu_times = cpu_times
  3317. class SymLink(object):
  3318. def __init__(self, target):
  3319. self.target = target
class SchedulerTestApp:
    """Wire up and start a complete scheduler instance for tests.

    Builds a Scheduler with fake connections, a recording merge client,
    nodepool and ZooKeeper clients, then starts it and performs an
    initial reconfiguration.
    """

    def __init__(self, log: Logger, config: ConfigParser, zk_config: str,
                 changes: Dict[str, Dict[str, Change]],
                 additional_event_queues, upstream_root: str,
                 rpcclient: RPCClient, poller_events, git_url_with_auth: bool,
                 source_only: bool,
                 add_cleanup: Callable[[Callable[[], None]], None]):
        self.log = log
        self.config = config
        self.zk_config = zk_config
        self.changes = changes

        self.sched = zuul.scheduler.Scheduler(self.config)
        self.sched.setZuulApp(self)
        # Short stats interval so tests see stats quickly.
        self.sched._stats_interval = 1

        # Queues that tests wait on to detect a settled scheduler.
        self.event_queues = [
            self.sched.result_event_queue,
            self.sched.trigger_event_queue,
            self.sched.management_event_queue
        ]

        # Register connections from the config using fakes
        self.connections = TestConnectionRegistry(
            self.changes, self.config, additional_event_queues,
            upstream_root, rpcclient, poller_events,
            git_url_with_auth, add_cleanup)
        self.connections.configure(self.config, source_only=source_only)

        self.sched.registerConnections(self.connections)

        executor_client = zuul.executor.client.ExecutorClient(
            self.config, self.sched)
        merge_client = RecordingMergeClient(self.config, self.sched)
        nodepool = zuul.nodepool.Nodepool(self.sched)
        zk = zuul.zk.ZooKeeper(enable_cache=True)
        zk.connect(self.zk_config, timeout=30.0)

        self.sched.setExecutor(executor_client)
        self.sched.setMerger(merge_client)
        self.sched.setNodepool(nodepool)
        self.sched.setZooKeeper(zk)

        self.sched.start()
        executor_client.gearman.waitForServer()
        self.sched.reconfigure(self.config)
        self.sched.wakeUp()

    def fullReconfigure(self):
        # Full reconfiguration; failures are logged, not raised, so a
        # broken config does not abort the test outright.
        try:
            self.sched.reconfigure(self.config)
        except Exception:
            self.log.exception("Reconfiguration failed:")

    def smartReconfigure(self, command_socket=False):
        # Smart reconfiguration, optionally via the command socket to
        # exercise that code path as well.
        try:
            if command_socket:
                command_socket = self.config.get('scheduler', 'command_socket')
                with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as s:
                    s.connect(command_socket)
                    s.sendall('smart-reconfigure\n'.encode('utf8'))
            else:
                self.sched.reconfigure(self.config, smart=True)
        except Exception:
            self.log.exception("Reconfiguration failed:")
  3376. class SchedulerTestManager:
  3377. def __init__(self):
  3378. self.instances = []
  3379. def create(self, log: Logger, config: ConfigParser, zk_config: str,
  3380. changes: Dict[str, Dict[str, Change]], additional_event_queues,
  3381. upstream_root: str, rpcclient: RPCClient, poller_events,
  3382. git_url_with_auth: bool, source_only: bool,
  3383. add_cleanup: Callable[[Callable[[], None]], None])\
  3384. -> SchedulerTestApp:
  3385. app = SchedulerTestApp(log, config, zk_config, changes,
  3386. additional_event_queues, upstream_root,
  3387. rpcclient, poller_events, git_url_with_auth,
  3388. source_only, add_cleanup)
  3389. self.instances.append(app)
  3390. return app
  3391. def __len__(self) -> int:
  3392. return len(self.instances)
  3393. def __getitem__(self, item: int) -> SchedulerTestApp:
  3394. return self.instances[item]
  3395. def __setitem__(self, key: int, value: SchedulerTestApp):
  3396. raise Exception("Not implemented, use create method!")
  3397. def __delitem__(self, key, value):
  3398. raise Exception("Not implemented!")
  3399. def __iter__(self):
  3400. return iter(self.instances)
  3401. @property
  3402. def first(self) -> SchedulerTestApp:
  3403. if len(self.instances) == 0:
  3404. raise Exception("No scheduler!")
  3405. return self.instances[0]
  3406. def filter(self, matcher=None) -> Iterable[SchedulerTestApp]:
  3407. fcn = None # type: Optional[Callable[[int, SchedulerTestApp], bool]]
  3408. if type(matcher) == list:
  3409. def fcn(_: int, app: SchedulerTestApp) -> bool:
  3410. return app in matcher
  3411. elif type(matcher).__name__ == 'function':
  3412. fcn = matcher
  3413. return [e[1] for e in enumerate(self.instances)
  3414. if fcn is None or fcn(e[0], e[1])]
  3415. def execute(self, function: Callable[[Any], None], matcher=None) -> None:
  3416. for instance in self.filter(matcher):
  3417. function(instance)
class ZuulTestCase(BaseTestCase):
    """A test case with a functioning Zuul.

    The following class variables are used during test setup and can
    be overridden by subclasses but are effectively read-only once a
    test method starts running:

    :cvar str config_file: This points to the main zuul config file
        within the fixtures directory.  Subclasses may override this
        to obtain a different behavior.

    :cvar str tenant_config_file: This is the tenant config file
        (which specifies from what git repos the configuration should
        be loaded).  It defaults to the value specified in
        `config_file` but can be overridden by subclasses to obtain a
        different tenant/project layout while using the standard main
        configuration.  See also the :py:func:`simple_layout`
        decorator.

    :cvar str tenant_config_script_file: This is the tenant config script
        file.  This attribute has the same meaning as tenant_config_file
        except that the tenant configuration is loaded from a script.
        When this attribute is set then tenant_config_file is ignored
        by the scheduler.

    :cvar bool create_project_keys: Indicates whether Zuul should
        auto-generate keys for each project, or whether the test
        infrastructure should insert dummy keys to save time during
        startup.  Defaults to False.

    :cvar int log_console_port: The zuul_stream/zuul_console port.

    The following are instance variables that are useful within test
    methods:

    :ivar FakeGerritConnection fake_<connection>:
        A :py:class:`~tests.base.FakeGerritConnection` will be
        instantiated for each connection present in the config file
        and stored here.  For instance, `fake_gerrit` will hold the
        FakeGerritConnection object for a connection named `gerrit`.

    :ivar FakeGearmanServer gearman_server: An instance of
        :py:class:`~tests.base.FakeGearmanServer` which is the Gearman
        server that all of the Zuul components in this test use to
        communicate with each other.

    :ivar RecordingExecutorServer executor_server: An instance of
        :py:class:`~tests.base.RecordingExecutorServer` which is the
        Ansible execute server used to run jobs for this test.

    :ivar list builds: A list of :py:class:`~tests.base.FakeBuild` objects
        representing currently running builds.  They are appended to
        the list in the order they are executed, and removed from this
        list upon completion.

    :ivar list history: A list of :py:class:`~tests.base.BuildHistory`
        objects representing completed builds.  They are appended to
        the list in the order they complete.
    """

    config_file: str = 'zuul.conf'
    # Whether to actually run Ansible for builds (slow); passed to the
    # recording executor server in setUp().
    run_ansible: bool = False
    create_project_keys: bool = False
    # Whether the fake gearman server (and RPC client) use SSL certs.
    use_ssl: bool = False
    # Forwarded to TestConnectionRegistry; presumably makes fake git
    # URLs carry credentials — TODO confirm against the registry.
    git_url_with_auth: bool = False
    log_console_port: int = 19885
    # Forwarded to ConnectionRegistry.configure(source_only=...);
    # NOTE(review): appears to restrict which connection drivers are
    # configured — confirm in lib/connections.
    source_only: bool = False
  3472. def __getattr__(self, name):
  3473. """Allows to access fake connections the old way, e.g., using
  3474. `fake_gerrit` for FakeGerritConnection.
  3475. This will access the connection of the first (default) scheduler
  3476. (`self.scheds.first`). To access connections of a different
  3477. scheduler use `self.scheds[{X}].connections.fake_{NAME}`.
  3478. """
  3479. if name.startswith('fake_') and\
  3480. hasattr(self.scheds.first.connections, name):
  3481. return getattr(self.scheds.first.connections, name)
  3482. raise AttributeError("'ZuulTestCase' object has no attribute '%s'"
  3483. % name)
  3484. def _startMerger(self):
  3485. self.merge_server = zuul.merger.server.MergeServer(
  3486. self.config, self.scheds.first.connections)
  3487. self.merge_server.start()
    def setUp(self):
        """Build the full test environment: ZooKeeper, temp dirs, a
        per-test config, fake gearman/statsd/nodepool, a recording
        executor and the first scheduler.  The setup order below is
        significant (e.g. the config must exist before any component
        that reads it is created)."""
        super(ZuulTestCase, self).setUp()

        self.setupZK()
        self.fake_nodepool = FakeNodepool(
            self.zk_chroot_fixture.zookeeper_host,
            self.zk_chroot_fixture.zookeeper_port,
            self.zk_chroot_fixture.zookeeper_chroot)

        # Temp root: fixture-managed (auto-removed) unless the user
        # asked to keep temp dirs for debugging.
        if not KEEP_TEMPDIRS:
            tmp_root = self.useFixture(fixtures.TempDir(
                rootdir=os.environ.get("ZUUL_TEST_ROOT"))
            ).path
        else:
            tmp_root = tempfile.mkdtemp(
                dir=os.environ.get("ZUUL_TEST_ROOT", None))
        self.test_root = os.path.join(tmp_root, "zuul-test")
        self.upstream_root = os.path.join(self.test_root, "upstream")
        self.merger_src_root = os.path.join(self.test_root, "merger-git")
        self.executor_src_root = os.path.join(self.test_root, "executor-git")
        self.state_root = os.path.join(self.test_root, "lib")
        self.merger_state_root = os.path.join(self.test_root, "merger-lib")
        self.executor_state_root = os.path.join(self.test_root, "executor-lib")
        self.jobdir_root = os.path.join(self.test_root, "builds")
        if os.path.exists(self.test_root):
            shutil.rmtree(self.test_root)
        os.makedirs(self.test_root)
        os.makedirs(self.upstream_root)
        os.makedirs(self.state_root)
        os.makedirs(self.merger_state_root)
        os.makedirs(self.executor_state_root)
        os.makedirs(self.jobdir_root)

        # Make per test copy of Configuration.
        self.config = self.setup_config(self.config_file)
        self.private_key_file = os.path.join(self.test_root, 'test_id_rsa')
        if not os.path.exists(self.private_key_file):
            src_private_key_file = os.environ.get(
                'ZUUL_SSH_KEY',
                os.path.join(FIXTURE_DIR, 'test_id_rsa'))
            shutil.copy(src_private_key_file, self.private_key_file)
            shutil.copy('{}.pub'.format(src_private_key_file),
                        '{}.pub'.format(self.private_key_file))
            os.chmod(self.private_key_file, 0o0600)
        # Resolve tenant config paths relative to the fixture dir.
        for cfg_attr in ('tenant_config', 'tenant_config_script'):
            if self.config.has_option('scheduler', cfg_attr):
                cfg_value = self.config.get('scheduler', cfg_attr)
                self.config.set(
                    'scheduler', cfg_attr,
                    os.path.join(FIXTURE_DIR, cfg_value))
        # Point every component at the per-test directories/sockets.
        self.config.set('scheduler', 'state_dir', self.state_root)
        self.config.set(
            'scheduler', 'command_socket',
            os.path.join(self.test_root, 'scheduler.socket'))
        self.config.set('merger', 'git_dir', self.merger_src_root)
        self.config.set('executor', 'git_dir', self.executor_src_root)
        self.config.set('executor', 'private_key_file', self.private_key_file)
        self.config.set('executor', 'state_dir', self.executor_state_root)
        self.config.set(
            'executor', 'command_socket',
            os.path.join(self.test_root, 'executor.socket'))
        self.config.set(
            'merger', 'command_socket',
            os.path.join(self.test_root, 'merger.socket'))

        self.statsd = FakeStatsd()
        if self.config.has_section('statsd'):
            self.config.set('statsd', 'port', str(self.statsd.port))
        self.statsd.start()

        self.gearman_server = FakeGearmanServer(self.use_ssl)
        self.config.set('gearman', 'port', str(self.gearman_server.port))
        self.log.info("Gearman server on port %s" %
                      (self.gearman_server.port,))
        if self.use_ssl:
            self.log.info('SSL enabled for gearman')
            self.config.set(
                'gearman', 'ssl_ca',
                os.path.join(FIXTURE_DIR, 'gearman/root-ca.pem'))
            self.config.set(
                'gearman', 'ssl_cert',
                os.path.join(FIXTURE_DIR, 'gearman/client.pem'))
            self.config.set(
                'gearman', 'ssl_key',
                os.path.join(FIXTURE_DIR, 'gearman/client.key'))

        self.rpcclient = zuul.rpcclient.RPCClient(
            self.config.get('gearman', 'server'),
            self.gearman_server.port,
            get_default(self.config, 'gearman', 'ssl_key'),
            get_default(self.config, 'gearman', 'ssl_cert'),
            get_default(self.config, 'gearman', 'ssl_ca'))

        # Shrink gerrit timers so tests settle quickly.
        gerritsource.GerritSource.replication_timeout = 1.5
        gerritsource.GerritSource.replication_retry_interval = 0.5
        gerritconnection.GerritEventConnector.delay = 0.0

        self.changes: Dict[str, Dict[str, Change]] = {}
        self.additional_event_queues = []
        self.poller_events = {}
        self._configureSmtp()
        self._configureMqtt()

        # The executor gets its own connection registry, separate from
        # the scheduler's.
        executor_connections = TestConnectionRegistry(
            self.changes, self.config, self.additional_event_queues,
            self.upstream_root, self.rpcclient, self.poller_events,
            self.git_url_with_auth, self.addCleanup)
        executor_connections.configure(self.config,
                                       source_only=self.source_only)
        self.executor_server = RecordingExecutorServer(
            self.config, executor_connections,
            jobdir_root=self.jobdir_root,
            _run_ansible=self.run_ansible,
            _test_root=self.test_root,
            keep_jobdir=KEEP_TEMPDIRS,
            log_console_port=self.log_console_port)
        self.executor_server.start()
        self.history = self.executor_server.build_history
        self.builds = self.executor_server.running_builds

        self.scheds = SchedulerTestManager()
        self.scheds.create(
            self.log, self.config, self.zk_config, self.changes,
            self.additional_event_queues, self.upstream_root, self.rpcclient,
            self.poller_events, self.git_url_with_auth, self.source_only,
            self.addCleanup)

        if hasattr(self, 'fake_github'):
            self.additional_event_queues.append(
                self.fake_github.github_event_connector._event_forward_queue)

        self.merge_server = None

        # Cleanups are run in reverse order
        self.addCleanup(self.assertCleanShutdown)
        self.addCleanup(self.shutdown)
        self.addCleanup(self.assertFinalState)
  3612. def __event_queues(self, matcher) -> List[Queue]:
  3613. sched_queues = map(lambda app: app.event_queues,
  3614. self.scheds.filter(matcher))
  3615. return [item for sublist in sched_queues for item in sublist] + \
  3616. self.additional_event_queues
  3617. def _configureSmtp(self):
  3618. # Set up smtp related fakes
  3619. # TODO(jhesketh): This should come from lib.connections for better
  3620. # coverage
  3621. # Register connections from the config
  3622. self.smtp_messages = []
  3623. def FakeSMTPFactory(*args, **kw):
  3624. args = [self.smtp_messages] + list(args)
  3625. return FakeSMTP(*args, **kw)
  3626. self.useFixture(fixtures.MonkeyPatch('smtplib.SMTP', FakeSMTPFactory))
  3627. def _configureMqtt(self):
  3628. # Set up mqtt related fakes
  3629. self.mqtt_messages = []
  3630. def fakeMQTTPublish(_, topic, msg, qos, zuul_event_id):
  3631. log = logging.getLogger('zuul.FakeMQTTPubish')
  3632. log.info('Publishing message via mqtt')
  3633. self.mqtt_messages.append({'topic': topic, 'msg': msg, 'qos': qos})
  3634. self.useFixture(fixtures.MonkeyPatch(
  3635. 'zuul.driver.mqtt.mqttconnection.MQTTConnection.publish',
  3636. fakeMQTTPublish))
    def setup_config(self, config_file: str):
        # This creates the per-test configuration object. It can be
        # overridden by subclasses, but should not need to be since it
        # obeys the config_file and tenant_config_file attributes.
        config = configparser.ConfigParser()
        config.read(os.path.join(FIXTURE_DIR, config_file))
        # Guarantee the sections the test infrastructure writes to below
        # exist even if the fixture config omits them.
        sections = ['zuul', 'scheduler', 'executor', 'merger']
        for section in sections:
            if not config.has_section(section):
                config.add_section(section)
        # A @simple_layout decorator takes precedence over the tenant
        # config files; only fall back to them when it is absent.
        if not self.setupSimpleLayout(config):
            tenant_config = None
            for cfg_attr in ('tenant_config', 'tenant_config_script'):
                if hasattr(self, cfg_attr + '_file'):
                    if getattr(self, cfg_attr + '_file'):
                        value = getattr(self, cfg_attr + '_file')
                        config.set('scheduler', cfg_attr, value)
                        tenant_config = value
                    else:
                        config.remove_option('scheduler', cfg_attr)
            if tenant_config:
                # Pre-populate upstream repos from a 'git' directory
                # sitting next to the tenant config file, if present.
                # Directory names use '_' where the project name has '/'.
                git_path = os.path.join(
                    os.path.dirname(
                        os.path.join(FIXTURE_DIR, tenant_config)),
                    'git')
                if os.path.exists(git_path):
                    for reponame in os.listdir(git_path):
                        project = reponame.replace('_', '/')
                        self.copyDirToRepo(project,
                                           os.path.join(git_path, reponame))
        # Make test_root persist after ansible run for .flag test
        config.set('executor', 'trusted_rw_paths', self.test_root)
        self.setupAllProjectKeys(config)
        return config
    def setupSimpleLayout(self, config: ConfigParser):
        # If the test method has been decorated with a simple_layout,
        # use that instead of the class tenant_config_file. Set up a
        # single config-project with the specified layout, and
        # initialize repos for all of the 'project' entries which
        # appear in the layout.
        test_name = self.id().split('.')[-1]
        test = getattr(self, test_name)
        if hasattr(test, '__simple_layout__'):
            path, driver = getattr(test, '__simple_layout__')
        else:
            # Not decorated: tell the caller to use the regular tenant
            # config instead.
            return False

        files = {}
        path = os.path.join(FIXTURE_DIR, path)
        with open(path) as f:
            data = f.read()
        layout = yaml.safe_load(data)
        files['zuul.yaml'] = data
        untrusted_projects = []
        for item in layout:
            if 'project' in item:
                name = item['project']['name']
                # Names starting with '^' are regex stanzas, not real
                # repos; skip repo initialization for them.
                if name.startswith('^'):
                    continue
                untrusted_projects.append(name)
                self.init_repo(name)
                self.addCommitToRepo(name, 'initial commit',
                                     files={'README': ''},
                                     branch='master', tag='init')
            if 'job' in item:
                # Create empty playbook files for each job so the
                # layout can load without missing-file errors.
                if 'run' in item['job']:
                    files['%s' % item['job']['run']] = ''
                for fn in zuul.configloader.as_list(
                        item['job'].get('pre-run', [])):
                    files['%s' % fn] = ''
                for fn in zuul.configloader.as_list(
                        item['job'].get('post-run', [])):
                    files['%s' % fn] = ''

        # Write a synthetic single-tenant config pointing at a common
        # config-project plus the projects collected above.
        root = os.path.join(self.test_root, "config")
        if not os.path.exists(root):
            os.makedirs(root)
        f = tempfile.NamedTemporaryFile(dir=root, delete=False)
        temp_config = [{
            'tenant': {
                'name': 'tenant-one',
                'source': {
                    driver: {
                        'config-projects': ['org/common-config'],
                        'untrusted-projects': untrusted_projects}}}}]
        f.write(yaml.dump(temp_config).encode('utf8'))
        f.close()
        config.set('scheduler', 'tenant_config',
                   os.path.join(FIXTURE_DIR, f.name))

        self.init_repo('org/common-config')
        self.addCommitToRepo('org/common-config', 'add content from fixture',
                             files, branch='master', tag='init')

        return True
  3728. def setupAllProjectKeys(self, config: ConfigParser):
  3729. if self.create_project_keys:
  3730. return
  3731. path = config.get('scheduler', 'tenant_config')
  3732. with open(os.path.join(FIXTURE_DIR, path)) as f:
  3733. tenant_config = yaml.safe_load(f.read())
  3734. for tenant in tenant_config:
  3735. if 'tenant' not in tenant.keys():
  3736. continue
  3737. sources = tenant['tenant']['source']
  3738. for source, conf in sources.items():
  3739. for project in conf.get('config-projects', []):
  3740. self.setupProjectKeys(source, project)
  3741. for project in conf.get('untrusted-projects', []):
  3742. self.setupProjectKeys(source, project)
  3743. def setupProjectKeys(self, source, project):
  3744. # Make sure we set up an RSA key for the project so that we
  3745. # don't spend time generating one:
  3746. if isinstance(project, dict):
  3747. project = list(project.keys())[0]
  3748. key_root = os.path.join(self.state_root, 'keys')
  3749. if not os.path.isdir(key_root):
  3750. os.mkdir(key_root, 0o700)
  3751. fn = os.path.join(key_root, '.version')
  3752. with open(fn, 'w') as f:
  3753. f.write('1')
  3754. # secrets key
  3755. private_key_file = os.path.join(
  3756. key_root, 'secrets', 'project', source, project, '0.pem')
  3757. private_key_dir = os.path.dirname(private_key_file)
  3758. self.log.debug("Installing test secrets keys for project %s at %s" % (
  3759. project, private_key_file))
  3760. if not os.path.isdir(private_key_dir):
  3761. os.makedirs(private_key_dir)
  3762. with open(os.path.join(FIXTURE_DIR, 'private.pem')) as i:
  3763. with open(private_key_file, 'w') as o:
  3764. o.write(i.read())
  3765. # ssh key
  3766. private_key_file = os.path.join(
  3767. key_root, 'ssh', 'project', source, project, '0.pem')
  3768. private_key_dir = os.path.dirname(private_key_file)
  3769. self.log.debug("Installing test ssh keys for project %s at %s" % (
  3770. project, private_key_file))
  3771. if not os.path.isdir(private_key_dir):
  3772. os.makedirs(private_key_dir)
  3773. with open(os.path.join(FIXTURE_DIR, 'ssh.pem')) as i:
  3774. with open(private_key_file, 'w') as o:
  3775. o.write(i.read())
  3776. def setupZK(self):
  3777. self.zk_chroot_fixture = self.useFixture(
  3778. ChrootedKazooFixture(self.id()))
  3779. self.zk_config = '%s:%s%s' % (
  3780. self.zk_chroot_fixture.zookeeper_host,
  3781. self.zk_chroot_fixture.zookeeper_port,
  3782. self.zk_chroot_fixture.zookeeper_chroot)
  3783. def copyDirToRepo(self, project, source_path):
  3784. self.init_repo(project)
  3785. files = {}
  3786. for (dirpath, dirnames, filenames) in os.walk(source_path):
  3787. for filename in filenames:
  3788. test_tree_filepath = os.path.join(dirpath, filename)
  3789. common_path = os.path.commonprefix([test_tree_filepath,
  3790. source_path])
  3791. relative_filepath = test_tree_filepath[len(common_path) + 1:]
  3792. with open(test_tree_filepath, 'rb') as f:
  3793. content = f.read()
  3794. # dynamically create symlinks if the content is of the form
  3795. # symlink: <target>
  3796. match = re.match(rb'symlink: ([^\s]+)', content)
  3797. if match:
  3798. content = SymLink(match.group(1))
  3799. files[relative_filepath] = content
  3800. self.addCommitToRepo(project, 'add content from fixture',
  3801. files, branch='master', tag='init')
  3802. def assertNodepoolState(self):
  3803. # Make sure that there are no pending requests
  3804. requests = None
  3805. for x in iterate_timeout(30, "zk getNodeRequests"):
  3806. try:
  3807. requests = self.fake_nodepool.getNodeRequests()
  3808. break
  3809. except kazoo.exceptions.ConnectionLoss:
  3810. # NOTE(pabelanger): We lost access to zookeeper, iterate again
  3811. pass
  3812. self.assertEqual(len(requests), 0)
  3813. nodes = None
  3814. for x in iterate_timeout(30, "zk getNodeRequests"):
  3815. try:
  3816. nodes = self.fake_nodepool.getNodes()
  3817. break
  3818. except kazoo.exceptions.ConnectionLoss:
  3819. # NOTE(pabelanger): We lost access to zookeeper, iterate again
  3820. pass
  3821. for node in nodes:
  3822. self.assertFalse(node['_lock'], "Node %s is locked" %
  3823. (node['_oid'],))
  3824. def assertNoGeneratedKeys(self):
  3825. # Make sure that Zuul did not generate any project keys
  3826. # (unless it was supposed to).
  3827. if self.create_project_keys:
  3828. return
  3829. test_keys = []
  3830. key_fns = ['private.pem', 'ssh.pem']
  3831. for fn in key_fns:
  3832. with open(os.path.join(FIXTURE_DIR, fn)) as i:
  3833. test_keys.append(i.read())
  3834. key_root = os.path.join(self.state_root, 'keys')
  3835. for root, dirname, files in os.walk(key_root):
  3836. for fn in files:
  3837. if fn == '.version':
  3838. continue
  3839. with open(os.path.join(root, fn)) as f:
  3840. self.assertTrue(f.read() in test_keys)
    def assertFinalState(self):
        """Assert the system is fully quiesced at the end of a test:
        no running jobs, no leaked git.Repo objects, empty queues, a
        clean nodepool, no generated keys, and no leftover change
        queues in independent pipelines."""
        self.log.debug("Assert final state")
        # Make sure no jobs are running
        self.assertEqual({}, self.executor_server.job_workers)
        # Make sure that git.Repo objects have been garbage collected.
        # GC is disabled around the scan so the object list is stable.
        gc.disable()
        try:
            gc.collect()
            for obj in gc.get_objects():
                if isinstance(obj, git.Repo):
                    self.log.debug("Leaked git repo object: 0x%x %s" %
                                   (id(obj), repr(obj)))
        finally:
            gc.enable()
        self.assertEmptyQueues()
        self.assertNodepoolState()
        self.assertNoGeneratedKeys()
        # Independent pipelines should have released all their change
        # queues once every change left the pipeline.
        ipm = zuul.manager.independent.IndependentPipelineManager
        for tenant in self.scheds.first.sched.abide.tenants.values():
            for pipeline in tenant.layout.pipelines.values():
                if isinstance(pipeline.manager, ipm):
                    self.assertEqual(len(pipeline.queues), 0)
    def shutdown(self):
        """Stop every Zuul component started for this test.

        Ordering matters: builds are released first, then clients,
        executors/mergers, schedulers, and finally the shared services
        (statsd, gearman, nodepool, ZooKeeper).  Afterwards verify no
        unexpected threads survived.
        """
        self.log.debug("Shutting down after tests")
        self.executor_server.hold_jobs_in_build = False
        self.executor_server.release()
        self.scheds.execute(lambda app: app.sched.executor.stop())
        self.scheds.execute(lambda app: app.sched.merger.stop())
        if self.merge_server:
            self.merge_server.stop()
            self.merge_server.join()
        self.executor_server.stop()
        self.executor_server.join()
        self.scheds.execute(lambda app: app.sched.stop())
        self.scheds.execute(lambda app: app.sched.join())
        self.statsd.stop()
        self.statsd.join()
        self.rpcclient.shutdown()
        self.gearman_server.shutdown()
        self.fake_nodepool.stop()
        self.scheds.execute(lambda app: app.sched.zk.disconnect())
        self.printHistory()
        # We whitelist watchdog threads as they have relatively long delays
        # before noticing they should exit, but they should exit on their own.
        whitelist = ['watchdog',
                     'socketserver_Thread',
                     'GerritWebServer',
                     ]
        # Ignore threads that start with
        # * Thread- : Kazoo TreeCache
        # * Dummy- : Seen during debugging in VS Code
        # * pydevd : Debug helper threads of pydevd (used by many IDEs)
        # * ptvsd : Debug helper threads used by VS Code
        threads = [t for t in threading.enumerate()
                   if t.name not in whitelist
                   and not t.name.startswith("Thread-")
                   and not t.name.startswith('Dummy-')
                   and not t.name.startswith('pydevd.')
                   and not t.name.startswith('ptvsd.')
                   ]
        if len(threads) > 1:
            # Anything beyond the main thread is a leak: dump a stack
            # trace for every remaining thread, then fail loudly.
            thread_map = dict(map(lambda x: (x.ident, x.name),
                                  threading.enumerate()))
            log_str = ""
            for thread_id, stack_frame in sys._current_frames().items():
                log_str += "Thread id: %s, name: %s\n" % (
                    thread_id, thread_map.get(thread_id, 'UNKNOWN'))
                log_str += "".join(traceback.format_stack(stack_frame))
            self.log.debug(log_str)
            raise Exception("More than one thread is running: %s" % threads)
    def assertCleanShutdown(self):
        # Hook for subclasses: runs (via addCleanup) after shutdown()
        # to assert additional post-shutdown invariants.  No checks by
        # default.
        pass
  3913. def init_repo(self, project, tag=None):
  3914. parts = project.split('/')
  3915. path = os.path.join(self.upstream_root, *parts[:-1])
  3916. if not os.path.exists(path):
  3917. os.makedirs(path)
  3918. path = os.path.join(self.upstream_root, project)
  3919. repo = git.Repo.init(path)
  3920. with repo.config_writer() as config_writer:
  3921. config_writer.set_value('user', 'email', 'user@example.com')
  3922. config_writer.set_value('user', 'name', 'User Name')
  3923. repo.index.commit('initial commit')
  3924. master = repo.create_head('master')
  3925. if tag:
  3926. repo.create_tag(tag)
  3927. repo.head.reference = master
  3928. zuul.merger.merger.reset_repo_to_head(repo)
  3929. repo.git.clean('-x', '-f', '-d')
  3930. def create_branch(self, project, branch, commit_filename='README'):
  3931. path = os.path.join(self.upstream_root, project)
  3932. repo = git.Repo(path)
  3933. fn = os.path.join(path, commit_filename)
  3934. branch_head = repo.create_head(branch)
  3935. repo.head.reference = branch_head
  3936. f = open(fn, 'a')
  3937. f.write("test %s\n" % branch)
  3938. f.close()
  3939. repo.index.add([fn])
  3940. repo.index.commit('%s commit' % branch)
  3941. repo.head.reference = repo.heads['master']
  3942. zuul.merger.merger.reset_repo_to_head(repo)
  3943. repo.git.clean('-x', '-f', '-d')
  3944. def delete_branch(self, project, branch):
  3945. path = os.path.join(self.upstream_root, project)
  3946. repo = git.Repo(path)
  3947. repo.head.reference = repo.heads['master']
  3948. zuul.merger.merger.reset_repo_to_head(repo)
  3949. repo.delete_head(repo.heads[branch], force=True)
  3950. def create_commit(self, project, files=None, head='master',
  3951. message='Creating a fake commit', **kwargs):
  3952. path = os.path.join(self.upstream_root, project)
  3953. repo = git.Repo(path)
  3954. repo.head.reference = repo.heads[head]
  3955. repo.head.reset(index=True, working_tree=True)
  3956. files = files or {"README": "creating fake commit\n"}
  3957. for name, content in files.items():
  3958. file_name = os.path.join(path, name)
  3959. with open(file_name, 'a') as f:
  3960. f.write(content)
  3961. repo.index.add([file_name])
  3962. commit = repo.index.commit(message, **kwargs)
  3963. return commit.hexsha
  3964. def orderedRelease(self, count=None):
  3965. # Run one build at a time to ensure non-race order:
  3966. i = 0
  3967. while len(self.builds):
  3968. self.release(self.builds[0])
  3969. self.waitUntilSettled()
  3970. i += 1
  3971. if count is not None and i >= count:
  3972. break
  3973. def getSortedBuilds(self):
  3974. "Return the list of currently running builds sorted by name"
  3975. return sorted(self.builds, key=lambda x: x.name)
  3976. def release(self, job):
  3977. if isinstance(job, FakeBuild):
  3978. job.release()
  3979. else:
  3980. job.waiting = False
  3981. self.log.debug("Queued job %s released" % job.unique)
  3982. self.gearman_server.wakeConnections()
  3983. def getParameter(self, job, name):
  3984. if isinstance(job, FakeBuild):
  3985. return job.parameters[name]
  3986. else:
  3987. parameters = json.loads(job.arguments)
  3988. return parameters[name]
    def __haveAllBuildsReported(self, matcher) -> bool:
        """Return True when every completed build has been reported back
        to the matching schedulers and no gearman traffic is still in
        flight."""
        for app in self.scheds.filter(matcher):
            executor_client = app.sched.executor
            # See if Zuul is waiting on a meta job to complete
            if executor_client.meta_jobs:
                return False
            # Find out if every build that the worker has completed has been
            # reported back to Zuul. If it hasn't then that means a Gearman
            # event is still in transit and the system is not stable.
            for build in self.history:
                zbuild = executor_client.builds.get(build.uuid)
                if not zbuild:
                    # It has already been reported
                    continue
                # It hasn't been reported yet.
                return False
            # Make sure that none of the worker connections are in GRAB_WAIT
            worker = self.executor_server.executor_gearworker.gearman
            for connection in worker.active_connections:
                if connection.state == 'GRAB_WAIT':
                    return False
        return True
    def __areAllBuildsWaiting(self, matcher) -> bool:
        """Return True when every build known to the matching schedulers
        is fully enqueued and parked (waiting, paused, or held) rather
        than actively running, and the executor has no builds the
        client doesn't know about."""
        for app in self.scheds.filter(matcher):
            executor_client = app.sched.executor
            builds = executor_client.builds.values()
            seen_builds = set()
            for build in builds:
                seen_builds.add(build.uuid)
                # Locate the gearman client job matching this build.
                client_job = None
                for conn in executor_client.gearman.active_connections:
                    for j in conn.related_jobs.values():
                        if j.unique == build.uuid:
                            client_job = j
                            break
                if not client_job:
                    self.log.debug("%s is not known to the gearman client" %
                                   build)
                    return False
                if not client_job.handle:
                    self.log.debug("%s has no handle" % client_job)
                    return False
                server_job = self.gearman_server.jobs.get(client_job.handle)
                if not server_job:
                    self.log.debug("%s is not known to the gearman server" %
                                   client_job)
                    return False
                if not hasattr(server_job, 'waiting'):
                    self.log.debug("%s is being enqueued" % server_job)
                    return False
                if server_job.waiting:
                    # Held in the fake server's queue; counts as waiting.
                    continue
                if build.url is None:
                    self.log.debug("%s has not reported start" % build)
                    return False
                # using internal ServerJob which offers no Text interface
                worker_build = self.executor_server.job_builds.get(
                    server_job.unique.decode('utf8'))
                if worker_build:
                    if build.paused:
                        continue
                    if worker_build.isWaiting():
                        continue
                    self.log.debug("%s is running" % worker_build)
                    return False
                else:
                    self.log.debug("%s is unassigned" % server_job)
                    return False
            # Any executor job worker without a corresponding client
            # build means a build finished but isn't cleaned up yet.
            for (build_uuid, job_worker) in \
                    self.executor_server.job_workers.items():
                if build_uuid not in seen_builds:
                    self.log.debug("%s is not finalized" % build_uuid)
                    return False
        return True
  4063. def __areAllNodeRequestsComplete(self, matcher) -> bool:
  4064. if self.fake_nodepool.paused:
  4065. return True
  4066. for app in self.scheds.filter(matcher):
  4067. if app.sched.nodepool.requests:
  4068. return False
  4069. return True
    def __areAllMergeJobsWaiting(self, matcher) -> bool:
        """Return True when every merge-client job of the matching
        schedulers is fully enqueued on the gearman server and parked
        in the waiting state."""
        for app in self.scheds.filter(matcher):
            merge_client = app.sched.merger
            # Copy the job set; it may mutate while we inspect it.
            for client_job in list(merge_client.jobs):
                if not client_job.handle:
                    self.log.debug("%s has no handle" % client_job)
                    return False
                server_job = self.gearman_server.jobs.get(client_job.handle)
                if not server_job:
                    self.log.debug("%s is not known to the gearman server" %
                                   client_job)
                    return False
                if not hasattr(server_job, 'waiting'):
                    self.log.debug("%s is being enqueued" % server_job)
                    return False
                if server_job.waiting:
                    self.log.debug("%s is waiting" % server_job)
                    continue
                self.log.debug("%s is not waiting" % server_job)
                return False
        return True
  4091. def __eventQueuesEmpty(self, matcher) -> Generator[bool, None, None]:
  4092. for event_queue in self.__event_queues(matcher):
  4093. yield event_queue.empty()
  4094. def __eventQueuesJoin(self, matcher) -> None:
  4095. for app in self.scheds.filter(matcher):
  4096. for event_queue in app.event_queues:
  4097. event_queue.join()
  4098. for event_queue in self.additional_event_queues:
  4099. event_queue.join()
    def waitUntilSettled(self, msg="", matcher=None) -> None:
        """Block until Zuul reaches a quiescent state.

        Repeatedly checks the schedulers, mergers, executors, node
        requests and event queues until nothing is in flight, then
        returns.  Raises an exception (after logging a state dump) if
        ``self.wait_timeout`` elapses first.

        :arg str msg: An optional annotation for the debug log.
        :arg matcher: An optional scheduler-app matcher passed through
            to the individual checks; ``None`` checks every scheduler.
        """
        self.log.debug("Waiting until settled... (%s)", msg)
        start = time.time()
        i = 0
        while True:
            i = i + 1
            if time.time() - start > self.wait_timeout:
                # Dump as much state as possible before failing so the
                # cause of the hang can be diagnosed from the logs.
                self.log.error("Timeout waiting for Zuul to settle")
                self.log.error("Queue status:")
                for event_queue in self.__event_queues(matcher):
                    self.log.error("  %s: %s" %
                                   (event_queue, event_queue.empty()))
                self.log.error("All builds waiting: %s" %
                               (self.__areAllBuildsWaiting(matcher),))
                self.log.error("All merge jobs waiting: %s" %
                               (self.__areAllMergeJobsWaiting(matcher),))
                self.log.error("All builds reported: %s" %
                               (self.__haveAllBuildsReported(matcher),))
                self.log.error("All requests completed: %s" %
                               (self.__areAllNodeRequestsComplete(matcher),))
                self.log.error("All event queues empty: %s" %
                               (all(self.__eventQueuesEmpty(matcher)),))
                for app in self.scheds.filter(matcher):
                    self.log.error("[Sched: %s] Merge client jobs: %s" %
                                   (app.sched, app.sched.merger.jobs,))
                raise Exception("Timeout waiting for Zuul to settle")
            # Make sure no new events show up while we're checking
            self.executor_server.lock.acquire()
            # have all build states propagated to zuul?
            if self.__haveAllBuildsReported(matcher):
                # Join ensures that the queue is empty _and_ events have been
                # processed
                self.__eventQueuesJoin(matcher)
                # Hold the run handler locks so the schedulers cannot
                # change state while we evaluate the settled condition.
                self.scheds.execute(
                    lambda app: app.sched.run_handler_lock.acquire())
                if (self.__areAllMergeJobsWaiting(matcher) and
                        self.__haveAllBuildsReported(matcher) and
                        self.__areAllBuildsWaiting(matcher) and
                        self.__areAllNodeRequestsComplete(matcher) and
                        all(self.__eventQueuesEmpty(matcher))):
                    # The queue empty check is placed at the end to
                    # ensure that if a component adds an event between
                    # when locked the run handler and checked that the
                    # components were stable, we don't erroneously
                    # report that we are settled.
                    self.scheds.execute(
                        lambda app: app.sched.run_handler_lock.release())
                    self.executor_server.lock.release()
                    self.log.debug("...settled after %.3f ms / %s loops (%s)",
                                   time.time() - start, i, msg)
                    self.logState()
                    return
                self.scheds.execute(
                    lambda app: app.sched.run_handler_lock.release())
            self.executor_server.lock.release()
            # Sleep briefly (or until a scheduler wakes) before retrying.
            self.scheds.execute(lambda app: app.sched.wake_event.wait(0.1))
  4156. def waitForPoll(self, poller, timeout=30):
  4157. self.log.debug("Wait for poll on %s", poller)
  4158. self.poller_events[poller].clear()
  4159. self.log.debug("Waiting for poll 1 on %s", poller)
  4160. self.poller_events[poller].wait(timeout)
  4161. self.poller_events[poller].clear()
  4162. self.log.debug("Waiting for poll 2 on %s", poller)
  4163. self.poller_events[poller].wait(timeout)
  4164. self.log.debug("Done waiting for poll on %s", poller)
  4165. def logState(self):
  4166. """ Log the current state of the system """
  4167. self.log.info("Begin state dump --------------------")
  4168. for build in self.history:
  4169. self.log.info("Completed build: %s" % build)
  4170. for build in self.builds:
  4171. self.log.info("Running build: %s" % build)
  4172. for tenant in self.scheds.first.sched.abide.tenants.values():
  4173. for pipeline in tenant.layout.pipelines.values():
  4174. for pipeline_queue in pipeline.queues:
  4175. if len(pipeline_queue.queue) != 0:
  4176. status = ''
  4177. for item in pipeline_queue.queue:
  4178. status += item.formatStatus()
  4179. self.log.info(
  4180. 'Tenant %s pipeline %s queue %s contents:' % (
  4181. tenant.name, pipeline.name,
  4182. pipeline_queue.name))
  4183. for l in status.split('\n'):
  4184. if l.strip():
  4185. self.log.info(l)
  4186. self.log.info("End state dump --------------------")
  4187. def countJobResults(self, jobs, result):
  4188. jobs = filter(lambda x: x.result == result, jobs)
  4189. return len(list(jobs))
  4190. def getBuildByName(self, name):
  4191. for build in self.builds:
  4192. if build.name == name:
  4193. return build
  4194. raise Exception("Unable to find build %s" % name)
  4195. def assertJobNotInHistory(self, name, project=None):
  4196. for job in self.history:
  4197. if (project is None or
  4198. job.parameters['zuul']['project']['name'] == project):
  4199. self.assertNotEqual(job.name, name,
  4200. 'Job %s found in history' % name)
  4201. def getJobFromHistory(self, name, project=None, result=None):
  4202. for job in self.history:
  4203. if (job.name == name and
  4204. (project is None or
  4205. job.parameters['zuul']['project']['name'] == project) and
  4206. (result is None or job.result == result)):
  4207. return job
  4208. raise Exception("Unable to find job %s in history" % name)
  4209. def assertEmptyQueues(self):
  4210. # Make sure there are no orphaned jobs
  4211. for tenant in self.scheds.first.sched.abide.tenants.values():
  4212. for pipeline in tenant.layout.pipelines.values():
  4213. for pipeline_queue in pipeline.queues:
  4214. if len(pipeline_queue.queue) != 0:
  4215. print('pipeline %s queue %s contents %s' % (
  4216. pipeline.name, pipeline_queue.name,
  4217. pipeline_queue.queue))
  4218. self.assertEqual(len(pipeline_queue.queue), 0,
  4219. "Pipelines queues should be empty")
    def assertReportedStat(self, key, value=None, kind=None):
        """Check statsd output

        Check statsd return values.  A ``value`` should specify a
        ``kind``, however a ``kind`` may be specified without a
        ``value`` for a generic match.  Leave both empty to just check
        for key presence.

        :arg str key: The statsd key
        :arg str value: The expected value of the metric ``key``
        :arg str kind: The expected type of the metric ``key``  For example

          - ``c`` counter
          - ``g`` gauge
          - ``ms`` timing
          - ``s`` set

        :raises Exception: if the key (with matching value/kind, when
            given) does not appear within five seconds, or if the key
            would extend a sub-key under an existing gauge/counter.
        """
        if value:
            # A value without a kind is ambiguous; require both.
            self.assertNotEqual(kind, None)

        start = time.time()
        # Poll for up to five seconds; the fake statsd receives packets
        # asynchronously, so the stat may not have arrived yet.
        while time.time() < (start + 5):
            # Note our fake statsd just queues up results in a queue.
            # We just keep going through them until we find one that
            # matches, or fail out.  If statsd pipelines are used,
            # large single packets are sent with stats separated by
            # newlines; thus we first flatten the stats out into
            # single entries.
            stats = list(itertools.chain.from_iterable(
                [s.decode('utf-8').split('\n') for s in self.statsd.stats]))

            # Check that we don't already have a counter value
            # that we then try to extend a sub-key under; this doesn't
            # work on the server.  e.g.
            #
            #  zuul.new.stat is already a counter
            #  zuul.new.stat.sub.value will silently not work
            #
            # note only valid for gauges and counters; timers are
            # slightly different because statsd flushes them out but
            # actually writes a bunch of different keys like "mean,
            # std, count", so the "key" isn't so much a key, but a
            # path to the folder where the actual values will be kept.
            # Thus you can extend timer keys OK.
            already_set_keys = set()
            for stat in stats:
                k, v = stat.split(':')
                s_value, s_kind = v.split('|')
                if s_kind == 'c' or s_kind == 'g':
                    already_set_keys.update([k])
            for k in already_set_keys:
                if key != k and key.startswith(k):
                    raise Exception(
                        "Key %s is a gauge/counter and "
                        "we are trying to set subkey %s" % (k, key))

            for stat in stats:
                k, v = stat.split(':')
                s_value, s_kind = v.split('|')

                if key == k:
                    if kind is None:
                        # key with no qualifiers is found
                        return True

                    # if no kind match, look for other keys
                    if kind != s_kind:
                        continue

                    if value:
                        # special-case value|ms because statsd can turn
                        # timing results into float of indeterminate
                        # length, hence foiling string matching.
                        if kind == 'ms':
                            if float(value) == float(s_value):
                                return True
                        if value == s_value:
                            return True
                        # otherwise keep looking for other matches
                        continue

                    # this key matches
                    return True
            time.sleep(0.1)

        raise Exception("Key %s not found in reported stats" % key)
  4294. def assertBuilds(self, builds):
  4295. """Assert that the running builds are as described.
  4296. The list of running builds is examined and must match exactly
  4297. the list of builds described by the input.
  4298. :arg list builds: A list of dictionaries. Each item in the
  4299. list must match the corresponding build in the build
  4300. history, and each element of the dictionary must match the
  4301. corresponding attribute of the build.
  4302. """
  4303. try:
  4304. self.assertEqual(len(self.builds), len(builds))
  4305. for i, d in enumerate(builds):
  4306. for k, v in d.items():
  4307. self.assertEqual(
  4308. getattr(self.builds[i], k), v,
  4309. "Element %i in builds does not match" % (i,))
  4310. except Exception:
  4311. for build in self.builds:
  4312. self.log.error("Running build: %s" % build)
  4313. else:
  4314. self.log.error("No running builds")
  4315. raise
  4316. def assertHistory(self, history, ordered=True):
  4317. """Assert that the completed builds are as described.
  4318. The list of completed builds is examined and must match
  4319. exactly the list of builds described by the input.
  4320. :arg list history: A list of dictionaries. Each item in the
  4321. list must match the corresponding build in the build
  4322. history, and each element of the dictionary must match the
  4323. corresponding attribute of the build.
  4324. :arg bool ordered: If true, the history must match the order
  4325. supplied, if false, the builds are permitted to have
  4326. arrived in any order.
  4327. """
  4328. def matches(history_item, item):
  4329. for k, v in item.items():
  4330. if getattr(history_item, k) != v:
  4331. return False
  4332. return True
  4333. try:
  4334. self.assertEqual(len(self.history), len(history))
  4335. if ordered:
  4336. for i, d in enumerate(history):
  4337. if not matches(self.history[i], d):
  4338. raise Exception(
  4339. "Element %i in history does not match %s" %
  4340. (i, self.history[i]))
  4341. else:
  4342. unseen = self.history[:]
  4343. for i, d in enumerate(history):
  4344. found = False
  4345. for unseen_item in unseen:
  4346. if matches(unseen_item, d):
  4347. found = True
  4348. unseen.remove(unseen_item)
  4349. break
  4350. if not found:
  4351. raise Exception("No match found for element %i "
  4352. "in history" % (i,))
  4353. if unseen:
  4354. raise Exception("Unexpected items in history")
  4355. except Exception:
  4356. for build in self.history:
  4357. self.log.error("Completed build: %s" % build)
  4358. if not self.history:
  4359. self.log.error("No completed builds")
  4360. raise
  4361. def printHistory(self):
  4362. """Log the build history.
  4363. This can be useful during tests to summarize what jobs have
  4364. completed.
  4365. """
  4366. if not self.history:
  4367. self.log.debug("Build history: no builds ran")
  4368. return
  4369. self.log.debug("Build history:")
  4370. for build in self.history:
  4371. self.log.debug(build)
  4372. def updateConfigLayout(self, path):
  4373. root = os.path.join(self.test_root, "config")
  4374. if not os.path.exists(root):
  4375. os.makedirs(root)
  4376. f = tempfile.NamedTemporaryFile(dir=root, delete=False)
  4377. f.write("""
  4378. - tenant:
  4379. name: openstack
  4380. source:
  4381. gerrit:
  4382. config-projects:
  4383. - %s
  4384. untrusted-projects:
  4385. - org/project
  4386. - org/project1
  4387. - org/project2\n""" % path)
  4388. f.close()
  4389. self.config.set('scheduler', 'tenant_config',
  4390. os.path.join(FIXTURE_DIR, f.name))
  4391. self.setupAllProjectKeys(self.config)
  4392. def addTagToRepo(self, project, name, sha):
  4393. path = os.path.join(self.upstream_root, project)
  4394. repo = git.Repo(path)
  4395. repo.git.tag(name, sha)
  4396. def delTagFromRepo(self, project, name):
  4397. path = os.path.join(self.upstream_root, project)
  4398. repo = git.Repo(path)
  4399. repo.git.tag('-d', name)
    def addCommitToRepo(self, project, message, files,
                        branch='master', tag=None):
        """Add a commit with the given files to an upstream test repo.

        :arg str project: The project path relative to the upstream root.
        :arg str message: The commit message.
        :arg dict files: A mapping of relative file path to content.
            Content may be str, bytes (for binary files), or a SymLink.
        :arg str branch: The branch to commit to.
        :arg str tag: If supplied, a tag to create at the new commit.

        :returns: The commit that *branch* pointed at before this
            commit was added.
        """
        path = os.path.join(self.upstream_root, project)
        repo = git.Repo(path)
        repo.head.reference = branch
        # Discard any leftover work-tree state before building the commit.
        zuul.merger.merger.reset_repo_to_head(repo)
        for fn, content in files.items():
            fn = os.path.join(path, fn)
            try:
                os.makedirs(os.path.dirname(fn))
            except OSError:
                # Parent directory already exists; nothing to do.
                pass
            if isinstance(content, SymLink):
                os.symlink(content.target, fn)
            else:
                mode = 'w'
                if isinstance(content, bytes):
                    # the file fixtures are loaded as bytes such that
                    # we also support binary files
                    mode = 'wb'
                with open(fn, mode) as f:
                    f.write(content)
            repo.index.add([fn])
        commit = repo.index.commit(message)
        # Remember the previous branch tip so callers can reference it.
        before = repo.heads[branch].commit
        repo.heads[branch].commit = commit
        repo.head.reference = branch
        # Remove untracked files so the work tree matches the new commit.
        repo.git.clean('-x', '-f', '-d')
        repo.heads[branch].checkout()
        if tag:
            repo.create_tag(tag)
        return before
  4432. def commitConfigUpdate(self, project_name, source_name):
  4433. """Commit an update to zuul.yaml
  4434. This overwrites the zuul.yaml in the specificed project with
  4435. the contents specified.
  4436. :arg str project_name: The name of the project containing
  4437. zuul.yaml (e.g., common-config)
  4438. :arg str source_name: The path to the file (underneath the
  4439. test fixture directory) whose contents should be used to
  4440. replace zuul.yaml.
  4441. """
  4442. source_path = os.path.join(FIXTURE_DIR, source_name)
  4443. files = {}
  4444. with open(source_path, 'r') as f:
  4445. data = f.read()
  4446. layout = yaml.safe_load(data)
  4447. files['zuul.yaml'] = data
  4448. for item in layout:
  4449. if 'job' in item:
  4450. jobname = item['job']['name']
  4451. files['playbooks/%s.yaml' % jobname] = ''
  4452. before = self.addCommitToRepo(
  4453. project_name, 'Pulling content from %s' % source_name,
  4454. files)
  4455. return before
  4456. def newTenantConfig(self, source_name):
  4457. """ Use this to update the tenant config file in tests
  4458. This will update self.tenant_config_file to point to a temporary file
  4459. for the duration of this particular test. The content of that file will
  4460. be taken from FIXTURE_DIR/source_name
  4461. After the test the original value of self.tenant_config_file will be
  4462. restored.
  4463. :arg str source_name: The path of the file under
  4464. FIXTURE_DIR that will be used to populate the new tenant
  4465. config file.
  4466. """
  4467. source_path = os.path.join(FIXTURE_DIR, source_name)
  4468. orig_tenant_config_file = self.tenant_config_file
  4469. with tempfile.NamedTemporaryFile(
  4470. delete=False, mode='wb') as new_tenant_config:
  4471. self.tenant_config_file = new_tenant_config.name
  4472. with open(source_path, mode='rb') as source_tenant_config:
  4473. new_tenant_config.write(source_tenant_config.read())
  4474. self.config['scheduler']['tenant_config'] = self.tenant_config_file
  4475. self.setupAllProjectKeys(self.config)
  4476. self.log.debug(
  4477. 'tenant_config_file = {}'.format(self.tenant_config_file))
  4478. def _restoreTenantConfig():
  4479. self.log.debug(
  4480. 'restoring tenant_config_file = {}'.format(
  4481. orig_tenant_config_file))
  4482. os.unlink(self.tenant_config_file)
  4483. self.tenant_config_file = orig_tenant_config_file
  4484. self.config['scheduler']['tenant_config'] = orig_tenant_config_file
  4485. self.addCleanup(_restoreTenantConfig)
  4486. def addEvent(self, connection, event):
  4487. """Inject a Fake (Gerrit) event.
  4488. This method accepts a JSON-encoded event and simulates Zuul
  4489. having received it from Gerrit. It could (and should)
  4490. eventually apply to any connection type, but is currently only
  4491. used with Gerrit connections. The name of the connection is
  4492. used to look up the corresponding server, and the event is
  4493. simulated as having been received by all Zuul connections
  4494. attached to that server. So if two Gerrit connections in Zuul
  4495. are connected to the same Gerrit server, and you invoke this
  4496. method specifying the name of one of them, the event will be
  4497. received by both.
  4498. .. note::
  4499. "self.fake_gerrit.addEvent" calls should be migrated to
  4500. this method.
  4501. :arg str connection: The name of the connection corresponding
  4502. to the gerrit server.
  4503. :arg str event: The JSON-encoded event.
  4504. """
  4505. specified_conn = self.scheds.first.connections.connections[connection]
  4506. for conn in self.scheds.first.connections.connections.values():
  4507. if (isinstance(conn, specified_conn.__class__) and
  4508. specified_conn.server == conn.server):
  4509. conn.addEvent(event)
  4510. def getUpstreamRepos(self, projects):
  4511. """Return upstream git repo objects for the listed projects
  4512. :arg list projects: A list of strings, each the canonical name
  4513. of a project.
  4514. :returns: A dictionary of {name: repo} for every listed
  4515. project.
  4516. :rtype: dict
  4517. """
  4518. repos = {}
  4519. for project in projects:
  4520. # FIXME(jeblair): the upstream root does not yet have a
  4521. # hostname component; that needs to be added, and this
  4522. # line removed:
  4523. tmp_project_name = '/'.join(project.split('/')[1:])
  4524. path = os.path.join(self.upstream_root, tmp_project_name)
  4525. repo = git.Repo(path)
  4526. repos[project] = repo
  4527. return repos
  4528. class AnsibleZuulTestCase(ZuulTestCase):
  4529. """ZuulTestCase but with an actual ansible executor running"""
  4530. run_ansible = True
  4531. @contextmanager
  4532. def jobLog(self, build):
  4533. """Print job logs on assertion errors
  4534. This method is a context manager which, if it encounters an
  4535. ecxeption, adds the build log to the debug output.
  4536. :arg Build build: The build that's being asserted.
  4537. """
  4538. try:
  4539. yield
  4540. except Exception:
  4541. path = os.path.join(self.jobdir_root, build.uuid,
  4542. 'work', 'logs', 'job-output.txt')
  4543. with open(path) as f:
  4544. self.log.debug(f.read())
  4545. path = os.path.join(self.jobdir_root, build.uuid,
  4546. 'work', 'logs', 'job-output.json')
  4547. with open(path) as f:
  4548. self.log.debug(f.read())
  4549. raise
  4550. class SSLZuulTestCase(ZuulTestCase):
  4551. """ZuulTestCase but using SSL when possible"""
  4552. use_ssl = True
  4553. class ZuulDBTestCase(ZuulTestCase):
  4554. def setup_config(self, config_file: str):
  4555. config = super(ZuulDBTestCase, self).setup_config(config_file)
  4556. for section_name in config.sections():
  4557. con_match = re.match(r'^connection ([\'\"]?)(.*)(\1)$',
  4558. section_name, re.I)
  4559. if not con_match:
  4560. continue
  4561. if config.get(section_name, 'driver') == 'sql':
  4562. if (config.get(section_name, 'dburi') ==
  4563. '$MYSQL_FIXTURE_DBURI$'):
  4564. f = MySQLSchemaFixture()
  4565. self.useFixture(f)
  4566. config.set(section_name, 'dburi', f.dburi)
  4567. elif (config.get(section_name, 'dburi') ==
  4568. '$POSTGRESQL_FIXTURE_DBURI$'):
  4569. f = PostgresqlSchemaFixture()
  4570. self.useFixture(f)
  4571. config.set(section_name, 'dburi', f.dburi)
  4572. return config
  4573. class ZuulGithubAppTestCase(ZuulTestCase):
  4574. def setup_config(self, config_file: str):
  4575. config = super(ZuulGithubAppTestCase, self).setup_config(config_file)
  4576. for section_name in config.sections():
  4577. con_match = re.match(r'^connection ([\'\"]?)(.*)(\1)$',
  4578. section_name, re.I)
  4579. if not con_match:
  4580. continue
  4581. if config.get(section_name, 'driver') == 'github':
  4582. if (config.get(section_name, 'app_key',
  4583. fallback=None) ==
  4584. '$APP_KEY_FIXTURE$'):
  4585. config.set(section_name, 'app_key',
  4586. os.path.join(FIXTURE_DIR, 'app_key'))
  4587. return config