The Gatekeeper, or a project gating system
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

model.py 145KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
737783779378037813782378337843785378637873788378937903791379237933794379537963797379837993800380138023803380438053806380738083809381038113812381338143815381638173818381938203821382238233824382538263827382838293830383138323833383438353836383738383839384038413842384338443845384638473848384938503851385238533854385538563857385838593860386138623863386438653866386738683869387038713872387338743875387638773878387938803881388238833884388538863887388838893890389138923893389438953896389738983899390039013902390339043905390639073908390939103911391239133914391539163917391839193920392139223923392439253926392739283929393039313932393339343935393639373938393939403941394239433944394539463947394839493950395139523953395439553956395739583959396039613962396339643965396639673968396939703971397239733974397539763977397839793980398139823983398439853986398739883989399039913992399339943995399639973998399940004001400240034004400540064007400840094010401140124013401440154016401740184019402040214022402340244025402640274028402940304031403240334034403540364037403840394040404140424043404440454046404740484049405040514052405340544055405640574058405940604061406240634064406540664067406840694070407140724073407440754076407740784079408040814082408340844085408640874088408940904091409240934094409540964097
  1. # Copyright 2012 Hewlett-Packard Development Company, L.P.
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License"); you may
  4. # not use this file except in compliance with the License. You may obtain
  5. # a copy of the License at
  6. #
  7. # http://www.apache.org/licenses/LICENSE-2.0
  8. #
  9. # Unless required by applicable law or agreed to in writing, software
  10. # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
  11. # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
  12. # License for the specific language governing permissions and limitations
  13. # under the License.
  14. import abc
  15. from collections import OrderedDict
  16. import copy
  17. import logging
  18. import os
  19. import re2
  20. import struct
  21. import time
  22. from uuid import uuid4
  23. import urllib.parse
  24. import textwrap
  25. import types
  26. from zuul import change_matcher
  27. from zuul.lib.config import get_default
  28. MERGER_MERGE = 1 # "git merge"
  29. MERGER_MERGE_RESOLVE = 2 # "git merge -s resolve"
  30. MERGER_CHERRY_PICK = 3 # "git cherry-pick"
  31. MERGER_MAP = {
  32. 'merge': MERGER_MERGE,
  33. 'merge-resolve': MERGER_MERGE_RESOLVE,
  34. 'cherry-pick': MERGER_CHERRY_PICK,
  35. }
  36. PRECEDENCE_NORMAL = 0
  37. PRECEDENCE_LOW = 1
  38. PRECEDENCE_HIGH = 2
  39. PRECEDENCE_MAP = {
  40. None: PRECEDENCE_NORMAL,
  41. 'low': PRECEDENCE_LOW,
  42. 'normal': PRECEDENCE_NORMAL,
  43. 'high': PRECEDENCE_HIGH,
  44. }
  45. PRIORITY_MAP = {
  46. PRECEDENCE_NORMAL: 200,
  47. PRECEDENCE_LOW: 300,
  48. PRECEDENCE_HIGH: 100,
  49. }
  50. # Request states
  51. STATE_REQUESTED = 'requested'
  52. STATE_FULFILLED = 'fulfilled'
  53. STATE_FAILED = 'failed'
  54. REQUEST_STATES = set([STATE_REQUESTED,
  55. STATE_FULFILLED,
  56. STATE_FAILED])
  57. # Node states
  58. STATE_BUILDING = 'building'
  59. STATE_TESTING = 'testing'
  60. STATE_READY = 'ready'
  61. STATE_IN_USE = 'in-use'
  62. STATE_USED = 'used'
  63. STATE_HOLD = 'hold'
  64. STATE_DELETING = 'deleting'
  65. NODE_STATES = set([STATE_BUILDING,
  66. STATE_TESTING,
  67. STATE_READY,
  68. STATE_IN_USE,
  69. STATE_USED,
  70. STATE_HOLD,
  71. STATE_DELETING])
  72. class ConfigurationErrorKey(object):
  73. """A class which attempts to uniquely identify configuration errors
  74. based on their file location. It's not perfect, but it's usually
  75. sufficient to determine whether we should show an error to a user.
  76. """
  77. def __init__(self, context, mark, error_text):
  78. self.context = context
  79. self.mark = mark
  80. self.error_text = error_text
  81. elements = []
  82. if context:
  83. elements.extend([
  84. context.project.canonical_name,
  85. context.branch,
  86. context.path,
  87. ])
  88. else:
  89. elements.extend([None, None, None])
  90. if mark:
  91. elements.extend([
  92. mark.line,
  93. mark.snippet,
  94. ])
  95. else:
  96. elements.extend([None, None])
  97. elements.append(error_text)
  98. self._hash = hash('|'.join([str(x) for x in elements]))
  99. def __hash__(self):
  100. return self._hash
  101. def __ne__(self, other):
  102. return not self.__eq__(other)
  103. def __eq__(self, other):
  104. if not isinstance(other, ConfigurationErrorKey):
  105. return False
  106. return (self.context == other.context and
  107. self.mark.line == other.mark.line and
  108. self.mark.snippet == other.mark.snippet and
  109. self.error_text == other.error_text)
  110. class ConfigurationError(object):
  111. """A configuration error"""
  112. def __init__(self, context, mark, error, short_error=None):
  113. self.error = str(error)
  114. self.short_error = short_error
  115. self.key = ConfigurationErrorKey(context, mark, self.error)
  116. class LoadingErrors(object):
  117. """A configuration errors accumalator attached to a layout object
  118. """
  119. def __init__(self):
  120. self.errors = []
  121. self.error_keys = set()
  122. def addError(self, context, mark, error, short_error=None):
  123. e = ConfigurationError(context, mark, error, short_error)
  124. self.errors.append(e)
  125. self.error_keys.add(e.key)
  126. def __getitem__(self, index):
  127. return self.errors[index]
  128. def __len__(self):
  129. return len(self.errors)
  130. class NoMatchingParentError(Exception):
  131. """A job referenced a parent, but that parent had no variants which
  132. matched the current change."""
  133. pass
  134. class TemplateNotFoundError(Exception):
  135. """A project referenced a template that does not exist."""
  136. pass
  137. class Attributes(object):
  138. """A class to hold attributes for string formatting."""
  139. def __init__(self, **kw):
  140. setattr(self, '__dict__', kw)
class Freezable(object):
    """A mix-in class so that an object can be made immutable"""

    def __init__(self):
        # Go through the base __setattr__ so that initializing _frozen
        # itself does not trip the guard in our own __setattr__.
        super(Freezable, self).__setattr__('_frozen', False)

    def freeze(self):
        """Make this object immutable"""
        def _freezelist(l):
            # Freeze each element in place (recursing into nested
            # containers), then hand back an immutable tuple copy for
            # the caller to store.
            for i, v in enumerate(l):
                if isinstance(v, Freezable):
                    if not v._frozen:
                        v.freeze()
                elif isinstance(v, dict):
                    l[i] = _freezedict(v)
                elif isinstance(v, list):
                    l[i] = _freezelist(v)
            return tuple(l)

        def _freezedict(d):
            # Freeze each value in place (recursing into nested
            # containers), then hand back a read-only view of the dict.
            for k, v in list(d.items()):
                if isinstance(v, Freezable):
                    if not v._frozen:
                        v.freeze()
                elif isinstance(v, dict):
                    d[k] = _freezedict(v)
                elif isinstance(v, list):
                    d[k] = _freezelist(v)
            return types.MappingProxyType(d)

        _freezedict(self.__dict__)
        # Ignore return value from freezedict because __dict__ can't
        # be a mappingproxy.
        self._frozen = True

    def __setattr__(self, name, value):
        # Once frozen, all further attribute mutation is rejected.
        if self._frozen:
            raise Exception("Unable to modify frozen object %s" %
                            (repr(self),))
        super(Freezable, self).__setattr__(name, value)
  176. class ConfigObject(Freezable):
  177. def __init__(self):
  178. super().__init__()
  179. self.source_context = None
  180. self.start_mark = None
class Pipeline(object):
    """A configuration that ties together triggers, reporters and managers

    Trigger
        A description of which events should be processed

    Manager
        Responsible for enqueing and dequeing Changes

    Reporter
        Communicates success and failure results somewhere
    """

    def __init__(self, name, tenant):
        self.name = name
        # Note that pipelines are not portable across tenants (new
        # pipeline objects must be made when a tenant is
        # reconfigured). A pipeline requires a tenant in order to
        # reach the currently active layout for that tenant.
        self.tenant = tenant
        self.source_context = None
        self.description = None
        # Messages reported on the various result outcomes.
        self.failure_message = None
        self.merge_failure_message = None
        self.success_message = None
        self.footer_message = None
        self.start_message = None
        self.post_review = False
        self.dequeue_on_new_patchset = True
        self.ignore_dependencies = False
        # The manager (set later via setManager) drives enqueue/dequeue.
        self.manager = None
        self.queues = []
        self.relative_priority_queues = {}
        self.precedence = PRECEDENCE_NORMAL
        self.triggers = []
        # Reporter actions, grouped by the phase in which they fire.
        self.start_actions = []
        self.success_actions = []
        self.failure_actions = []
        self.merge_failure_actions = []
        self.disabled_actions = []
        # After disable_at consecutive failures the pipeline is marked
        # disabled (counters below track that state).
        self.disable_at = None
        self._consecutive_failures = 0
        self._disabled = False
        # Window defaults handed to newly created change queues.
        self.window = None
        self.window_floor = None
        self.window_increase_type = None
        self.window_increase_factor = None
        self.window_decrease_type = None
        self.window_decrease_factor = None

    @property
    def actions(self):
        # All reporter actions across every phase, concatenated.
        return (
            self.start_actions +
            self.success_actions +
            self.failure_actions +
            self.merge_failure_actions +
            self.disabled_actions
        )

    def __repr__(self):
        return '<Pipeline %s>' % self.name

    def getSafeAttributes(self):
        # A reduced attribute set safe to expose in string formatting.
        return Attributes(name=self.name)

    def setManager(self, manager):
        self.manager = manager

    def addQueue(self, queue):
        self.queues.append(queue)

    def getQueue(self, project):
        # Return the shared change queue containing project, or None.
        for queue in self.queues:
            if project in queue.projects:
                return queue
        return None

    def getRelativePriorityQueue(self, project):
        # Return the group of projects sharing a relative-priority
        # queue with this project; a project in no queue forms its own.
        for queue in self.relative_priority_queues.values():
            if project in queue:
                return queue
        return [project]

    def removeQueue(self, queue):
        if queue in self.queues:
            self.queues.remove(queue)

    def getChangesInQueue(self):
        # Flatten the changes of every shared queue into one list.
        changes = []
        for shared_queue in self.queues:
            changes.extend([x.change for x in shared_queue.queue])
        return changes

    def getAllItems(self):
        # Flatten the queue items of every shared queue into one list.
        items = []
        for shared_queue in self.queues:
            items.extend(shared_queue.queue)
        return items

    def formatStatusJSON(self, websocket_url=None):
        """Return a JSON-serializable dict describing this pipeline's
        queues, grouped into dependent chains ("heads")."""
        j_pipeline = dict(name=self.name,
                          description=self.description)
        j_queues = []
        j_pipeline['change_queues'] = j_queues
        for queue in self.queues:
            j_queue = dict(name=queue.name)
            j_queues.append(j_queue)
            j_queue['heads'] = []
            j_queue['window'] = queue.window
            j_changes = []
            for e in queue.queue:
                if not e.item_ahead:
                    # An item with nothing ahead of it starts a new
                    # chain; flush the previous chain first.
                    if j_changes:
                        j_queue['heads'].append(j_changes)
                    j_changes = []
                j_changes.append(e.formatJSON(websocket_url))
                if (len(j_changes) > 1 and
                        (j_changes[-2]['remaining_time'] is not None) and
                        (j_changes[-1]['remaining_time'] is not None)):
                    # An item cannot finish before the item ahead of it,
                    # so carry forward the larger remaining time.
                    j_changes[-1]['remaining_time'] = max(
                        j_changes[-2]['remaining_time'],
                        j_changes[-1]['remaining_time'])
            if j_changes:
                j_queue['heads'].append(j_changes)
        return j_pipeline
  292. class ChangeQueue(object):
  293. """A ChangeQueue contains Changes to be processed for related projects.
  294. A Pipeline with a DependentPipelineManager has multiple parallel
  295. ChangeQueues shared by different projects. For instance, there may a
  296. ChangeQueue shared by interrelated projects foo and bar, and a second queue
  297. for independent project baz.
  298. A Pipeline with an IndependentPipelineManager puts every Change into its
  299. own ChangeQueue.
  300. The ChangeQueue Window is inspired by TCP windows and controlls how many
  301. Changes in a given ChangeQueue will be considered active and ready to
  302. be processed. If a Change succeeds, the Window is increased by
  303. `window_increase_factor`. If a Change fails, the Window is decreased by
  304. `window_decrease_factor`.
  305. A ChangeQueue may be a dynamically created queue, which may be removed
  306. from a DependentPipelineManager once empty.
  307. """
  308. def __init__(self, pipeline, window=0, window_floor=1,
  309. window_increase_type='linear', window_increase_factor=1,
  310. window_decrease_type='exponential', window_decrease_factor=2,
  311. name=None, dynamic=False):
  312. self.pipeline = pipeline
  313. if name:
  314. self.name = name
  315. else:
  316. self.name = ''
  317. self.projects = []
  318. self._jobs = set()
  319. self.queue = []
  320. self.window = window
  321. self.window_floor = window_floor
  322. self.window_increase_type = window_increase_type
  323. self.window_increase_factor = window_increase_factor
  324. self.window_decrease_type = window_decrease_type
  325. self.window_decrease_factor = window_decrease_factor
  326. self.dynamic = dynamic
  327. def __repr__(self):
  328. return '<ChangeQueue %s: %s>' % (self.pipeline.name, self.name)
  329. def getJobs(self):
  330. return self._jobs
  331. def addProject(self, project):
  332. if project not in self.projects:
  333. self.projects.append(project)
  334. if not self.name:
  335. self.name = project.name
  336. def enqueueChange(self, change):
  337. item = QueueItem(self, change)
  338. self.enqueueItem(item)
  339. item.enqueue_time = time.time()
  340. return item
  341. def enqueueItem(self, item):
  342. item.pipeline = self.pipeline
  343. item.queue = self
  344. if self.queue:
  345. item.item_ahead = self.queue[-1]
  346. item.item_ahead.items_behind.append(item)
  347. self.queue.append(item)
  348. def dequeueItem(self, item):
  349. if item in self.queue:
  350. self.queue.remove(item)
  351. if item.item_ahead:
  352. item.item_ahead.items_behind.remove(item)
  353. for item_behind in item.items_behind:
  354. if item.item_ahead:
  355. item.item_ahead.items_behind.append(item_behind)
  356. item_behind.item_ahead = item.item_ahead
  357. item.item_ahead = None
  358. item.items_behind = []
  359. item.dequeue_time = time.time()
  360. def moveItem(self, item, item_ahead):
  361. if item.item_ahead == item_ahead:
  362. return False
  363. # Remove from current location
  364. if item.item_ahead:
  365. item.item_ahead.items_behind.remove(item)
  366. for item_behind in item.items_behind:
  367. if item.item_ahead:
  368. item.item_ahead.items_behind.append(item_behind)
  369. item_behind.item_ahead = item.item_ahead
  370. # Add to new location
  371. item.item_ahead = item_ahead
  372. item.items_behind = []
  373. if item.item_ahead:
  374. item.item_ahead.items_behind.append(item)
  375. return True
  376. def isActionable(self, item):
  377. if self.window:
  378. return item in self.queue[:self.window]
  379. else:
  380. return True
  381. def increaseWindowSize(self):
  382. if self.window:
  383. if self.window_increase_type == 'linear':
  384. self.window += self.window_increase_factor
  385. elif self.window_increase_type == 'exponential':
  386. self.window *= self.window_increase_factor
  387. def decreaseWindowSize(self):
  388. if self.window:
  389. if self.window_decrease_type == 'linear':
  390. self.window = max(
  391. self.window_floor,
  392. self.window - self.window_decrease_factor)
  393. elif self.window_decrease_type == 'exponential':
  394. self.window = max(
  395. self.window_floor,
  396. int(self.window / self.window_decrease_factor))
  397. class Project(object):
  398. """A Project represents a git repository such as openstack/nova."""
  399. # NOTE: Projects should only be instantiated via a Source object
  400. # so that they are associated with and cached by their Connection.
  401. # This makes a Project instance a unique identifier for a given
  402. # project from a given source.
  403. def __init__(self, name, source, foreign=False):
  404. self.name = name
  405. self.source = source
  406. self.connection_name = source.connection.connection_name
  407. self.canonical_hostname = source.canonical_hostname
  408. self.canonical_name = source.canonical_hostname + '/' + name
  409. # foreign projects are those referenced in dependencies
  410. # of layout projects, this should matter
  411. # when deciding whether to enqueue their changes
  412. # TODOv3 (jeblair): re-add support for foreign projects if needed
  413. self.foreign = foreign
  414. def __str__(self):
  415. return self.name
  416. def __repr__(self):
  417. return '<Project %s>' % (self.name)
  418. def getSafeAttributes(self):
  419. return Attributes(name=self.name)
  420. def toDict(self):
  421. d = {}
  422. d['name'] = self.name
  423. d['connection_name'] = self.connection_name
  424. d['canonical_name'] = self.canonical_name
  425. return d
class Node(ConfigObject):
    """A single node for use by a job.

    This may represent a request for a node, or an actual node
    provided by Nodepool.
    """

    def __init__(self, name, label):
        super(Node, self).__init__()
        # NOTE(review): name appears to be a list of names; toDict()
        # serializes name[0] as the name and name[1:] as aliases --
        # confirm with callers.
        self.name = name
        self.label = label
        self.id = None
        self.lock = None
        self.hold_job = None
        self.comment = None
        # Attributes from Nodepool
        self._state = 'unknown'
        self.state_time = time.time()
        self.host_id = None
        self.interface_ip = None
        self.public_ipv4 = None
        self.private_ipv4 = None
        self.public_ipv6 = None
        self.connection_port = 22
        self.connection_type = None
        # Names of the attributes last set by updateFromDict, so that
        # toDict can round-trip exactly what Nodepool sent us.
        self._keys = []
        self.az = None
        self.provider = None
        self.region = None
        self.username = None
        self.hold_expiration = None

    @property
    def state(self):
        return self._state

    @state.setter
    def state(self, value):
        # Reject unknown states and record when the transition happened.
        if value not in NODE_STATES:
            raise TypeError("'%s' is not a valid state" % value)
        self._state = value
        self.state_time = time.time()

    def __repr__(self):
        return '<Node %s %s:%s>' % (self.id, self.name, self.label)

    def __ne__(self, other):
        return not self.__eq__(other)

    def __eq__(self, other):
        # Equality covers identity (name, label, id), not mutable
        # Nodepool attributes.
        if not isinstance(other, Node):
            return False
        return (self.name == other.name and
                self.label == other.label and
                self.id == other.id)

    def toDict(self, internal_attributes=False):
        """Render this node as a dict.

        :arg bool internal_attributes: Also include name, aliases and
            label (only useful for the rpc serialization).
        """
        d = {}
        d['state'] = self.state
        d['hold_job'] = self.hold_job
        d['comment'] = self.comment
        # Round-trip whatever Nodepool attributes we received.
        for k in self._keys:
            d[k] = getattr(self, k)
        if internal_attributes:
            # These attributes are only useful for the rpc serialization
            d['name'] = self.name[0]
            d['aliases'] = self.name[1:]
            d['label'] = self.label
        return d

    def updateFromDict(self, data):
        """Update this node from a dict read back from Nodepool/ZK.

        Assigns _state directly, bypassing the state setter's
        validation and state_time update; remaining keys are set as
        attributes and remembered in _keys for serialization.
        """
        self._state = data['state']
        keys = []
        for k, v in data.items():
            if k == 'state':
                continue
            keys.append(k)
            setattr(self, k, v)
        self._keys = keys
  496. class Group(ConfigObject):
  497. """A logical group of nodes for use by a job.
  498. A Group is a named set of node names that will be provided to
  499. jobs in the inventory to describe logical units where some subset of tasks
  500. run.
  501. """
  502. def __init__(self, name, nodes):
  503. super(Group, self).__init__()
  504. self.name = name
  505. self.nodes = nodes
  506. def __repr__(self):
  507. return '<Group %s %s>' % (self.name, str(self.nodes))
  508. def __ne__(self, other):
  509. return not self.__eq__(other)
  510. def __eq__(self, other):
  511. if not isinstance(other, Group):
  512. return False
  513. return (self.name == other.name and
  514. self.nodes == other.nodes)
  515. def toDict(self):
  516. return {
  517. 'name': self.name,
  518. 'nodes': self.nodes
  519. }
  520. class NodeSet(ConfigObject):
  521. """A set of nodes.
  522. In configuration, NodeSets are attributes of Jobs indicating that
  523. a Job requires nodes matching this description.
  524. They may appear as top-level configuration objects and be named,
  525. or they may appears anonymously in in-line job definitions.
  526. """
  527. def __init__(self, name=None):
  528. super(NodeSet, self).__init__()
  529. self.name = name or ''
  530. self.nodes = OrderedDict()
  531. self.groups = OrderedDict()
  532. def __ne__(self, other):
  533. return not self.__eq__(other)
  534. def __eq__(self, other):
  535. if not isinstance(other, NodeSet):
  536. return False
  537. return (self.name == other.name and
  538. self.nodes == other.nodes)
  539. def toDict(self):
  540. d = {}
  541. d['name'] = self.name
  542. d['nodes'] = []
  543. for node in self.nodes.values():
  544. d['nodes'].append(node.toDict(internal_attributes=True))
  545. d['groups'] = []
  546. for group in self.groups.values():
  547. d['groups'].append(group.toDict())
  548. return d
  549. def copy(self):
  550. n = NodeSet(self.name)
  551. for name, node in self.nodes.items():
  552. n.addNode(Node(node.name, node.label))
  553. for name, group in self.groups.items():
  554. n.addGroup(Group(group.name, group.nodes[:]))
  555. return n
  556. def addNode(self, node):
  557. for name in node.name:
  558. if name in self.nodes:
  559. raise Exception("Duplicate node in %s" % (self,))
  560. self.nodes[tuple(node.name)] = node
  561. def getNodes(self):
  562. return list(self.nodes.values())
  563. def addGroup(self, group):
  564. if group.name in self.groups:
  565. raise Exception("Duplicate group in %s" % (self,))
  566. self.groups[group.name] = group
  567. def getGroups(self):
  568. return list(self.groups.values())
  569. def __repr__(self):
  570. if self.name:
  571. name = self.name + ' '
  572. else:
  573. name = ''
  574. return '<NodeSet %s%s>' % (name, list(self.nodes.values()))
  575. def __len__(self):
  576. return len(self.nodes)
class NodeRequest(object):
    """A request for a set of nodes."""

    def __init__(self, requestor, build_set, job, nodeset, relative_priority):
        self.requestor = requestor
        self.build_set = build_set
        self.job = job
        self.nodeset = nodeset
        self._state = STATE_REQUESTED
        self.requested_time = time.time()
        self.state_time = time.time()
        self.created_time = None
        self.stat = None
        # Locally generated unique id; independent of self.id, which
        # is assigned externally once the request is submitted.
        self.uid = uuid4().hex
        self.relative_priority = relative_priority
        self.id = None
        self._zk_data = {}  # Data that we read back from ZK
        # Zuul internal flags (not stored in ZK so they are not
        # overwritten).
        self.failed = False
        self.canceled = False

    @property
    def priority(self):
        # Derive the priority from the pipeline precedence, falling
        # back to normal precedence when no build set is attached.
        if self.build_set:
            precedence = self.build_set.item.pipeline.precedence
        else:
            precedence = PRECEDENCE_NORMAL
        return PRIORITY_MAP[precedence]

    @property
    def fulfilled(self):
        # A failed request is never considered fulfilled.
        return (self._state == STATE_FULFILLED) and not self.failed

    @property
    def state(self):
        return self._state

    @state.setter
    def state(self, value):
        # Reject unknown states and record when the transition happened.
        if value not in REQUEST_STATES:
            raise TypeError("'%s' is not a valid state" % value)
        self._state = value
        self.state_time = time.time()

    def __repr__(self):
        return '<NodeRequest %s %s>' % (self.id, self.nodeset)

    def toDict(self):
        """Render this request for storage in ZooKeeper."""
        # Start with any previously read data
        d = self._zk_data.copy()
        nodes = [n.label for n in self.nodeset.getNodes()]
        # These are immutable once set
        d.setdefault('node_types', nodes)
        d.setdefault('requestor', self.requestor)
        d.setdefault('created_time', self.created_time)
        # We might change these
        d['state'] = self.state
        d['state_time'] = self.state_time
        d['relative_priority'] = self.relative_priority
        return d

    def updateFromDict(self, data):
        """Update this request from data read back from ZooKeeper.

        Assigns _state directly, bypassing the setter, since the
        state_time also comes from the stored data.
        """
        self._zk_data = data
        self._state = data['state']
        self.state_time = data['state_time']
        self.relative_priority = data.get('relative_priority', 0)
  636. class Secret(ConfigObject):
  637. """A collection of private data.
  638. In configuration, Secrets are collections of private data in
  639. key-value pair format. They are defined as top-level
  640. configuration objects and then referenced by Jobs.
  641. """
  642. def __init__(self, name, source_context):
  643. super(Secret, self).__init__()
  644. self.name = name
  645. self.source_context = source_context
  646. # The secret data may or may not be encrypted. This attribute
  647. # is named 'secret_data' to make it easy to search for and
  648. # spot where it is directly used.
  649. self.secret_data = {}
  650. def __ne__(self, other):
  651. return not self.__eq__(other)
  652. def __eq__(self, other):
  653. if not isinstance(other, Secret):
  654. return False
  655. return (self.name == other.name and
  656. self.source_context == other.source_context and
  657. self.secret_data == other.secret_data)
  658. def areDataEqual(self, other):
  659. return (self.secret_data == other.secret_data)
  660. def __repr__(self):
  661. return '<Secret %s>' % (self.name,)
  662. def _decrypt(self, private_key, secret_data):
  663. # recursive function to decrypt data
  664. if hasattr(secret_data, 'decrypt'):
  665. return secret_data.decrypt(private_key)
  666. if isinstance(secret_data, (dict, types.MappingProxyType)):
  667. decrypted_secret_data = {}
  668. for k, v in secret_data.items():
  669. decrypted_secret_data[k] = self._decrypt(private_key, v)
  670. return decrypted_secret_data
  671. if isinstance(secret_data, (list, tuple)):
  672. decrypted_secret_data = []
  673. for v in secret_data:
  674. decrypted_secret_data.append(self._decrypt(private_key, v))
  675. return decrypted_secret_data
  676. return secret_data
  677. def decrypt(self, private_key):
  678. """Return a copy of this secret with any encrypted data decrypted.
  679. Note that the original remains encrypted."""
  680. r = Secret(self.name, self.source_context)
  681. r.secret_data = self._decrypt(private_key, self.secret_data)
  682. return r
  683. class SourceContext(ConfigObject):
  684. """A reference to the branch of a project in configuration.
  685. Jobs and playbooks reference this to keep track of where they
  686. originate."""
  687. def __init__(self, project, branch, path, trusted):
  688. super(SourceContext, self).__init__()
  689. self.project = project
  690. self.branch = branch
  691. self.path = path
  692. self.trusted = trusted
  693. self.implied_branch_matchers = None
  694. self.implied_branches = None
  695. def __str__(self):
  696. return '%s/%s@%s' % (self.project, self.path, self.branch)
  697. def __repr__(self):
  698. return '<SourceContext %s trusted:%s>' % (str(self),
  699. self.trusted)
  700. def __deepcopy__(self, memo):
  701. return self.copy()
  702. def copy(self):
  703. return self.__class__(self.project, self.branch, self.path,
  704. self.trusted)
  705. def isSameProject(self, other):
  706. if not isinstance(other, SourceContext):
  707. return False
  708. return (self.project == other.project and
  709. self.trusted == other.trusted)
  710. def __ne__(self, other):
  711. return not self.__eq__(other)
  712. def __eq__(self, other):
  713. if not isinstance(other, SourceContext):
  714. return False
  715. return (self.project == other.project and
  716. self.branch == other.branch and
  717. self.path == other.path and
  718. self.trusted == other.trusted)
  719. def toDict(self):
  720. return dict(
  721. project=self.project.name,
  722. branch=self.branch,
  723. path=self.path,
  724. )
class PlaybookContext(ConfigObject):
    """A reference to a playbook in the context of a project.

    Jobs refer to objects of this class for their main, pre, and post
    playbooks so that we can keep track of which repos and security
    contexts are needed in order to run them.

    We also keep a list of roles so that playbooks only run with the
    roles which were defined at the point the playbook was defined.
    """

    def __init__(self, source_context, path, roles, secrets):
        super(PlaybookContext, self).__init__()
        self.source_context = source_context
        self.path = path
        self.roles = roles
        # List of (secret_name, secret_alias) references; resolved
        # into decrypted_secrets by freezeSecrets().
        self.secrets = secrets
        self.decrypted_secrets = ()

    def __repr__(self):
        return '<PlaybookContext %s %s>' % (self.source_context,
                                            self.path)

    def __ne__(self, other):
        return not self.__eq__(other)

    def __eq__(self, other):
        if not isinstance(other, PlaybookContext):
            return False
        return (self.source_context == other.source_context and
                self.path == other.path and
                self.roles == other.roles and
                self.secrets == other.secrets)

    def copy(self):
        # Note: decrypted_secrets are not carried over; the copy
        # starts from the unresolved secret references.
        r = PlaybookContext(self.source_context,
                            self.path,
                            self.roles,
                            self.secrets)
        return r

    def validateReferences(self, layout):
        # Verify that references to other objects in the layout are
        # valid.
        for (secret_name, secret_alias) in self.secrets:
            secret = layout.secrets.get(secret_name)
            if secret is None:
                raise Exception(
                    'The secret "{name}" was not found.'.format(
                        name=secret_name))
            # Reserved aliases would shadow the zuul/nodepool
            # variables in the job's inventory.
            if secret_alias == 'zuul' or secret_alias == 'nodepool':
                raise Exception('Secrets named "zuul" or "nodepool" '
                                'are not allowed.')
            if not secret.source_context.isSameProject(self.source_context):
                raise Exception(
                    "Unable to use secret {name}. Secrets must be "
                    "defined in the same project in which they "
                    "are used".format(
                        name=secret_name))
            # Decrypt a copy of the secret to verify it can be done
            secret.decrypt(self.source_context.project.private_secrets_key)

    def freezeSecrets(self, layout):
        """Resolve and decrypt this playbook's secret references.

        Each (name, alias) pair is looked up in the layout, decrypted
        with the project's private key, renamed to its alias, and
        stored in decrypted_secrets.
        """
        secrets = []
        for (secret_name, secret_alias) in self.secrets:
            secret = layout.secrets.get(secret_name)
            decrypted_secret = secret.decrypt(
                self.source_context.project.private_secrets_key)
            decrypted_secret.name = secret_alias
            secrets.append(decrypted_secret)
        self.decrypted_secrets = tuple(secrets)

    def toDict(self):
        # Render to a dict to use in passing json to the executor
        secrets = {}
        for secret in self.decrypted_secrets:
            secrets[secret.name] = secret.secret_data
        return dict(
            connection=self.source_context.project.connection_name,
            project=self.source_context.project.name,
            branch=self.source_context.branch,
            trusted=self.source_context.trusted,
            roles=[r.toDict() for r in self.roles],
            secrets=secrets,
            path=self.path)
  800. class Role(ConfigObject, metaclass=abc.ABCMeta):
  801. """A reference to an ansible role."""
  802. def __init__(self, target_name):
  803. super(Role, self).__init__()
  804. self.target_name = target_name
  805. @abc.abstractmethod
  806. def __repr__(self):
  807. pass
  808. def __ne__(self, other):
  809. return not self.__eq__(other)
  810. @abc.abstractmethod
  811. def __eq__(self, other):
  812. if not isinstance(other, Role):
  813. return False
  814. return (self.target_name == other.target_name)
  815. @abc.abstractmethod
  816. def toDict(self):
  817. # Render to a dict to use in passing json to the executor
  818. return dict(target_name=self.target_name)
  819. class ZuulRole(Role):
  820. """A reference to an ansible role in a Zuul project."""
  821. def __init__(self, target_name, project_canonical_name, implicit=False):
  822. super(ZuulRole, self).__init__(target_name)
  823. self.project_canonical_name = project_canonical_name
  824. self.implicit = implicit
  825. def __repr__(self):
  826. return '<ZuulRole %s %s>' % (self.project_canonical_name,
  827. self.target_name)
  828. __hash__ = object.__hash__
  829. def __eq__(self, other):
  830. if not isinstance(other, ZuulRole):
  831. return False
  832. # Implicit is not consulted for equality so that we can handle
  833. # implicit to explicit conversions.
  834. return (super(ZuulRole, self).__eq__(other) and
  835. self.project_canonical_name == other.project_canonical_name)
  836. def toDict(self):
  837. # Render to a dict to use in passing json to the executor
  838. d = super(ZuulRole, self).toDict()
  839. d['type'] = 'zuul'
  840. d['project_canonical_name'] = self.project_canonical_name
  841. d['implicit'] = self.implicit
  842. return d
class Job(ConfigObject):
    """A Job represents the definition of actions to perform.

    A Job is an abstract configuration concept.  It describes what,
    where, and under what circumstances something should be run
    (contrast this with Build which is a concrete single execution of
    a Job).

    NB: Do not modify attributes of this class, set them directly
    (e.g., "job.run = ..." rather than "job.run.append(...)").
    """

    # Sentinel parent value marking a base job; see isBase().
    BASE_JOB_MARKER = object()

    def __init__(self, name):
        super(Job, self).__init__()
        # These attributes may override even the final form of a job
        # in the context of a project-pipeline.  They can not affect
        # the execution of the job, but only whether the job is run
        # and how it is reported.
        self.context_attributes = dict(
            voting=True,
            hold_following_changes=False,
            failure_message=None,
            success_message=None,
            failure_url=None,
            success_url=None,
            branch_matcher=None,
            file_matcher=None,
            irrelevant_file_matcher=None,  # skip-if
            tags=frozenset(),
            dependencies=frozenset(),
        )
        # These attributes affect how the job is actually run and more
        # care must be taken when overriding them.  If a job is
        # declared "final", these may not be overridden in a
        # project-pipeline.
        self.execution_attributes = dict(
            parent=None,
            timeout=None,
            post_timeout=None,
            variables={},
            extra_variables={},
            host_variables={},
            group_variables={},
            nodeset=NodeSet(),
            workspace=None,
            pre_run=(),
            post_run=(),
            run=(),
            semaphore=None,
            attempts=3,
            final=False,
            abstract=False,
            protected=None,
            roles=(),
            required_projects={},
            allowed_projects=None,
            override_branch=None,
            override_checkout=None,
            post_review=None,
        )
        # These are generally internal attributes which are not
        # accessible via configuration.
        self.other_attributes = dict(
            name=None,
            source_context=None,
            start_mark=None,
            inheritance_path=(),
            parent_data=None,
            description=None,
            variant_description=None,
            protected_origin=None,
            _branches=(),
            _implied_branch=None,
            _files=(),
            _irrelevant_files=(),
        )
        # Merged views of the attribute/default maps above; these feed
        # the __getattr__ fallback below.
        self.inheritable_attributes = {}
        self.inheritable_attributes.update(self.context_attributes)
        self.inheritable_attributes.update(self.execution_attributes)
        self.attributes = {}
        self.attributes.update(self.inheritable_attributes)
        self.attributes.update(self.other_attributes)
        self.name = name

    def toDict(self, tenant):
        '''
        Convert a Job object's attributes to a dictionary.
        '''
        d = {}
        d['name'] = self.name
        d['branches'] = self._branches
        d['files'] = self._files
        d['irrelevant_files'] = self._irrelevant_files
        d['variant_description'] = self.variant_description
        d['implied_branch'] = self._implied_branch
        d['source_context'] = self.source_context.toDict()
        d['description'] = self.description
        d['required_projects'] = []
        for project in self.required_projects.values():
            d['required_projects'].append(project.toDict())
        if self.semaphore:
            # For now just leave the semaphore name here until we really need
            # more information in zuul-web about this
            d['semaphore'] = self.semaphore.name
        else:
            d['semaphore'] = None
        d['variables'] = self.variables
        d['final'] = self.final
        d['abstract'] = self.abstract
        d['protected'] = self.protected
        d['voting'] = self.voting
        d['timeout'] = self.timeout
        d['attempts'] = self.attempts
        d['roles'] = list(map(lambda x: x.toDict(), self.roles))
        d['post_review'] = self.post_review
        if self.isBase():
            d['parent'] = None
        elif self.parent:
            d['parent'] = self.parent
        else:
            d['parent'] = tenant.default_base_job
        d['dependencies'] = []
        for dependency in self.dependencies:
            d['dependencies'].append(dependency)
        if isinstance(self.nodeset, str):
            # A string nodeset references a named nodeset in the layout.
            ns = tenant.layout.nodesets.get(self.nodeset)
        else:
            ns = self.nodeset
        if ns:
            d['nodeset'] = ns.toDict()
        return d

    def __ne__(self, other):
        return not self.__eq__(other)

    def __eq__(self, other):
        # Compare the name and all inheritable attributes to determine
        # whether two jobs with the same name are identically
        # configured.  Useful upon reconfiguration.
        if not isinstance(other, Job):
            return False
        if self.name != other.name:
            return False
        for k, v in self.attributes.items():
            if getattr(self, k) != getattr(other, k):
                return False
        return True

    __hash__ = object.__hash__

    def __str__(self):
        return self.name

    def __repr__(self):
        ln = 0
        if self.start_mark:
            ln = self.start_mark.line + 1
        return '<Job %s branches: %s source: %s#%s>' % (
            self.name,
            self.branch_matcher,
            self.source_context,
            ln)

    def __getattr__(self, name):
        # Fall back to the attribute's declared default when it has
        # not been set on the instance.  NOTE(review): a value
        # explicitly set to None is indistinguishable from "unset"
        # here; use _get() to observe only explicitly-set values.
        v = self.__dict__.get(name)
        if v is None:
            return self.attributes[name]
        return v

    def _get(self, name):
        # Return only the explicitly-set value for name (or None);
        # does not fall back to the defaults in self.attributes.
        return self.__dict__.get(name)

    def getSafeAttributes(self):
        """Return only the attributes safe to expose to untrusted contexts."""
        return Attributes(name=self.name)

    def isBase(self):
        """Return whether this is a base job (no parent)."""
        return self.parent is self.BASE_JOB_MARKER

    def setBase(self, layout):
        """Freeze the playbooks of a base job against the given layout."""
        self.inheritance_path = self.inheritance_path + (repr(self),)
        if self._get('run') is not None:
            self.run = self.freezePlaybooks(self.run, layout)
        if self._get('pre_run') is not None:
            self.pre_run = self.freezePlaybooks(self.pre_run, layout)
        if self._get('post_run') is not None:
            self.post_run = self.freezePlaybooks(self.post_run, layout)

    def getNodeSet(self, layout):
        """Resolve this job's nodeset, looking up named nodesets in the
        layout; raises if a named nodeset does not exist."""
        if isinstance(self.nodeset, str):
            # This references an existing named nodeset in the layout.
            ns = layout.nodesets.get(self.nodeset)
            if ns is None:
                raise Exception(
                    'The nodeset "{nodeset}" was not found.'.format(
                        nodeset=self.nodeset))
            return ns
        return self.nodeset

    def validateReferences(self, layout):
        # Verify that references to other objects in the layout are
        # valid.
        if not self.isBase() and self.parent:
            layout.getJob(self.parent)
        ns = self.getNodeSet(layout)
        # -1 means the tenant imposes no node-count limit.
        if layout.tenant.max_nodes_per_job != -1 and \
           len(ns) > layout.tenant.max_nodes_per_job:
            raise Exception(
                'The job "{job}" exceeds tenant '
                'max-nodes-per-job {maxnodes}.'.format(
                    job=self.name,
                    maxnodes=layout.tenant.max_nodes_per_job))
        for pb in self.pre_run + self.run + self.post_run:
            pb.validateReferences(layout)

    def addRoles(self, roles):
        """Merge roles into this job, preferring explicit over implicit
        versions of the same role."""
        newroles = []
        # Start with a copy of the existing roles, but if any of them
        # are implicit roles which are identified as explicit in the
        # new roles list, replace them with the explicit version.
        changed = False
        for existing_role in self.roles:
            if existing_role in roles:
                new_role = roles[roles.index(existing_role)]
            else:
                new_role = None
            if (new_role and
                isinstance(new_role, ZuulRole) and
                isinstance(existing_role, ZuulRole) and
                existing_role.implicit and not new_role.implicit):
                newroles.append(new_role)
                changed = True
            else:
                newroles.append(existing_role)
        # Now add the new roles.
        for role in reversed(roles):
            if role not in newroles:
                newroles.insert(0, role)
                changed = True
        if changed:
            self.roles = tuple(newroles)

    def setBranchMatcher(self, branches):
        # Set the branch matcher to match any of the supplied branches
        self._branches = branches
        matchers = []
        for branch in branches:
            matchers.append(change_matcher.BranchMatcher(branch))
        self.branch_matcher = change_matcher.MatchAny(matchers)

    def setFileMatcher(self, files):
        # Set the file matcher to match any of the change files
        self._files = files
        matchers = []
        for fn in files:
            matchers.append(change_matcher.FileMatcher(fn))
        self.file_matcher = change_matcher.MatchAny(matchers)

    def setIrrelevantFileMatcher(self, irrelevant_files):
        # Set the irrelevant file matcher to match any of the change files
        self._irrelevant_files = irrelevant_files
        matchers = []
        for fn in irrelevant_files:
            matchers.append(change_matcher.FileMatcher(fn))
        self.irrelevant_file_matcher = change_matcher.MatchAllFiles(matchers)

    def getSimpleBranchMatcher(self):
        # If the job has a simple branch matcher, return it; otherwise None.
        if not self.branch_matcher:
            return None
        m = self.branch_matcher
        if not isinstance(m, change_matcher.AbstractMatcherCollection):
            return None
        if len(m.matchers) != 1:
            return None
        m = m.matchers[0]
        if not isinstance(m, change_matcher.BranchMatcher):
            return None
        return m

    def addImpliedBranchMatcher(self, branch):
        # Add a branch matcher that combines as a boolean *and* with
        # existing branch matchers, if any.
        self._implied_branch = branch
        matchers = [change_matcher.ImpliedBranchMatcher(branch)]
        if self.branch_matcher:
            matchers.append(self.branch_matcher)
        self.branch_matcher = change_matcher.MatchAll(matchers)

    def updateVariables(self, other_vars, other_extra_vars, other_host_vars,
                        other_group_vars):
        """Deep-merge another job's variable maps into this job's; the
        other job's values take precedence."""
        if other_vars is not None:
            self.variables = Job._deepUpdate(self.variables, other_vars)
        if other_extra_vars is not None:
            self.extra_variables = Job._deepUpdate(
                self.extra_variables, other_extra_vars)
        if other_host_vars is not None:
            self.host_variables = Job._deepUpdate(
                self.host_variables, other_host_vars)
        if other_group_vars is not None:
            self.group_variables = Job._deepUpdate(
                self.group_variables, other_group_vars)

    def updateParentData(self, other_vars):
        # Update variables, but give the current values priority (used
        # for job return data which is lower precedence than defined
        # job vars).
        v = self.parent_data or {}
        v = Job._deepUpdate(v, other_vars)
        # To avoid running afoul of checks that jobs don't set zuul
        # variables, remove them from parent data here.
        if 'zuul' in v:
            del v['zuul']
        self.parent_data = v
        self.variables = Job._deepUpdate(self.parent_data, self.variables)

    def updateProjectVariables(self, project_vars):
        # Merge project/template variables directly into the job
        # variables.  Job variables override project variables.
        self.variables = Job._deepUpdate(project_vars, self.variables)

    def updateProjects(self, other_projects):
        """Merge other_projects into required_projects (other wins)."""
        required_projects = self.required_projects.copy()
        required_projects.update(other_projects)
        self.required_projects = required_projects

    @staticmethod
    def _deepUpdate(a, b):
        # Merge nested dictionaries if possible, otherwise, overwrite
        # the value in 'a' with the value in 'b'.
        ret = {}
        for k, av in a.items():
            if k not in b:
                ret[k] = av
        for k, bv in b.items():
            av = a.get(k)
            if (isinstance(av, (dict, types.MappingProxyType)) and
                isinstance(bv, (dict, types.MappingProxyType))):
                ret[k] = Job._deepUpdate(av, bv)
            else:
                ret[k] = bv
        return ret

    def copy(self):
        """Return a new Job with this job's explicitly-set attributes."""
        job = Job(self.name)
        for k in self.attributes:
            v = self._get(k)
            if v is not None:
                # If this is a config object, it's frozen, so it's
                # safe to shallow copy.
                setattr(job, k, v)
        return job

    def freezePlaybooks(self, pblist, layout):
        """Take a list of playbooks, and return a copy of it updated with this
        job's roles.
        """
        ret = []
        for old_pb in pblist:
            pb = old_pb.copy()
            pb.roles = self.roles
            pb.freezeSecrets(layout)
            ret.append(pb)
        return tuple(ret)

    def applyVariant(self, other, layout):
        """Copy the attributes which have been set on the other job to this
        job."""
        if not isinstance(other, Job):
            raise Exception("Job unable to inherit from %s" % (other,))
        # First pass: validate that nothing forbidden is being
        # overridden, and copy the simple execution attributes.
        for k in self.execution_attributes:
            if (other._get(k) is not None and
                k not in set(['final', 'abstract', 'protected'])):
                if self.final:
                    raise Exception("Unable to modify final job %s attribute "
                                    "%s=%s with variant %s" % (
                                        repr(self), k, other._get(k),
                                        repr(other)))
                if self.protected_origin:
                    # this is a protected job, check origin of job definition
                    this_origin = self.protected_origin
                    other_origin = other.source_context.project.canonical_name
                    if this_origin != other_origin:
                        raise Exception("Job %s which is defined in %s is "
                                        "protected and cannot be inherited "
                                        "from other projects."
                                        % (repr(self), this_origin))
                # Attributes needing a merge (playbooks, roles,
                # variables, projects) are handled individually below.
                if k not in set(['pre_run', 'run', 'post_run', 'roles',
                                 'variables', 'extra_variables',
                                 'host_variables', 'group_variables',
                                 'required_projects', 'allowed_projects']):
                    setattr(self, k, other._get(k))
        # Don't set final above so that we don't trip an error halfway
        # through assignment.
        if other.final != self.attributes['final']:
            self.final = other.final
        # Abstract may not be reset by a variant, it may only be
        # cleared by inheriting.
        if other.name != self.name:
            self.abstract = other.abstract
        elif other.abstract:
            self.abstract = True
        # Protected may only be set to true
        if other.protected is not None:
            # don't allow to reset protected flag
            if not other.protected and self.protected_origin:
                raise Exception("Unable to reset protected attribute of job"
                                " %s by job %s" % (
                                    repr(self), repr(other)))
            if not self.protected_origin:
                self.protected_origin = \
                    other.source_context.project.canonical_name
        # We must update roles before any playbook contexts
        if other._get('roles') is not None:
            self.addRoles(other.roles)
        # Freeze the nodeset
        self.nodeset = self.getNodeSet(layout)
        # Pre-run playbooks accumulate in order; post-run playbooks
        # accumulate in reverse order; the main run is replaced.
        if other._get('run') is not None:
            other_run = self.freezePlaybooks(other.run, layout)
            self.run = other_run
        if other._get('pre_run') is not None:
            other_pre_run = self.freezePlaybooks(other.pre_run, layout)
            self.pre_run = self.pre_run + other_pre_run
        if other._get('post_run') is not None:
            other_post_run = self.freezePlaybooks(other.post_run, layout)
            self.post_run = other_post_run + self.post_run
        self.updateVariables(other.variables, other.extra_variables,
                             other.host_variables, other.group_variables)
        if other._get('required_projects') is not None:
            self.updateProjects(other.required_projects)
        # allowed_projects can only be narrowed, never widened.
        if (other._get('allowed_projects') is not None and
            self._get('allowed_projects') is not None):
            self.allowed_projects = frozenset(
                self.allowed_projects.intersection(
                    other.allowed_projects))
        elif other._get('allowed_projects') is not None:
            self.allowed_projects = other.allowed_projects
        for k in self.context_attributes:
            if (other._get(k) is not None and
                k not in set(['tags'])):
                setattr(self, k, other._get(k))
        if other._get('tags') is not None:
            self.tags = frozenset(self.tags.union(other.tags))
        self.inheritance_path = self.inheritance_path + (repr(other),)

    def changeMatchesBranch(self, change, override_branch=None):
        """Return whether the change (or the override branch, if given)
        satisfies this job's branch matchers."""
        if override_branch is None:
            branch_change = change
        else:
            # If an override branch is supplied, create a very basic
            # change (a Ref) and set its branch to the override
            # branch.
            branch_change = Ref(change.project)
            branch_change.ref = override_branch
        if self.branch_matcher and not self.branch_matcher.matches(
                branch_change):
            return False
        return True

    def changeMatchesFiles(self, change):
        """Return whether the change's files satisfy this job's file
        matchers."""
        if self.file_matcher and not self.file_matcher.matches(change):
            return False
        # NB: This is a negative match.
        if (self.irrelevant_file_matcher and
            self.irrelevant_file_matcher.matches(change)):
            return False
        return True
  1278. class JobProject(ConfigObject):
  1279. """ A reference to a project from a job. """
  1280. def __init__(self, project_name, override_branch=None,
  1281. override_checkout=None):
  1282. super(JobProject, self).__init__()
  1283. self.project_name = project_name
  1284. self.override_branch = override_branch
  1285. self.override_checkout = override_checkout
  1286. def toDict(self):
  1287. d = dict()
  1288. d['project_name'] = self.project_name
  1289. d['override_branch'] = self.override_branch
  1290. d['override_checkout'] = self.override_checkout
  1291. return d
  1292. class JobSemaphore(ConfigObject):
  1293. """ A reference to a semaphore from a job. """
  1294. def __init__(self, semaphore_name, resources_first=False):
  1295. super().__init__()
  1296. self.name = semaphore_name
  1297. self.resources_first = resources_first
  1298. def toDict(self):
  1299. d = dict()
  1300. d['name'] = self.name
  1301. d['resources_first'] = self.resources_first
  1302. return d
  1303. class JobList(ConfigObject):
  1304. """ A list of jobs in a project's pipeline. """
  1305. def __init__(self):
  1306. super(JobList, self).__init__()
  1307. self.jobs = OrderedDict() # job.name -> [job, ...]
  1308. def addJob(self, job):
  1309. if job.name in self.jobs:
  1310. self.jobs[job.name].append(job)
  1311. else:
  1312. self.jobs[job.name] = [job]
  1313. def inheritFrom(self, other):
  1314. for jobname, jobs in other.jobs.items():
  1315. joblist = self.jobs.setdefault(jobname, [])
  1316. for job in jobs:
  1317. if job not in joblist:
  1318. joblist.append(job)
  1319. class JobGraph(object):
  1320. """ A JobGraph represents the dependency graph between Job."""
  1321. def __init__(self):
  1322. self.jobs = OrderedDict() # job_name -> Job
  1323. self._dependencies = {} # dependent_job_name -> set(parent_job_names)
  1324. def __repr__(self):
  1325. return '<JobGraph %s>' % (self.jobs)
  1326. def addJob(self, job):
  1327. # A graph must be created after the job list is frozen,
  1328. # therefore we should only get one job with the same name.
  1329. if job.name in self.jobs:
  1330. raise Exception("Job %s already added" % (job.name,))
  1331. self.jobs[job.name] = job
  1332. # Append the dependency information
  1333. self._dependencies.setdefault(job.name, set())
  1334. try:
  1335. for dependency in job.dependencies:
  1336. # Make sure a circular dependency is never created
  1337. ancestor_jobs = self._getParentJobNamesRecursively(
  1338. dependency, soft=True)
  1339. ancestor_jobs.add(dependency)
  1340. if any((job.name == anc_job) for anc_job in ancestor_jobs):
  1341. raise Exception("Dependency cycle detected in job %s" %
  1342. (job.name,))
  1343. self._dependencies[job.name].add(dependency)
  1344. except Exception:
  1345. del self.jobs[job.name]
  1346. del self._dependencies[job.name]
  1347. raise
  1348. def getJobs(self):
  1349. return list(self.jobs.values()) # Report in the order of layout cfg
  1350. def getDirectDependentJobs(self, parent_job):
  1351. ret = set()
  1352. for dependent_name, parent_names in self._dependencies.items():
  1353. if parent_job in parent_names:
  1354. ret.add(dependent_name)
  1355. return ret
  1356. def getDependentJobsRecursively(self, parent_job):
  1357. all_dependent_jobs = set()
  1358. jobs_to_iterate = set([parent_job])
  1359. while len(jobs_to_iterate) > 0:
  1360. current_job = jobs_to_iterate.pop()
  1361. current_dependent_jobs = self.getDirectDependentJobs(current_job)
  1362. new_dependent_jobs = current_dependent_jobs - all_dependent_jobs
  1363. jobs_to_iterate |= new_dependent_jobs
  1364. all_dependent_jobs |= new_dependent_jobs
  1365. return [self.jobs[name] for name in all_dependent_jobs]
  1366. def getParentJobsRecursively(self, dependent_job, soft=False):
  1367. return [self.jobs[name] for name in
  1368. self._getParentJobNamesRecursively(dependent_job, soft)]
  1369. def _getParentJobNamesRecursively(self, dependent_job, soft=False):
  1370. all_parent_jobs = set()
  1371. jobs_to_iterate = set([dependent_job])
  1372. while len(jobs_to_iterate) > 0:
  1373. current_job = jobs_to_iterate.pop()
  1374. current_parent_jobs = self._dependencies.get(current_job)
  1375. if current_parent_jobs is None:
  1376. if soft:
  1377. current_parent_jobs = set()
  1378. else:
  1379. raise Exception("Job %s depends on %s which was not run." %
  1380. (dependent_job, current_job))
  1381. new_parent_jobs = current_parent_jobs - all_parent_jobs
  1382. jobs_to_iterate |= new_parent_jobs
  1383. all_parent_jobs |= new_parent_jobs
  1384. return all_parent_jobs
class Build(object):
    """A Build is an instance of a single execution of a Job.

    While a Job describes what to run, a Build describes an actual
    execution of that Job. Each build is associated with exactly one
    Job (related builds are grouped together in a BuildSet).
    """

    def __init__(self, job, uuid):
        self.job = job  # The frozen Job this build executes.
        self.uuid = uuid  # May be None for placeholder (e.g. SKIPPED) builds.
        self.url = None  # Live (finger/stream) URL while running.
        self.result = None  # e.g. 'SUCCESS', 'SKIPPED'; None while in flight.
        self.result_data = {}  # Structured data returned by the build.
        self.error_detail = None
        self.build_set = None  # Set when added to a BuildSet.
        self.execute_time = time.time()  # When the build was dispatched.
        self.start_time = None
        self.end_time = None
        self.estimated_time = None  # Expected duration in seconds, if known.
        self.canceled = False
        self.paused = False
        self.retry = False  # True if this build should be re-run.
        self.parameters = {}
        self.worker = Worker()  # Info about the executor running this build.
        self.node_labels = []
        self.node_name = None
        self.nodeset = None

    def __repr__(self):
        return ('<Build %s of %s voting:%s on %s>' %
                (self.uuid, self.job.name, self.job.voting, self.worker))

    @property
    def pipeline(self):
        # The pipeline is reached through the owning build set's item.
        return self.build_set.item.pipeline

    def getSafeAttributes(self):
        """Return a restricted view of this build safe for user-supplied
        format strings (no access to secrets or the wider object graph)."""
        return Attributes(uuid=self.uuid,
                          result=self.result,
                          error_detail=self.error_detail,
                          result_data=self.result_data)
  1422. class Worker(object):
  1423. """Information about the specific worker executing a Build."""
  1424. def __init__(self):
  1425. self.name = "Unknown"
  1426. self.hostname = None
  1427. self.log_port = None
  1428. def updateFromData(self, data):
  1429. """Update worker information if contained in the WORK_DATA response."""
  1430. self.name = data.get('worker_name', self.name)
  1431. self.hostname = data.get('worker_hostname', self.hostname)
  1432. self.log_port = data.get('worker_log_port', self.log_port)
  1433. def __repr__(self):
  1434. return '<Worker %s>' % self.name
  1435. class RepoFiles(object):
  1436. """RepoFiles holds config-file content for per-project job config.
  1437. When Zuul asks a merger to prepare a future multiple-repo state
  1438. and collect Zuul configuration files so that we can dynamically
  1439. load our configuration, this class provides cached access to that
  1440. data for use by the Change which updated the config files and any
  1441. changes that follow it in a ChangeQueue.
  1442. It is attached to a BuildSet since the content of Zuul
  1443. configuration files can change with each new BuildSet.
  1444. """
  1445. def __init__(self):
  1446. self.connections = {}
  1447. def __repr__(self):
  1448. return '<RepoFiles %s>' % self.connections
  1449. def setFiles(self, items):
  1450. self.hostnames = {}
  1451. for item in items:
  1452. connection = self.connections.setdefault(
  1453. item['connection'], {})
  1454. project = connection.setdefault(item['project'], {})
  1455. branch = project.setdefault(item['branch'], {})
  1456. branch.update(item['files'])
  1457. def getFile(self, connection_name, project_name, branch, fn):
  1458. host = self.connections.get(connection_name, {})
  1459. return host.get(project_name, {}).get(branch, {}).get(fn)
class BuildSet(object):
    """A collection of Builds for one specific potential future repository
    state.

    When Zuul executes Builds for a change, it creates a Build to
    represent each execution of each job and a BuildSet to keep track
    of all the Builds running for that Change. When Zuul re-executes
    Builds for a Change with a different configuration, all of the
    running Builds in the BuildSet for that change are aborted, and a
    new BuildSet is created to hold the Builds for the Jobs being
    run with the new configuration.

    A BuildSet also holds the UUID used to produce the Zuul Ref that
    builders check out.
    """
    # Merge states:
    NEW = 1
    PENDING = 2
    COMPLETE = 3

    states_map = {
        1: 'NEW',
        2: 'PENDING',
        3: 'COMPLETE',
    }

    def __init__(self, item):
        self.item = item  # The QueueItem this build set belongs to.
        self.builds = {}  # job name -> Build
        self.result = None  # The reported result for this build set.
        self.uuid = None  # Assigned in setConfiguration().
        self.commit = None
        self.dependent_changes = None  # Change dicts, oldest first.
        self.merger_items = None
        self.unable_to_merge = False
        self.config_errors = []  # list of ConfigurationErrors
        self.failing_reasons = []
        self.debug_messages = []
        self.warning_messages = []
        self.merge_state = self.NEW
        self.nodesets = {}  # job -> nodeset
        self.node_requests = {}  # job -> reqs
        self.files = RepoFiles()
        self.repo_state = {}
        self.tries = {}  # job name -> number of attempts

    @property
    def ref(self):
        # NOTE(jamielennox): The concept of buildset ref is to be removed and a
        # buildset UUID identifier available instead. Currently the ref is
        # checked to see if the BuildSet has been configured.
        return 'Z' + self.uuid if self.uuid else None

    def __repr__(self):
        return '<BuildSet item: %s #builds: %s merge state: %s>' % (
            self.item,
            len(self.builds),
            self.getStateName(self.merge_state))

    def setConfiguration(self):
        """Assign the UUID and snapshot the changes ahead in the queue."""
        # The change isn't enqueued until after it's created
        # so we don't know what the other changes ahead will be
        # until jobs start.
        if not self.uuid:
            self.uuid = uuid4().hex
        if self.dependent_changes is None:
            # Walk up the queue and record all items ahead, oldest first.
            items = [self.item]
            next_item = self.item.item_ahead
            while next_item:
                items.append(next_item)
                next_item = next_item.item_ahead
            items.reverse()
            self.dependent_changes = [i.change.toDict() for i in items]
            self.merger_items = [i.makeMergerItem() for i in items]

    def getStateName(self, state_num):
        """Return a human-readable name for a merge state constant."""
        return self.states_map.get(
            state_num, 'UNKNOWN (%s)' % state_num)

    def addBuild(self, build):
        """Register a build, start its try counter, and link it back here."""
        self.builds[build.job.name] = build
        if build.job.name not in self.tries:
            self.tries[build.job.name] = 1
        build.build_set = self

    def removeBuild(self, build):
        """Remove a build (e.g. for a retry) and count the extra attempt."""
        if build.job.name not in self.builds:
            return
        self.tries[build.job.name] += 1
        del self.builds[build.job.name]

    def getBuild(self, job_name):
        return self.builds.get(job_name)

    def getBuilds(self):
        # Return builds sorted by job name for stable ordering.
        keys = list(self.builds.keys())
        keys.sort()
        return [self.builds.get(x) for x in keys]

    def getJobNodeSet(self, job_name):
        # Return None if not provisioned; empty NodeSet if no nodes
        # required
        return self.nodesets.get(job_name)

    def removeJobNodeSet(self, job_name):
        if job_name not in self.nodesets:
            raise Exception("No job set for %s" % (job_name))
        del self.nodesets[job_name]

    def setJobNodeRequest(self, job_name, req):
        # Only one outstanding node request per job is allowed.
        if job_name in self.node_requests:
            raise Exception("Prior node request for %s" % (job_name))
        self.node_requests[job_name] = req

    def getJobNodeRequest(self, job_name):
        return self.node_requests.get(job_name)

    def jobNodeRequestComplete(self, job_name, req, nodeset):
        # Move the job from "requested" to "provisioned" state.
        if job_name in self.nodesets:
            raise Exception("Prior node request for %s" % (job_name))
        self.nodesets[job_name] = nodeset
        del self.node_requests[job_name]

    def getTries(self, job_name):
        """Return how many attempts have been made for *job_name* (0 if none)."""
        return self.tries.get(job_name, 0)

    def getMergeMode(self):
        # We may be called before this build set has a shadow layout
        # (ie, we are called to perform the merge to create that
        # layout). It's possible that the change we are merging will
        # update the merge-mode for the project, but there's not much
        # we can do about that here. Instead, do the best we can by
        # using the nearest shadow layout to determine the merge mode,
        # or if that fails, the current live layout, or if that fails,
        # use the default: merge-resolve.
        item = self.item
        layout = None
        while item:
            layout = item.layout
            if layout:
                break
            item = item.item_ahead
        if not layout:
            layout = self.item.pipeline.tenant.layout
        if layout:
            project = self.item.change.project
            project_metadata = layout.getProjectMetadata(
                project.canonical_name)
            if project_metadata:
                return project_metadata.merge_mode
        return MERGER_MERGE_RESOLVE

    def getSafeAttributes(self):
        """Return a restricted view safe for user-supplied format strings."""
        return Attributes(uuid=self.uuid)
class QueueItem(object):
    """Represents the position of a Change in a ChangeQueue.

    All Changes are enqueued into ChangeQueue in a QueueItem. The QueueItem
    holds the current `BuildSet` as well as all previous `BuildSets` that were
    produced for this `QueueItem`.
    """

    log = logging.getLogger("zuul.QueueItem")
def __init__(self, queue, change):
    self.pipeline = queue.pipeline
    self.queue = queue
    self.change = change  # a ref
    self.dequeued_needing_change = False
    self.current_build_set = BuildSet(self)
    self.item_ahead = None  # The QueueItem directly ahead in the queue.
    self.items_behind = []
    self.enqueue_time = None
    self.dequeue_time = None
    self.reported = False
    self.reported_start = False
    self.quiet = False
    self.active = False  # Whether an item is within an active window
    self.live = True  # Whether an item is intended to be processed at all
    # The following are populated when the job graph is frozen.
    self.layout = None
    self.project_pipeline_config = None
    self.job_graph = None
  1619. def __repr__(self):
  1620. if self.pipeline:
  1621. pipeline = self.pipeline.name
  1622. else:
  1623. pipeline = None
  1624. return '<QueueItem 0x%x for %s in %s>' % (
  1625. id(self), self.change, pipeline)
def resetAllBuilds(self):
    # Start over with a fresh BuildSet; the frozen layout, config and
    # job graph are discarded and must be recomputed.
    self.current_build_set = BuildSet(self)
    self.layout = None
    self.project_pipeline_config = None
    self.job_graph = None
def addBuild(self, build):
    """Attach a build to the current build set."""
    self.current_build_set.addBuild(build)

def removeBuild(self, build):
    """Remove a build from the current build set."""
    self.current_build_set.removeBuild(build)

def setReportedResult(self, result):
    """Record the overall result reported for the current build set."""
    self.current_build_set.result = result
  1637. def debug(self, msg, indent=0):
  1638. if (not self.project_pipeline_config or
  1639. not self.project_pipeline_config.debug):
  1640. return
  1641. if indent:
  1642. indent = ' ' * indent
  1643. else:
  1644. indent = ''
  1645. self.current_build_set.debug_messages.append(indent + msg)
  1646. def warning(self, msg):
  1647. self.current_build_set.warning_messages.append(msg)
def freezeJobGraph(self):
    """Find or create actual matching jobs for this item's change and
    store the resulting job tree.

    On any failure the item is left unconfigured (no ppc, no graph)
    and the exception is re-raised.
    """
    ppc = self.layout.getProjectPipelineConfig(self)
    try:
        # Conditionally set self.ppc so that the debug method can
        # consult it as we resolve the jobs.
        self.project_pipeline_config = ppc
        if ppc:
            for msg in ppc.debug_messages:
                self.debug(msg)
        job_graph = self.layout.createJobGraph(self, ppc)
        for job in job_graph.getJobs():
            # Ensure that each jobs's dependencies are fully
            # accessible. This will raise an exception if not.
            job_graph.getParentJobsRecursively(job.name)
        self.job_graph = job_graph
    except Exception:
        # Reset so a later attempt starts from a clean slate.
        self.project_pipeline_config = None
        self.job_graph = None
        raise
def hasJobGraph(self):
    """Returns True if the item has a job graph."""
    return self.job_graph is not None
  1672. def getJobs(self):
  1673. if not self.live or not self.job_graph:
  1674. return []
  1675. return self.job_graph.getJobs()
  1676. def getJob(self, name):
  1677. if not self.job_graph:
  1678. return None
  1679. return self.job_graph.jobs.get(name)
  1680. def haveAllJobsStarted(self):
  1681. if not self.hasJobGraph():
  1682. return False
  1683. for job in self.getJobs():
  1684. build = self.current_build_set.getBuild(job.name)
  1685. if not build or not build.start_time:
  1686. return False
  1687. return True
  1688. def areAllJobsComplete(self):
  1689. if (self.current_build_set.config_errors or
  1690. self.current_build_set.unable_to_merge):
  1691. return True
  1692. if not self.hasJobGraph():
  1693. return False
  1694. for job in self.getJobs():
  1695. build = self.current_build_set.getBuild(job.name)
  1696. if not build or not build.result:
  1697. return False
  1698. return True
def didAllJobsSucceed(self):
    """Check if all jobs have completed with status SUCCESS.

    Return True if all voting jobs have completed with status
    SUCCESS. Non-voting jobs are ignored. Skipped jobs are
    ignored, but skipping all jobs returns a failure. Incomplete
    builds are considered a failure, hence this is unlikely to be
    useful unless all builds are complete.
    """
    if not self.hasJobGraph():
        return False
    all_jobs_skipped = True
    for job in self.getJobs():
        build = self.current_build_set.getBuild(job.name)
        if build:
            # If the build ran, record whether or not it was skipped
            # and return False if the build was voting and has an
            # unsuccessful return value
            if build.result != 'SKIPPED':
                all_jobs_skipped = False
            if job.voting and build.result not in ['SUCCESS', 'SKIPPED']:
                return False
        elif job.voting:
            # If the build failed to run and was voting that is an
            # unsuccessful build. But we don't count against it if not
            # voting.
            return False
    # NOTE(pabelanger): We shouldn't be able to skip all jobs.
    if all_jobs_skipped:
        return False
    return True
  1729. def hasAnyJobFailed(self):
  1730. """Check if any jobs have finished with a non-success result.
  1731. Return True if any job in the job graph has returned with a
  1732. status not equal to SUCCESS, else return False. Non-voting
  1733. and in-flight jobs are ignored.
  1734. """
  1735. if not self.hasJobGraph():
  1736. return False
  1737. for job in self.getJobs():
  1738. if not job.voting:
  1739. continue
  1740. build = self.current_build_set.getBuild(job.name)
  1741. if build and build.result and (build.result != 'SUCCESS'):
  1742. return True
  1743. return False
def didMergerFail(self):
    """Return True if the merger could not merge this change."""
    return self.current_build_set.unable_to_merge

def getConfigErrors(self):
    """Return the list of configuration errors for the current build set."""
    return self.current_build_set.config_errors

def wasDequeuedNeedingChange(self):
    """Return the dequeued_needing_change flag for this item."""
    return self.dequeued_needing_change
def includesConfigUpdates(self):
    """Return (includes_trusted, includes_untrusted).

    Walks this item and every item ahead of it and reports whether any
    of those changes update Zuul configuration in a trusted and/or an
    untrusted project.
    """
    includes_trusted = False
    includes_untrusted = False
    tenant = self.pipeline.tenant
    item = self
    while item:
        if item.change.updatesConfig():
            (trusted, project) = tenant.getProject(
                item.change.project.canonical_name)
            if trusted:
                includes_trusted = True
            else:
                includes_untrusted = True
        if includes_trusted and includes_untrusted:
            # We're done early
            return (includes_trusted, includes_untrusted)
        item = item.item_ahead
    return (includes_trusted, includes_untrusted)
def isHoldingFollowingChanges(self):
    """Return True if jobs behind this item must wait.

    An item holds following changes when any of its jobs with
    hold_following_changes set has not yet completed successfully, or
    when any item ahead of it does.  Non-live items never hold.
    """
    if not self.live:
        return False
    if not self.hasJobGraph():
        return False
    for job in self.getJobs():
        if not job.hold_following_changes:
            continue
        build = self.current_build_set.getBuild(job.name)
        if not build:
            return True
        if build.result != 'SUCCESS':
            return True
    # Recurse up the queue: anything ahead holding also holds us.
    if not self.item_ahead:
        return False
    return self.item_ahead.isHoldingFollowingChanges()
def findJobsToRun(self, semaphore_handler):
    """Return the list of jobs ready to start right now.

    A job is ready when it has not started, all of its (transitive)
    parents have succeeded (or are paused), its nodes are provisioned,
    and any required semaphore can be acquired.
    """
    torun = []
    if not self.live:
        return []
    if not self.job_graph:
        return []
    if self.item_ahead:
        # Only run jobs if any 'hold' jobs on the change ahead
        # have completed successfully.
        if self.item_ahead.isHoldingFollowingChanges():
            return []
    successful_job_names = set()
    jobs_not_started = set()
    for job in self.job_graph.getJobs():
        build = self.current_build_set.getBuild(job.name)
        if build:
            # A paused build counts as successful for dependency purposes.
            if build.result == 'SUCCESS' or build.paused:
                successful_job_names.add(job.name)
        else:
            jobs_not_started.add(job)
    # Attempt to run jobs in the order they appear in
    # configuration.
    for job in self.job_graph.getJobs():
        if job not in jobs_not_started:
            continue
        all_parent_jobs_successful = True
        parent_builds_with_data = {}
        for parent_job in self.job_graph.getParentJobsRecursively(
                job.name):
            if parent_job.name not in successful_job_names:
                all_parent_jobs_successful = False
                break
            parent_build = self.current_build_set.getBuild(parent_job.name)
            if parent_build.result_data:
                parent_builds_with_data[parent_job.name] = parent_build
        if all_parent_jobs_successful:
            # Iterate in reverse order over all jobs of the graph (which is
            # in sorted config order) and apply parent data of the jobs we
            # already found.
            if len(parent_builds_with_data) > 0:
                for parent_job in reversed(self.job_graph.getJobs()):
                    parent_build = parent_builds_with_data.get(
                        parent_job.name)
                    if parent_build:
                        job.updateParentData(parent_build.result_data)
            nodeset = self.current_build_set.getJobNodeSet(job.name)
            if nodeset is None:
                # The nodes for this job are not ready, skip
                # it for now.
                continue
            if semaphore_handler.acquire(self, job, False):
                # If this job needs a semaphore, either acquire it or
                # make sure that we have it before running the job.
                torun.append(job)
    return torun
def findJobsToRequest(self, semaphore_handler):
    """Return the list of jobs whose node requests should be submitted.

    A job needs a request when it has neither a successful/paused
    build, a provisioned nodeset, nor an outstanding node request, and
    all of its (transitive) parents have succeeded.
    """
    build_set = self.current_build_set
    toreq = []
    if not self.live:
        return []
    if not self.job_graph:
        return []
    if self.item_ahead:
        if self.item_ahead.isHoldingFollowingChanges():
            return []
    successful_job_names = set()
    jobs_not_requested = set()
    for job in self.job_graph.getJobs():
        build = build_set.getBuild(job.name)
        if build and (build.result == 'SUCCESS' or build.paused):
            successful_job_names.add(job.name)
        else:
            nodeset = build_set.getJobNodeSet(job.name)
            if nodeset is None:
                req = build_set.getJobNodeRequest(job.name)
                if req is None:
                    jobs_not_requested.add(job)
    # Attempt to request nodes for jobs in the order jobs appear
    # in configuration.
    for job in self.job_graph.getJobs():
        if job not in jobs_not_requested:
            continue
        all_parent_jobs_successful = True
        for parent_job in self.job_graph.getParentJobsRecursively(
                job.name):
            if parent_job.name not in successful_job_names:
                all_parent_jobs_successful = False
                break
        if all_parent_jobs_successful:
            if semaphore_handler.acquire(self, job, True):
                # If this job needs a semaphore, either acquire it or
                # make sure that we have it before requesting the nodes.
                toreq.append(job)
    return toreq
def setResult(self, build):
    """Process a finished build's result.

    A retrying build is simply removed (a new one will be added).
    Otherwise, descendant jobs are marked SKIPPED when the build
    failed, or when a successful build's zuul_return restricted
    zuul.child_jobs.
    """
    if build.retry:
        self.removeBuild(build)
        return
    skipped = []
    # NOTE(pabelanger): Check successful jobs to see if zuul_return
    # includes zuul.child_jobs.
    build_result = build.result_data.get('zuul', {})
    if 'child_jobs' in build_result:
        zuul_return = build_result.get('child_jobs', [])
        dependent_jobs = self.job_graph.getDirectDependentJobs(
            build.job.name)
        if not zuul_return:
            # If zuul.child_jobs exists and is empty, user want to skip all
            # child jobs.
            skipped += self.job_graph.getDependentJobsRecursively(
                build.job.name)
        else:
            # We have list of jobs to run.
            intersect_jobs = dependent_jobs.intersection(zuul_return)
            # Skip any direct dependent not listed, plus everything
            # below it.
            for skip in (dependent_jobs - intersect_jobs):
                skipped.append(self.job_graph.jobs.get(skip))
                skipped += self.job_graph.getDependentJobsRecursively(
                    skip)
    elif build.result != 'SUCCESS' and not build.paused:
        skipped += self.job_graph.getDependentJobsRecursively(
            build.job.name)
    for job in skipped:
        # Record a placeholder build so the job shows as SKIPPED.
        fakebuild = Build(job, None)
        fakebuild.result = 'SKIPPED'
        self.addBuild(fakebuild)
def setNodeRequestFailure(self, job):
    """Record a NODE_FAILURE placeholder build for *job* and skip its
    dependents via setResult."""
    fakebuild = Build(job, None)
    fakebuild.start_time = time.time()
    fakebuild.end_time = time.time()
    self.addBuild(fakebuild)
    fakebuild.result = 'NODE_FAILURE'
    self.setResult(fakebuild)

def setDequeuedNeedingChange(self):
    """Mark this item dequeued due to an unmet change dependency and
    skip all of its jobs."""
    self.dequeued_needing_change = True
    self._setAllJobsSkipped()

def setUnableToMerge(self):
    """Mark the current build set as unmergeable and skip all jobs."""
    self.current_build_set.unable_to_merge = True
    self._setAllJobsSkipped()

def setConfigError(self, error):
    """Record a single configuration error string."""
    err = ConfigurationError(None, None, error)
    self.setConfigErrors([err])

def setConfigErrors(self, errors):
    """Record configuration errors and skip all jobs."""
    self.current_build_set.config_errors = errors
    self._setAllJobsSkipped()

def _setAllJobsSkipped(self):
    # Record SKIPPED placeholder builds for every job of this item.
    for job in self.getJobs():
        fakebuild = Build(job, None)
        fakebuild.result = 'SKIPPED'
        self.addBuild(fakebuild)

def getNodePriority(self):
    """Delegate node priority calculation to the pipeline manager."""
    return self.pipeline.manager.getNodePriority(self)
def formatUrlPattern(self, url_pattern, job=None, build=None):
    """Expand a user-supplied URL pattern with safe attributes.

    Returns the formatted URL, or None if the pattern references
    unknown keys/attributes (the error is logged, not raised).
    """
    url = None
    # Produce safe versions of objects which may be useful in
    # result formatting, but don't allow users to crawl through
    # the entire data structure where they might be able to access
    # secrets, etc.
    safe_change = self.change.getSafeAttributes()
    safe_pipeline = self.pipeline.getSafeAttributes()
    safe_tenant = self.pipeline.tenant.getSafeAttributes()
    safe_buildset = self.current_build_set.getSafeAttributes()
    safe_job = job.getSafeAttributes() if job else {}
    safe_build = build.getSafeAttributes() if build else {}
    try:
        url = url_pattern.format(change=safe_change,
                                 pipeline=safe_pipeline,
                                 tenant=safe_tenant,
                                 buildset=safe_buildset,
                                 job=safe_job,
                                 build=safe_build)
    except KeyError as e:
        self.log.error("Error while formatting url for job %s: unknown "
                       "key %s in pattern %s"
                       % (job, e.args[0], url_pattern))
    except AttributeError as e:
        self.log.error("Error while formatting url for job %s: unknown "
                       "attribute %s in pattern %s"
                       % (job, e.args[0], url_pattern))
    except Exception:
        self.log.exception("Error while formatting url for job %s with "
                           "pattern %s:" % (job, url_pattern))
    return url
def formatJobResult(self, job):
    """Return (result, url) for reporting a job's build.

    The result string may be replaced by the job's configured
    success/failure message; the URL is the job's configured
    success/failure URL (absolute, or joined onto the build's log
    URL when relative), falling back to the log URL, the build URL,
    or finally the job name.
    """
    build = self.current_build_set.getBuild(job.name)
    result = build.result
    pattern = None
    if result == 'SUCCESS':
        if job.success_message:
            result = job.success_message
        if job.success_url:
            pattern = job.success_url
    else:
        if job.failure_message:
            result = job.failure_message
        if job.failure_url:
            pattern = job.failure_url
    url = None  # The final URL
    default_url = build.result_data.get('zuul', {}).get('log_url')
    if pattern:
        job_url = self.formatUrlPattern(pattern, job, build)
    else:
        job_url = None
    try:
        if job_url:
            u = urllib.parse.urlparse(job_url)
            if u.scheme:
                # The job success or failure url is absolute, so it's
                # our final url.
                url = job_url
            else:
                # We have a relative job url. Combine it with our
                # default url.
                if default_url:
                    url = urllib.parse.urljoin(default_url, job_url)
    except Exception:
        self.log.exception("Error while parsing url for job %s:"
                           % (job,))
    if not url:
        url = default_url or build.url or job.name
    return (result, url)
def formatJSON(self, websocket_url=None):
    """Return a JSON-serializable dict describing this item for the
    status API, including per-job timing and result data."""
    ret = {}
    ret['active'] = self.active
    ret['live'] = self.live
    if hasattr(self.change, 'url') and self.change.url is not None:
        ret['url'] = self.change.url
    else:
        ret['url'] = None
    if hasattr(self.change, 'ref') and self.change.ref is not None:
        ret['ref'] = self.change.ref
    else:
        ret['ref'] = None
    ret['id'] = self.change._id()
    if self.item_ahead:
        ret['item_ahead'] = self.item_ahead.change._id()
    else:
        ret['item_ahead'] = None
    ret['items_behind'] = [i.change._id() for i in self.items_behind]
    ret['failing_reasons'] = self.current_build_set.failing_reasons
    ret['zuul_ref'] = self.current_build_set.ref
    if self.change.project:
        ret['project'] = self.change.project.name
        ret['project_canonical'] = self.change.project.canonical_name
    else:
        # For cross-project dependencies with the depends-on
        # project not known to zuul, the project is None
        # Set it to a static value
        ret['project'] = "Unknown Project"
        ret['project_canonical'] = "Unknown Project"
    ret['enqueue_time'] = int(self.enqueue_time * 1000)
    ret['jobs'] = []
    if hasattr(self.change, 'owner'):
        ret['owner'] = self.change.owner
    else:
        ret['owner'] = None
    max_remaining = 0
    for job in self.getJobs():
        now = time.time()
        build = self.current_build_set.getBuild(job.name)
        elapsed = None
        remaining = None
        result = None
        build_url = None
        finger_url = None
        report_url = None
        worker = None
        if build:
            result = build.result
            finger_url = build.url
            # TODO(tobiash): add support for custom web root
            urlformat = 'stream/{build.uuid}?' \
                        'logfile=console.log'
            if websocket_url:
                urlformat += '&websocket_url={websocket_url}'
            build_url = urlformat.format(
                build=build, websocket_url=websocket_url)
            (unused, report_url) = self.formatJobResult(job)
            if build.start_time:
                if build.end_time:
                    # Completed build: exact elapsed, nothing remaining.
                    elapsed = int((build.end_time -
                                   build.start_time) * 1000)
                    remaining = 0
                else:
                    # Running build: estimate remaining from the
                    # recorded estimated duration, floored at 0.
                    elapsed = int((now - build.start_time) * 1000)
                    if build.estimated_time:
                        remaining = max(
                            int(build.estimated_time * 1000) - elapsed,
                            0)
            worker = {
                'name': build.worker.name,
                'hostname': build.worker.hostname,
            }
        if remaining and remaining > max_remaining:
            max_remaining = remaining
        ret['jobs'].append({
            'name': job.name,
            'dependencies': list(job.dependencies),
            'elapsed_time': elapsed,
            'remaining_time': remaining,
            'url': build_url,
            'finger_url': finger_url,
            'report_url': report_url,
            'result': result,
            'voting': job.voting,
            'uuid': build.uuid if build else None,
            'execute_time': build.execute_time if build else None,
            'start_time': build.start_time if build else None,
            'end_time': build.end_time if build else None,
            'estimated_time': build.estimated_time if build else None,
            'pipeline': build.pipeline.name if build else None,
            'canceled': build.canceled if build else None,
            'paused': build.paused if build else None,
            'retry': build.retry if build else None,
            'node_labels': build.node_labels if build else [],
            'node_name': build.node_name if build else None,
            'worker': worker,
        })
    # Only report an item-level remaining time once every job has
    # started; otherwise it would be misleading.
    if self.haveAllJobsStarted():
        ret['remaining_time'] = max_remaining
    else:
        ret['remaining_time'] = None
    return ret
def formatStatus(self, indent=0, html=False):
    """Render a human-readable status report for this queue item.

    :param indent: number of spaces prefixed to every output line.
    :param html: if True, emit the change and job names as HTML
        anchors when a URL is available; otherwise plain text.
    :returns: a multi-line string describing the change and the
        result of each job in the current build set.
    """
    indent_str = ' ' * indent
    ret = ''
    if html and getattr(self.change, 'url', None) is not None:
        ret += '%sProject %s change <a href="%s">%s</a>\n' % (
            indent_str,
            self.change.project.name,
            self.change.url,
            self.change._id())
    else:
        ret += '%sProject %s change %s based on %s\n' % (
            indent_str,
            self.change.project.name,
            self.change._id(),
            self.item_ahead)
    for job in self.getJobs():
        build = self.current_build_set.getBuild(job.name)
        # A job with no build yet reports a result of None.
        if build:
            result = build.result
        else:
            result = None
        job_name = job.name
        if not job.voting:
            voting = ' (non-voting)'
        else:
            voting = ''
        if html:
            if build:
                url = build.url
            else:
                url = None
            if url is not None:
                job_name = '<a href="%s">%s</a>' % (url, job_name)
        ret += '%s %s: %s%s' % (indent_str, job_name, result, voting)
        ret += '\n'
    return ret
def makeMergerItem(self):
    # Create a dictionary with all info about the item needed by
    # the merger.
    #
    # Not every change type carries every attribute (e.g. a Ref has
    # no number/patchset; a review has no newrev), so each field
    # defaults to None and is filled in only when present.
    number = None
    patchset = None
    oldrev = None
    newrev = None
    branch = None
    if hasattr(self.change, 'number'):
        number = self.change.number
        patchset = self.change.patchset
    if hasattr(self.change, 'newrev'):
        oldrev = self.change.oldrev
        newrev = self.change.newrev
    if hasattr(self.change, 'branch'):
        branch = self.change.branch
    source = self.change.project.source
    connection_name = source.connection.connection_name
    project = self.change.project
    return dict(project=project.name,
                connection=connection_name,
                merge_mode=self.current_build_set.getMergeMode(),
                ref=self.change.ref,
                branch=branch,
                buildset_uuid=self.current_build_set.uuid,
                number=number,
                patchset=patchset,
                oldrev=oldrev,
                newrev=newrev,
                )
  2172. class Ref(object):
  2173. """An existing state of a Project."""
  2174. def __init__(self, project):
  2175. self.project = project
  2176. self.ref = None
  2177. self.oldrev = None
  2178. self.newrev = None
  2179. self.files = []
  2180. def _id(self):
  2181. return self.newrev
  2182. def __repr__(self):
  2183. rep = None
  2184. pname = None
  2185. if self.project and self.project.name:
  2186. pname = self.project.name
  2187. if self.newrev == '0000000000000000000000000000000000000000':
  2188. rep = '<%s 0x%x %s deletes %s from %s' % (
  2189. type(self).__name__, id(self), pname,
  2190. self.ref, self.oldrev)
  2191. elif self.oldrev == '0000000000000000000000000000000000000000':
  2192. rep = '<%s 0x%x %s creates %s on %s>' % (
  2193. type(self).__name__, id(self), pname,
  2194. self.ref, self.newrev)
  2195. else:
  2196. # Catch all
  2197. rep = '<%s 0x%x %s %s updated %s..%s>' % (
  2198. type(self).__name__, id(self), pname,
  2199. self.ref, self.oldrev, self.newrev)
  2200. return rep
  2201. def equals(self, other):
  2202. if (self.project == other.project
  2203. and self.ref == other.ref
  2204. and self.newrev == other.newrev):
  2205. return True
  2206. return False
  2207. def isUpdateOf(self, other):
  2208. return False
  2209. def getRelatedChanges(self):
  2210. return set()
  2211. def updatesConfig(self):
  2212. if 'zuul.yaml' in self.files or '.zuul.yaml' in self.files or \
  2213. [True for fn in self.files if fn.startswith("zuul.d/") or
  2214. fn.startswith(".zuul.d/")]:
  2215. return True
  2216. return False
  2217. def getSafeAttributes(self):
  2218. return Attributes(project=self.project,
  2219. ref=self.ref,
  2220. oldrev=self.oldrev,
  2221. newrev=self.newrev)
  2222. def toDict(self):
  2223. # Render to a dict to use in passing json to the executor
  2224. d = dict()
  2225. d['project'] = dict(
  2226. name=self.project.name,
  2227. short_name=self.project.name.split('/')[-1],
  2228. canonical_hostname=self.project.canonical_hostname,
  2229. canonical_name=self.project.canonical_name,
  2230. src_dir=os.path.join('src', self.project.canonical_name),
  2231. )
  2232. return d
  2233. class Branch(Ref):
  2234. """An existing branch state for a Project."""
  2235. def __init__(self, project):
  2236. super(Branch, self).__init__(project)
  2237. self.branch = None
  2238. def toDict(self):
  2239. # Render to a dict to use in passing json to the executor
  2240. d = super(Branch, self).toDict()
  2241. d['branch'] = self.branch
  2242. return d
class Tag(Ref):
    """An existing tag state for a Project."""

    def __init__(self, project):
        super(Tag, self).__init__(project)
        # The tag name (e.g. 'v1.0').
        self.tag = None
class Change(Branch):
    """A proposed new state for a Project."""

    def __init__(self, project):
        super(Change, self).__init__(project)
        # The review/PR number in the code review system.
        self.number = None
        # The gitweb url for browsing the change
        self.url = None
        # URIs for this change which may appear in depends-on headers.
        # Note this omits the scheme; i.e., is hostname/path.
        self.uris = []
        self.patchset = None
        # Changes that the source determined are needed due to the
        # git DAG:
        self.git_needs_changes = []
        self.git_needed_by_changes = []
        # Changes that the source determined are needed by backwards
        # compatible processing of Depends-On headers (Gerrit only):
        self.compat_needs_changes = []
        self.compat_needed_by_changes = []
        # Changes that the pipeline manager determined are needed due
        # to Depends-On headers (all drivers):
        self.commit_needs_changes = None
        self.refresh_deps = False
        self.is_current_patchset = True
        self.can_merge = False
        self.is_merged = False
        self.failed_to_merge = False
        self.open = None
        self.status = None
        self.owner = None
        # This may be the commit message, or it may be a cover message
        # in the case of a PR.  Either way, it's the place where we
        # look for depends-on headers.
        self.message = None
        self.source_event = None

    def _id(self):
        # e.g. '12345,2' -- change number and patchset.
        return '%s,%s' % (self.number, self.patchset)

    def __repr__(self):
        pname = None
        if self.project and self.project.name:
            pname = self.project.name
        return '<Change 0x%x %s %s>' % (id(self), pname, self._id())

    def equals(self, other):
        # Changes are equal when number and patchset both match.
        if self.number == other.number and self.patchset == other.patchset:
            return True
        return False

    @property
    def needs_changes(self):
        # NOTE(review): commit_needs_changes starts as None, so this
        # assumes it has been populated with a list before use --
        # confirm against the pipeline manager.
        return (self.git_needs_changes + self.compat_needs_changes +
                self.commit_needs_changes)

    @property
    def needed_by_changes(self):
        return (self.git_needed_by_changes + self.compat_needed_by_changes)

    def isUpdateOf(self, other):
        # True when 'other' is an older patchset of this same change
        # in the same project.
        if (self.project == other.project and
            (hasattr(other, 'number') and self.number == other.number) and
            (hasattr(other, 'patchset') and
             self.patchset is not None and
             other.patchset is not None and
             int(self.patchset) > int(other.patchset))):
            return True
        return False

    def getRelatedChanges(self):
        # Direct dependencies in both directions; recurses through the
        # needed-by direction only.
        related = set()
        for c in self.needs_changes:
            related.add(c)
        for c in self.needed_by_changes:
            related.add(c)
            related.update(c.getRelatedChanges())
        return related

    def getSafeAttributes(self):
        # Restricted view safe to expose to untrusted job content.
        return Attributes(project=self.project,
                          number=self.number,
                          patchset=self.patchset)

    def toDict(self):
        # Render to a dict to use in passing json to the executor
        d = super(Change, self).toDict()
        d['change'] = str(self.number)
        d['change_url'] = self.url
        d['patchset'] = str(self.patchset)
        return d
  2329. class TriggerEvent(object):
  2330. """Incoming event from an external system."""
  2331. def __init__(self):
  2332. # TODO(jeblair): further reduce this list
  2333. self.data = None
  2334. # common
  2335. self.type = None
  2336. self.branch_updated = False
  2337. self.branch_created = False
  2338. self.branch_deleted = False
  2339. self.branch_protected = True
  2340. self.ref = None
  2341. # For management events (eg: enqueue / promote)
  2342. self.tenant_name = None
  2343. self.project_hostname = None
  2344. self.project_name = None
  2345. self.trigger_name = None
  2346. # Representation of the user account that performed the event.
  2347. self.account = None
  2348. # patchset-created, comment-added, etc.
  2349. self.change_number = None
  2350. self.change_url = None
  2351. self.patch_number = None
  2352. self.branch = None
  2353. self.comment = None
  2354. self.state = None
  2355. # ref-updated
  2356. self.oldrev = None
  2357. self.newrev = None
  2358. # For events that arrive with a destination pipeline (eg, from
  2359. # an admin command, etc):
  2360. self.forced_pipeline = None
  2361. @property
  2362. def canonical_project_name(self):
  2363. return self.project_hostname + '/' + self.project_name
  2364. def isPatchsetCreated(self):
  2365. return False
  2366. def isChangeAbandoned(self):
  2367. return False
  2368. def _repr(self):
  2369. flags = [str(self.type)]
  2370. if self.project_name:
  2371. flags.append(self.project_name)
  2372. if self.ref:
  2373. flags.append(self.ref)
  2374. if self.branch_updated:
  2375. flags.append('branch_updated')
  2376. if self.branch_created:
  2377. flags.append('branch_created')
  2378. if self.branch_deleted:
  2379. flags.append('branch_deleted')
  2380. return ' '.join(flags)
  2381. def __repr__(self):
  2382. return '<%s 0x%x %s>' % (self.__class__.__name__,
  2383. id(self), self._repr())
class BaseFilter(ConfigObject):
    """Base Class for filtering which Changes and Events to process."""
    # Concrete filters (EventFilter, RefFilter) override matches().
    pass
class EventFilter(BaseFilter):
    """Allows a Pipeline to only respond to certain events."""

    def __init__(self, trigger):
        super(EventFilter, self).__init__()
        # The trigger configuration that created this filter.
        self.trigger = trigger

    def matches(self, event, ref):
        # TODO(jeblair): consider removing ref argument
        # Base implementation matches every event; driver-specific
        # subclasses narrow this.
        return True
class RefFilter(BaseFilter):
    """Allows a Manager to only enqueue Changes that meet certain criteria."""

    def __init__(self, connection_name):
        super(RefFilter, self).__init__()
        # Name of the connection whose changes this filter applies to.
        self.connection_name = connection_name

    def matches(self, change):
        # Base implementation accepts every change.
        return True
class TenantProjectConfig(object):
    """A project in the context of a tenant.

    A Project is globally unique in the system, however, when used in
    a tenant, some metadata about the project local to the tenant is
    stored in a TenantProjectConfig.
    """

    def __init__(self, project):
        self.project = project
        self.load_classes = set()
        # Projects whose job definitions this project is permitted to
        # shadow (consulted by Layout.addJob).
        self.shadow_projects = set()
        self.branches = []
        # The tenant's default setting of exclude_unprotected_branches will
        # be overridden by this one if not None.
        self.exclude_unprotected_branches = None
        self.parsed_branch_config = {}  # branch -> ParsedConfig
class ProjectPipelineConfig(ConfigObject):
    # Represents a project configuration in the context of a pipeline

    def __init__(self):
        super(ProjectPipelineConfig, self).__init__()
        self.job_list = JobList()
        self.queue_name = None
        # Debug mode records human-readable matching decisions.
        self.debug = False
        self.debug_messages = []
        self.variables = {}

    def addDebug(self, msg):
        # Record a note explaining a pipeline-matching decision.
        self.debug_messages.append(msg)

    def update(self, other):
        """Merge another ProjectPipelineConfig into this one.

        The first queue name seen wins, debug is sticky once enabled,
        and job lists are inherited.  Variables are merged separately
        via updateVariables().
        """
        if not isinstance(other, ProjectPipelineConfig):
            raise Exception("Unable to update from %s" % (other,))
        if self.queue_name is None:
            self.queue_name = other.queue_name
        if other.debug:
            self.debug = other.debug
        self.job_list.inheritFrom(other.job_list)

    def updateVariables(self, other):
        # We need to keep this separate to update() because we wish to
        # apply the project variables all the time, even if its jobs
        # only come from templates.
        self.variables = Job._deepUpdate(self.variables, other)

    def toDict(self):
        d = {}
        d['queue_name'] = self.queue_name
        return d
class ProjectConfig(ConfigObject):
    # Represents a project configuration

    def __init__(self, name):
        super(ProjectConfig, self).__init__()
        self.name = name
        self.templates = []
        # Pipeline name -> ProjectPipelineConfig
        self.pipelines = {}
        self.branch_matcher = None
        self.variables = {}
        # These represent the values from the config file, but should
        # not be used directly; instead, use the ProjectMetadata to
        # find the computed value from across all project config
        # stanzas.
        self.merge_mode = None
        self.default_branch = None

    def __repr__(self):
        return '<ProjectConfig %s source: %s %s>' % (
            self.name, self.source_context, self.branch_matcher)

    def copy(self):
        # Shallow copy: templates/pipelines/variables structures are
        # shared with the original.
        r = self.__class__(self.name)
        r.source_context = self.source_context
        r.start_mark = self.start_mark
        r.templates = self.templates
        r.pipelines = self.pipelines
        r.branch_matcher = self.branch_matcher
        r.variables = self.variables
        r.merge_mode = self.merge_mode
        r.default_branch = self.default_branch
        return r

    def setImpliedBranchMatchers(self, branches):
        # No branches: the stanza matches everything; several: match
        # any of them; exactly one: match just that branch.
        if len(branches) == 0:
            self.branch_matcher = None
        elif len(branches) > 1:
            matchers = [change_matcher.ImpliedBranchMatcher(branch)
                        for branch in branches]
            self.branch_matcher = change_matcher.MatchAny(matchers)
        else:
            self.branch_matcher = change_matcher.ImpliedBranchMatcher(
                branches[0])

    def changeMatches(self, change):
        # A stanza without a branch matcher applies to every change.
        if self.branch_matcher and not self.branch_matcher.matches(change):
            return False
        return True

    def toDict(self):
        # merge_mode is stored as an internal constant; map it back to
        # its configuration-file name via MERGER_MAP.
        d = {}
        d['default_branch'] = self.default_branch
        if self.merge_mode:
            d['merge_mode'] = list(filter(lambda x: x[1] == self.merge_mode,
                                          MERGER_MAP.items()))[0][0]
        else:
            d['merge_mode'] = None
        d['templates'] = self.templates
        return d
class ProjectMetadata(object):
    """Information about a Project

    A Layout holds one of these for each project it knows about.
    Information about the project which is synthesized from multiple
    ProjectConfig objects is stored here.
    """

    def __init__(self):
        # The first non-None value across ProjectConfig stanzas wins
        # (see Layout.addProjectConfig).
        self.merge_mode = None
        self.default_branch = None
class ConfigItemNotListError(Exception):
    """Raised when the top level of a zuul.yaml file is not a YAML list."""

    def __init__(self):
        message = textwrap.dedent("""\
        Configuration file is not a list. Each zuul.yaml configuration
        file must be a list of items, for example:

        - job:
            name: foo

        - project:
            name: bar

        Ensure that every item starts with "- " so that it is parsed as a
        YAML list.
        """)
        super(ConfigItemNotListError, self).__init__(message)
class ConfigItemNotDictError(Exception):
    """Raised when a configuration list item is not a dictionary."""

    def __init__(self):
        message = textwrap.dedent("""\
        Configuration item is not a dictionary. Each zuul.yaml
        configuration file must be a list of dictionaries, for
        example:

        - job:
            name: foo

        - project:
            name: bar

        Ensure that every item in the list is a dictionary with one
        key (in this example, 'job' and 'project').
        """)
        super(ConfigItemNotDictError, self).__init__(message)
class ConfigItemMultipleKeysError(Exception):
    """Raised when a configuration item dictionary has more than one key."""

    def __init__(self):
        message = textwrap.dedent("""\
        Configuration item has more than one key. Each zuul.yaml
        configuration file must be a list of dictionaries with a
        single key, for example:

        - job:
            name: foo

        - project:
            name: bar

        Ensure that every item in the list is a dictionary with only
        one key (in this example, 'job' and 'project'). This error
        may be caused by insufficient indentation of the keys under
        the configuration item ('name' in this example).
        """)
        super(ConfigItemMultipleKeysError, self).__init__(message)
class ConfigItemUnknownError(Exception):
    """Raised when a configuration item key is not a recognized type."""

    def __init__(self):
        message = textwrap.dedent("""\
        Configuration item not recognized. Each zuul.yaml
        configuration file must be a list of dictionaries, for
        example:

        - job:
            name: foo

        - project:
            name: bar

        The dictionary keys must match one of the configuration item
        types recognized by zuul (for example, 'job' or 'project').
        """)
        super(ConfigItemUnknownError, self).__init__(message)
  2565. class UnparsedAbideConfig(object):
  2566. """A collection of yaml lists that has not yet been parsed into objects.
  2567. An Abide is a collection of tenants.
  2568. """
  2569. def __init__(self):
  2570. self.tenants = []
  2571. self.known_tenants = set()
  2572. def extend(self, conf):
  2573. if isinstance(conf, UnparsedAbideConfig):
  2574. self.tenants.extend(conf.tenants)
  2575. return
  2576. if not isinstance(conf, list):
  2577. raise ConfigItemNotListError()
  2578. for item in conf:
  2579. if not isinstance(item, dict):
  2580. raise ConfigItemNotDictError()
  2581. if len(item.keys()) > 1:
  2582. raise ConfigItemMultipleKeysError()
  2583. key, value = list(item.items())[0]
  2584. if key == 'tenant':
  2585. self.tenants.append(value)
  2586. if 'name' in value:
  2587. self.known_tenants.add(value['name'])
  2588. else:
  2589. raise ConfigItemUnknownError()
  2590. class UnparsedConfig(object):
  2591. """A collection of yaml lists that has not yet been parsed into objects."""
  2592. def __init__(self):
  2593. self.pragmas = []
  2594. self.pipelines = []
  2595. self.jobs = []
  2596. self.project_templates = []
  2597. self.projects = []
  2598. self.nodesets = []
  2599. self.secrets = []
  2600. self.semaphores = []
  2601. def copy(self, trusted=None):
  2602. # If trusted is not None, update the source context of each
  2603. # object in the copy.
  2604. r = UnparsedConfig()
  2605. # Keep a cache of all the source contexts indexed by
  2606. # project-branch-path so that we can share them across objects
  2607. source_contexts = {}
  2608. for attr in ['pragmas', 'pipelines', 'jobs', 'project_templates',
  2609. 'projects', 'nodesets', 'secrets', 'semaphores']:
  2610. # Make a deep copy of each of our attributes
  2611. old_objlist = getattr(self, attr)
  2612. new_objlist = copy.deepcopy(old_objlist)
  2613. setattr(r, attr, new_objlist)
  2614. for i, new_obj in enumerate(new_objlist):
  2615. old_obj = old_objlist[i]
  2616. key = (old_obj['_source_context'].project,
  2617. old_obj['_source_context'].branch,
  2618. old_obj['_source_context'].path)
  2619. new_sc = source_contexts.get(key)
  2620. if not new_sc:
  2621. new_sc = new_obj['_source_context']
  2622. if trusted is not None:
  2623. new_sc.trusted = trusted
  2624. source_contexts[key] = new_sc
  2625. else:
  2626. new_obj['_source_context'] = new_sc
  2627. return r
  2628. def extend(self, conf):
  2629. if isinstance(conf, UnparsedConfig):
  2630. self.pragmas.extend(conf.pragmas)
  2631. self.pipelines.extend(conf.pipelines)
  2632. self.jobs.extend(conf.jobs)
  2633. self.project_templates.extend(conf.project_templates)
  2634. self.projects.extend(conf.projects)
  2635. self.nodesets.extend(conf.nodesets)
  2636. self.secrets.extend(conf.secrets)
  2637. self.semaphores.extend(conf.semaphores)
  2638. return
  2639. if not isinstance(conf, list):
  2640. raise ConfigItemNotListError()
  2641. for item in conf:
  2642. if not isinstance(item, dict):
  2643. raise ConfigItemNotDictError()
  2644. if len(item.keys()) > 1:
  2645. raise ConfigItemMultipleKeysError()
  2646. key, value = list(item.items())[0]
  2647. if key == 'project':
  2648. self.projects.append(value)
  2649. elif key == 'job':
  2650. self.jobs.append(value)
  2651. elif key == 'project-template':
  2652. self.project_templates.append(value)
  2653. elif key == 'pipeline':
  2654. self.pipelines.append(value)
  2655. elif key == 'nodeset':
  2656. self.nodesets.append(value)
  2657. elif key == 'secret':
  2658. self.secrets.append(value)
  2659. elif key == 'semaphore':
  2660. self.semaphores.append(value)
  2661. elif key == 'pragma':
  2662. self.pragmas.append(value)
  2663. else:
  2664. raise ConfigItemUnknownError()
  2665. class ParsedConfig(object):
  2666. """A collection of parsed config objects."""
  2667. def __init__(self):
  2668. self.pragmas = []
  2669. self.pipelines = []
  2670. self.jobs = []
  2671. self.project_templates = []
  2672. self.projects = []
  2673. self.projects_by_regex = {}
  2674. self.nodesets = []
  2675. self.secrets = []
  2676. self.semaphores = []
  2677. def copy(self):
  2678. r = ParsedConfig()
  2679. r.pragmas = self.pragmas[:]
  2680. r.pipelines = self.pipelines[:]
  2681. r.jobs = self.jobs[:]
  2682. r.project_templates = self.project_templates[:]
  2683. r.projects = self.projects[:]
  2684. r.projects_by_regex = copy.copy(self.projects_by_regex)
  2685. r.nodesets = self.nodesets[:]
  2686. r.secrets = self.secrets[:]
  2687. r.semaphores = self.semaphores[:]
  2688. return r
  2689. def extend(self, conf):
  2690. if isinstance(conf, ParsedConfig):
  2691. self.pragmas.extend(conf.pragmas)
  2692. self.pipelines.extend(conf.pipelines)
  2693. self.jobs.extend(conf.jobs)
  2694. self.project_templates.extend(conf.project_templates)
  2695. self.projects.extend(conf.projects)
  2696. self.nodesets.extend(conf.nodesets)
  2697. self.secrets.extend(conf.secrets)
  2698. self.semaphores.extend(conf.semaphores)
  2699. for regex, projects in conf.projects_by_regex.items():
  2700. self.projects_by_regex.setdefault(regex, []).extend(projects)
  2701. return
  2702. else:
  2703. raise ConfigItemUnknownError()
  2704. class Layout(object):
  2705. """Holds all of the Pipelines."""
  2706. log = logging.getLogger("zuul.layout")
def __init__(self, tenant):
    # Unique id for this layout instance (a new one is generated on
    # each reconfiguration).
    self.uuid = uuid4().hex
    self.tenant = tenant
    self.project_configs = {}
    self.project_templates = {}
    self.project_metadata = {}
    self.pipelines = OrderedDict()
    # This is a dictionary of name -> [jobs].  The first element
    # of the list is the first job added with that name.  It is
    # the reference definition for a given job.  Subsequent
    # elements are aspects of that job with different matchers
    # that override some attribute of the job.  These aspects all
    # inherit from the reference definition.
    noop = Job('noop')
    noop.description = 'A job that will always succeed, no operation.'
    noop.parent = noop.BASE_JOB_MARKER
    noop.run = (PlaybookContext(None, 'noop.yaml', [], []),)
    self.jobs = {'noop': [noop]}
    self.nodesets = {}
    self.secrets = {}
    self.semaphores = {}
    self.loading_errors = LoadingErrors()
  2729. def getJob(self, name):
  2730. if name in self.jobs:
  2731. return self.jobs[name][0]
  2732. raise Exception("Job %s not defined" % (name,))
def hasJob(self, name):
    """Return True if any job with this name is defined."""
    return name in self.jobs
def getJobs(self, name):
    """Return all variants of the named job (empty list if none)."""
    return self.jobs.get(name, [])
def addJob(self, job):
    """Add a job variant to the layout.

    Returns True if the job was added, False if it was skipped
    because this repo's definition is shadowed.
    """
    # We can have multiple variants of a job all with the same
    # name, but these variants must all be defined in the same repo.
    prior_jobs = [j for j in self.getJobs(job.name) if
                  j.source_context.project !=
                  job.source_context.project]
    # Unless the repo is permitted to shadow another.  If so, and
    # the job we are adding is from a repo that is permitted to
    # shadow the one with the older jobs, skip adding this job.
    job_project = job.source_context.project
    job_tpc = self.tenant.project_configs[job_project.canonical_name]
    skip_add = False
    for prior_job in prior_jobs[:]:
        prior_project = prior_job.source_context.project
        if prior_project in job_tpc.shadow_projects:
            prior_jobs.remove(prior_job)
            skip_add = True
    # Any remaining prior job is from a different, unshadowed repo:
    # that is a conflict.
    if prior_jobs:
        raise Exception("Job %s in %s is not permitted to shadow "
                        "job %s in %s" % (
                            job,
                            job.source_context.project,
                            prior_jobs[0],
                            prior_jobs[0].source_context.project))
    if skip_add:
        return False
    if job.name in self.jobs:
        self.jobs[job.name].append(job)
    else:
        self.jobs[job.name] = [job]
    return True
def addNodeSet(self, nodeset):
    # It's ok to have a duplicate nodeset definition, but only if
    # they are in different branches of the same repo, and have
    # the same values.
    other = self.nodesets.get(nodeset.name)
    if other is not None:
        if not nodeset.source_context.isSameProject(other.source_context):
            raise Exception("Nodeset %s already defined in project %s" %
                            (nodeset.name, other.source_context.project))
        if nodeset.source_context.branch == other.source_context.branch:
            raise Exception("Nodeset %s already defined" % (nodeset.name,))
        if nodeset != other:
            raise Exception("Nodeset %s does not match existing definition"
                            " in branch %s" %
                            (nodeset.name, other.source_context.branch))
        # Identical data in a different branch of the same project;
        # ignore the duplicate definition
        return
    self.nodesets[nodeset.name] = nodeset
def addSecret(self, secret):
    # It's ok to have a duplicate secret definition, but only if
    # they are in different branches of the same repo, and have
    # the same values.
    other = self.secrets.get(secret.name)
    if other is not None:
        if not secret.source_context.isSameProject(other.source_context):
            raise Exception("Secret %s already defined in project %s" %
                            (secret.name, other.source_context.project))
        if secret.source_context.branch == other.source_context.branch:
            raise Exception("Secret %s already defined" % (secret.name,))
        # Secrets compare via areDataEqual (encrypted payloads).
        if not secret.areDataEqual(other):
            raise Exception("Secret %s does not match existing definition"
                            " in branch %s" %
                            (secret.name, other.source_context.branch))
        # Identical data in a different branch of the same project;
        # ignore the duplicate definition
        return
    self.secrets[secret.name] = secret
def addSemaphore(self, semaphore):
    # It's ok to have a duplicate semaphore definition, but only if
    # they are in different branches of the same repo, and have
    # the same values.
    other = self.semaphores.get(semaphore.name)
    if other is not None:
        if not semaphore.source_context.isSameProject(
                other.source_context):
            raise Exception("Semaphore %s already defined in project %s" %
                            (semaphore.name, other.source_context.project))
        if semaphore.source_context.branch == other.source_context.branch:
            raise Exception("Semaphore %s already defined" %
                            (semaphore.name,))
        if semaphore != other:
            raise Exception("Semaphore %s does not match existing"
                            " definition in branch %s" %
                            (semaphore.name, other.source_context.branch))
        # Identical data in a different branch of the same project;
        # ignore the duplicate definition
        return
    self.semaphores[semaphore.name] = semaphore
def addPipeline(self, pipeline):
    """Register a pipeline; it must belong to this layout's tenant."""
    if pipeline.tenant is not self.tenant:
        raise Exception("Pipeline created for tenant %s "
                        "may not be added to %s" % (
                            pipeline.tenant,
                            self.tenant))
    self.pipelines[pipeline.name] = pipeline
def addProjectTemplate(self, project_template):
    """Add a project-template variant.

    All variants of a template must be defined in the same project;
    a definition from a different project is an error.
    """
    template_list = self.project_templates.get(project_template.name)
    if template_list is not None:
        # The first variant is the reference definition.
        reference = template_list[0]
        if (reference.source_context.project !=
            project_template.source_context.project):
            raise Exception("Project template %s is already defined" %
                            (project_template.name,))
    else:
        template_list = self.project_templates.setdefault(
            project_template.name, [])
    template_list.append(project_template)
  2846. def getProjectTemplates(self, name):
  2847. pt = self.project_templates.get(name, None)
  2848. if pt is None:
  2849. raise TemplateNotFoundError("Project template %s not found" % name)
  2850. return pt
def addProjectConfig(self, project_config):
    """Add a project stanza and fold its values into ProjectMetadata."""
    if project_config.name in self.project_configs:
        self.project_configs[project_config.name].append(project_config)
    else:
        self.project_configs[project_config.name] = [project_config]
        self.project_metadata[project_config.name] = ProjectMetadata()
    md = self.project_metadata[project_config.name]
    # For each synthesized value, the first stanza to supply a
    # non-None value wins.
    if md.merge_mode is None and project_config.merge_mode is not None:
        md.merge_mode = project_config.merge_mode
    if (md.default_branch is None and
        project_config.default_branch is not None):
        md.default_branch = project_config.default_branch
def getProjectConfigs(self, name):
    """Return the project stanzas for name (empty list if none)."""
    return self.project_configs.get(name, [])
def getAllProjectConfigs(self, name):
    # Get all the project configs (project and project-template
    # stanzas) for a project.
    try:
        ret = []
        for pc in self.getProjectConfigs(name):
            ret.append(pc)
            for template_name in pc.templates:
                templates = self.getProjectTemplates(template_name)
                ret.extend(templates)
        return ret
    except TemplateNotFoundError as e:
        # A missing template is logged but treated as "no config"
        # rather than a hard failure.
        self.log.warning("%s for project %s" % (e, name))
        return []
  2879. def getProjectMetadata(self, name):
  2880. if name in self.project_metadata:
  2881. return self.project_metadata[name]
  2882. return None
def getProjectPipelineConfig(self, item):
    """Build the merged project-pipeline config for *item*.

    Returns a ProjectPipelineConfig combining every matching project
    and project-template stanza for the item's pipeline, or None if
    the project does not participate in the pipeline on this branch.
    Debug messages describing every match decision are accumulated on
    the returned object via addDebug().
    """
    # Create a project-pipeline config for the given item, taking
    # its branch (if any) into consideration.  If the project does
    # not participate in the pipeline at all (in this branch),
    # return None.

    # A pc for a project can appear only in a config-project
    # (unbranched, always applies), or in the project itself (it
    # should have an implied branch matcher and it must match the
    # item).
    ppc = ProjectPipelineConfig()
    project_in_pipeline = False
    for pc in self.getProjectConfigs(item.change.project.canonical_name):
        if not pc.changeMatches(item.change):
            msg = "Project %s did not match" % (pc,)
            ppc.addDebug(msg)
            self.log.debug("%s item %s" % (msg, item))
            continue
        msg = "Project %s matched" % (pc,)
        ppc.addDebug(msg)
        self.log.debug("%s item %s" % (msg, item))
        # Templates are merged first so that project-level settings
        # applied afterwards take precedence over them.
        for template_name in pc.templates:
            templates = self.getProjectTemplates(template_name)
            for template in templates:
                template_ppc = template.pipelines.get(item.pipeline.name)
                if template_ppc:
                    if not template.changeMatches(item.change):
                        msg = "Project template %s did not match" % (
                            template,)
                        ppc.addDebug(msg)
                        self.log.debug("%s item %s" % (msg, item))
                        continue
                    msg = "Project template %s matched" % (
                        template,)
                    ppc.addDebug(msg)
                    self.log.debug("%s item %s" % (msg, item))
                    project_in_pipeline = True
                    ppc.update(template_ppc)
                    ppc.updateVariables(template.variables)
        # Now merge in project variables (they will override
        # template variables; later job variables may override
        # these again)
        ppc.updateVariables(pc.variables)
        project_ppc = pc.pipelines.get(item.pipeline.name)
        if project_ppc:
            project_in_pipeline = True
            ppc.update(project_ppc)
    if project_in_pipeline:
        return ppc
    return None
  2932. def _updateOverrideCheckouts(self, override_checkouts, job):
  2933. # Update the values in an override_checkouts dict with those
  2934. # in a job. Used in collectJobVariants.
  2935. if job.override_checkout:
  2936. override_checkouts[None] = job.override_checkout
  2937. for req in job.required_projects.values():
  2938. if req.override_checkout:
  2939. override_checkouts[req.project_name] = req.override_checkout
def _collectJobVariants(self, item, jobname, change, path, jobs, stack,
                        override_checkouts, indent):
    """Append all variants of *jobname* matching *change* to *jobs*.

    Recurses into each non-base variant's parent (via collectJobs)
    before appending the variant itself, so parents land in *jobs*
    ahead of children.  Returns True if at least one variant matched.
    """
    matched = False
    # Copy so overrides added at this level don't leak back to the
    # caller or to sibling branches of the recursion.
    local_override_checkouts = override_checkouts.copy()
    override_branch = None
    project = None
    for variant in self.getJobs(jobname):
        if project is None and variant.source_context:
            # Resolve the defining project from the first variant
            # that carries a source context; compute the effective
            # override branch once for all variants of this job.
            project = variant.source_context.project
            if override_checkouts.get(None) is not None:
                override_branch = override_checkouts.get(None)
            # A project-specific override takes precedence over the
            # job-wide (None-keyed) override.
            override_branch = override_checkouts.get(
                project.canonical_name, override_branch)
            branches = self.tenant.getProjectBranches(project)
            # Ignore the override if the project lacks that branch.
            if override_branch not in branches:
                override_branch = None
        if not variant.changeMatchesBranch(
                change,
                override_branch=override_branch):
            self.log.debug("Variant %s did not match %s", repr(variant),
                           change)
            item.debug("Variant {variant} did not match".format(
                variant=repr(variant)), indent=indent)
            continue
        else:
            self.log.debug("Variant %s matched %s", repr(variant), change)
            item.debug("Variant {variant} matched".format(
                variant=repr(variant)), indent=indent)
        if not variant.isBase():
            parent = variant.parent
            # The first job in the chain with no explicit parent
            # inherits from the tenant's default base job.
            if not jobs and parent is None:
                parent = self.tenant.default_base_job
        else:
            parent = None
        self._updateOverrideCheckouts(local_override_checkouts, variant)
        if parent and parent not in path:
            if parent in stack:
                raise Exception("Dependency cycle in jobs: %s" % stack)
            self.collectJobs(item, parent, change, path, jobs,
                             stack + [jobname], local_override_checkouts)
        matched = True
        if variant not in jobs:
            jobs.append(variant)
    return matched
  2984. def collectJobs(self, item, jobname, change, path=None, jobs=None,
  2985. stack=None, override_checkouts=None):
  2986. # Stack is the recursion stack of job parent names. Each time
  2987. # we go up a level, we add to stack, and it's popped as we
  2988. # descend.
  2989. if stack is None:
  2990. stack = []
  2991. # Jobs is the list of jobs we've accumulated.
  2992. if jobs is None:
  2993. jobs = []
  2994. # Path is the list of job names we've examined. It
  2995. # accumulates and never reduces. If more than one job has the
  2996. # same parent, this will prevent us from adding it a second
  2997. # time.
  2998. if path is None:
  2999. path = []
  3000. # Override_checkouts is a dictionary of canonical project
  3001. # names -> branch names. It is not mutated, but instead new
  3002. # copies are made and updated as we ascend the hierarchy, so
  3003. # higher levels don't affect lower levels after we descend.
  3004. # It's used to override the branch matchers for jobs.
  3005. if override_checkouts is None:
  3006. override_checkouts = {}
  3007. path.append(jobname)
  3008. matched = False
  3009. indent = len(path) + 1
  3010. msg = "Collecting job variants for {jobname}".format(jobname=jobname)
  3011. self.log.debug(msg)
  3012. item.debug(msg, indent=indent)
  3013. matched = self._collectJobVariants(
  3014. item, jobname, change, path, jobs, stack, override_checkouts,
  3015. indent)
  3016. if not matched:
  3017. self.log.debug("No matching parents for job %s and change %s",
  3018. jobname, change)
  3019. item.debug("No matching parents for {jobname}".format(
  3020. jobname=repr(jobname)), indent=indent)
  3021. raise NoMatchingParentError()
  3022. return jobs
def _createJobGraph(self, item, ppc, job_graph):
    """Freeze each job in the project-pipeline config into *job_graph*.

    For every job name in the ppc's job list: collect all matching
    job variants up the inheritance hierarchy, fold them (and the
    matching project-pipeline variants) into a single frozen job,
    apply sanity checks (abstract, allowed-projects, post-review,
    run playbook), and add the result to *job_graph*.
    """
    job_list = ppc.job_list
    change = item.change
    pipeline = item.pipeline
    item.debug("Freezing job graph")
    for jobname in job_list.jobs:
        # This is the final job we are constructing
        frozen_job = None
        self.log.debug("Collecting jobs %s for %s", jobname, change)
        item.debug("Freezing job {jobname}".format(
            jobname=jobname), indent=1)
        # Create the initial list of override_checkouts, which are
        # used as we walk up the hierarchy to expand the set of
        # jobs which match.
        override_checkouts = {}
        for variant in job_list.jobs[jobname]:
            if variant.changeMatchesBranch(change):
                self._updateOverrideCheckouts(override_checkouts, variant)
        try:
            variants = self.collectJobs(
                item, jobname, change,
                override_checkouts=override_checkouts)
        except NoMatchingParentError:
            variants = None
        self.log.debug("Collected jobs %s for %s", jobname, change)
        if not variants:
            # A change must match at least one defined job variant
            # (that is to say that it must match more than just
            # the job that is defined in the tree).
            item.debug("No matching variants for {jobname}".format(
                jobname=jobname), indent=2)
            continue
        # Fold the collected variants, parents first, into one job.
        for variant in variants:
            if frozen_job is None:
                frozen_job = variant.copy()
                frozen_job.setBase(item.layout)
            else:
                frozen_job.applyVariant(variant, item.layout)
                frozen_job.name = variant.name
        frozen_job.name = jobname
        self.log.debug("Froze job %s for %s", jobname, change)
        # Whether the change matches any of the project pipeline
        # variants
        matched = False
        for variant in job_list.jobs[jobname]:
            if variant.changeMatchesBranch(change):
                frozen_job.applyVariant(variant, item.layout)
                matched = True
                self.log.debug("Pipeline variant %s matched %s",
                               repr(variant), change)
                item.debug("Pipeline variant {variant} matched".format(
                    variant=repr(variant)), indent=2)
            else:
                self.log.debug("Pipeline variant %s did not match %s",
                               repr(variant), change)
                item.debug("Pipeline variant {variant} did not match".
                           format(variant=repr(variant)), indent=2)
        if not matched:
            # A change must match at least one project pipeline
            # job variant.
            item.debug("No matching pipeline variants for {jobname}".
                       format(jobname=jobname), indent=2)
            continue
        if not frozen_job.changeMatchesFiles(change):
            self.log.debug("Job %s did not match files in %s",
                           repr(frozen_job), change)
            item.debug("Job {jobname} did not match files".
                       format(jobname=jobname), indent=2)
            continue
        if frozen_job.abstract:
            raise Exception("Job %s is abstract and may not be "
                            "directly run" %
                            (frozen_job.name,))
        if (frozen_job.allowed_projects is not None and
                change.project.name not in frozen_job.allowed_projects):
            raise Exception("Project %s is not allowed to run job %s" %
                            (change.project.name, frozen_job.name))
        if ((not pipeline.post_review) and frozen_job.post_review):
            raise Exception("Pre-review pipeline %s does not allow "
                            "post-review job %s" % (
                                pipeline.name, frozen_job.name))
        if not frozen_job.run:
            raise Exception("Job %s does not specify a run playbook" % (
                frozen_job.name,))
        # Now merge variables set from this parent ppc
        # (i.e. project+templates) directly into the job vars
        frozen_job.updateProjectVariables(ppc.variables)
        job_graph.addJob(frozen_job)
  3111. def createJobGraph(self, item, ppc):
  3112. # NOTE(pabelanger): It is possible for a foreign project not to have a
  3113. # configured pipeline, if so return an empty JobGraph.
  3114. ret = JobGraph()
  3115. if ppc:
  3116. self._createJobGraph(item, ppc, ret)
  3117. return ret
  3118. class Semaphore(ConfigObject):
  3119. def __init__(self, name, max=1):
  3120. super(Semaphore, self).__init__()
  3121. self.name = name
  3122. self.max = int(max)
  3123. def __ne__(self, other):
  3124. return not self.__eq__(other)
  3125. def __eq__(self, other):
  3126. if not isinstance(other, Semaphore):
  3127. return False
  3128. return (self.name == other.name and
  3129. self.max == other.max)
  3130. class SemaphoreHandler(object):
  3131. log = logging.getLogger("zuul.SemaphoreHandler")
  3132. def __init__(self):
  3133. self.semaphores = {}
  3134. def acquire(self, item, job, request_resources):
  3135. """
  3136. Aquires a semaphore for an item job combination. This gets called twice
  3137. during the lifecycle of a job. The first call is before requesting
  3138. build resources. The second call is before running the job. In which
  3139. call we really acquire the semaphore is defined by the job.
  3140. :param item: The item
  3141. :param job: The job
  3142. :param request_resources: True if we want to acquire for the request
  3143. resources phase, False if we want to acquire
  3144. for the run phase.
  3145. """
  3146. if not job.semaphore:
  3147. return True
  3148. if job.semaphore.resources_first and request_resources:
  3149. # We're currently in the resource request phase and want to get the
  3150. # resources before locking. So we don't need to do anything here.
  3151. return True
  3152. else:
  3153. # As a safety net we want to acuire the semaphore at least in the
  3154. # run phase so don't filter this here as re-acuiring the semaphore
  3155. # is not a problem here if it has been already acquired before in
  3156. # the resources phase.
  3157. pass
  3158. semaphore_key = job.semaphore.name
  3159. m = self.semaphores.get(semaphore_key)
  3160. if not m:
  3161. # The semaphore is not held, acquire it
  3162. self._acquire(semaphore_key, item, job.name)
  3163. return True
  3164. if (item, job.name) in m:
  3165. # This item already holds the semaphore
  3166. return True
  3167. # semaphore is there, check max
  3168. if len(m) < self._max_count(item, job.semaphore.name):
  3169. self._acquire(semaphore_key, item, job.name)
  3170. return True
  3171. return False
  3172. def release(self, item, job):
  3173. if not job.semaphore:
  3174. return
  3175. semaphore_key = job.semaphore.name
  3176. m = self.semaphores.get(semaphore_key)
  3177. if not m:
  3178. # The semaphore is not held, nothing to do
  3179. self.log.error("Semaphore can not be released for %s "
  3180. "because the semaphore is not held" %
  3181. item)
  3182. return
  3183. if (item, job.name) in m:
  3184. # This item is a holder of the semaphore
  3185. self._release(semaphore_key, item, job.name)
  3186. return
  3187. self.log.error("Semaphore can not be released for %s "
  3188. "which does not hold it" % item)
  3189. def _acquire(self, semaphore_key, item, job_name):
  3190. self.log.debug("Semaphore acquire {semaphore}: job {job}, item {item}"
  3191. .format(semaphore=semaphore_key,
  3192. job=job_name,
  3193. item=item))
  3194. if semaphore_key not in self.semaphores:
  3195. self.semaphores[semaphore_key] = []
  3196. self.semaphores[semaphore_key].append((item, job_name))
  3197. def _release(self, semaphore_key, item, job_name):
  3198. self.log.debug("Semaphore release {semaphore}: job {job}, item {item}"
  3199. .format(semaphore=semaphore_key,
  3200. job=job_name,
  3201. item=item))
  3202. sem_item = (item, job_name)
  3203. if sem_item in self.semaphores[semaphore_key]:
  3204. self.semaphores[semaphore_key].remove(sem_item)
  3205. # cleanup if there is no user of the semaphore anymore
  3206. if len(self.semaphores[semaphore_key]) == 0:
  3207. del self.semaphores[semaphore_key]
  3208. @staticmethod
  3209. def _max_count(item, semaphore_name):
  3210. if not item.layout:
  3211. # This should not occur as the layout of the item must already be
  3212. # built when acquiring or releasing a semaphore for a job.
  3213. raise Exception("Item {} has no layout".format(item))
  3214. # find the right semaphore
  3215. default_semaphore = Semaphore(semaphore_name, 1)
  3216. semaphores = item.layout.semaphores
  3217. return semaphores.get(semaphore_name, default_semaphore).max
class Tenant(object):
    """An isolated namespace of projects, pipelines, jobs and limits."""

    def __init__(self, name):
        self.name = name
        # Per-tenant limits enforced on job configuration.
        self.max_nodes_per_job = 5
        self.max_job_timeout = 10800
        self.exclude_unprotected_branches = False
        self.default_base_job = None
        self.layout = None
        # The unparsed configuration from the main zuul config for
        # this tenant.
        self.unparsed_config = None
        # The list of projects from which we will read full
        # configuration.
        self.config_projects = []
        # The parsed config from those projects.
        self.config_projects_config = None
        # The list of projects from which we will read untrusted
        # in-repo configuration.
        self.untrusted_projects = []
        # The parsed config from those projects.
        self.untrusted_projects_config = None
        self.semaphore_handler = SemaphoreHandler()
        # Metadata about projects for this tenant
        # canonical project name -> TenantProjectConfig
        self.project_configs = {}
        # A mapping of project names to projects.  project_name ->
        # VALUE where VALUE is a further dictionary of
        # canonical_hostname -> Project.
        self.projects = {}
        self.canonical_hostnames = set()

    def _addProject(self, tpc):
        """Add a project to the project index

        :arg TenantProjectConfig tpc: The TenantProjectConfig (with
            associated project) to add.
        """
        project = tpc.project
        self.canonical_hostnames.add(project.canonical_hostname)
        hostname_dict = self.projects.setdefault(project.name, {})
        # The same short name may exist on several hostnames, but a
        # given (hostname, name) pair may only be added once.
        if project.canonical_hostname in hostname_dict:
            raise Exception("Project %s is already in project index" %
                            (project,))
        hostname_dict[project.canonical_hostname] = project
        self.project_configs[project.canonical_name] = tpc

    def getProject(self, name):
        """Return a project given its name.

        :arg str name: The name of the project.  It may be fully
            qualified (E.g., "git.example.com/subpath/project") or may
            contain only the project name name may be supplied (E.g.,
            "subpath/project").

        :returns: A tuple (trusted, project) or (None, None) if the
            project is not found or ambiguous.  The "trusted" boolean
            indicates whether or not the project is trusted by this
            tenant.
        :rtype: (bool, Project)
        """
        path = name.split('/', 1)
        if path[0] in self.canonical_hostnames:
            hostname = path[0]
            project_name = path[1]
        else:
            hostname = None
            project_name = name
        hostname_dict = self.projects.get(project_name)
        project = None
        if hostname_dict:
            if hostname:
                project = hostname_dict.get(hostname)
            else:
                # Unqualified name: only unambiguous if exactly one
                # hostname provides it.
                values = list(hostname_dict.values())
                if len(values) == 1:
                    project = values[0]
                else:
                    raise Exception("Project name '%s' is ambiguous, "
                                    "please fully qualify the project "
                                    "with a hostname" % (name,))
        if project is None:
            return (None, None)
        if project in self.config_projects:
            return (True, project)
        if project in self.untrusted_projects:
            return (False, project)
        # This should never happen:
        raise Exception("Project %s is neither trusted nor untrusted" %
                        (project,))

    def getProjectsByRegex(self, regex):
        """Return all projects with a full match to either project name or
        canonical project name.

        :arg str regex: The regex to match
        :returns: A list of tuples (trusted, project) describing the found
            projects. Raises an exception if the same project name is found
            several times across multiple hostnames.
        """
        matcher = re2.compile(regex)
        projects = []
        result = []
        for name, hostname_dict in self.projects.items():
            if matcher.fullmatch(name):
                # validate that this match is unambiguous
                values = list(hostname_dict.values())
                if len(values) > 1:
                    raise Exception("Project name '%s' is ambiguous, "
                                    "please fully qualify the project "
                                    "with a hostname. Valid hostnames "
                                    "are %s." % (name, hostname_dict.keys()))
                projects.append(values[0])
            else:
                # try to match canonical project names
                for project in hostname_dict.values():
                    if matcher.fullmatch(project.canonical_name):
                        projects.append(project)
        for project in projects:
            if project in self.config_projects:
                result.append((True, project))
            elif project in self.untrusted_projects:
                result.append((False, project))
            else:
                raise Exception("Project %s is neither trusted nor untrusted" %
                                (project,))
        return result

    def getProjectBranches(self, project):
        """Return a project's branches (filtered by this tenant config)

        :arg Project project: The project object.

        :returns: A list of branch names.
        :rtype: [str]
        """
        tpc = self.project_configs[project.canonical_name]
        return tpc.branches

    def getExcludeUnprotectedBranches(self, project):
        # Evaluate if unprotected branches should be excluded or not. The first
        # match wins. The order is project -> tenant (default is false).
        project_config = self.project_configs.get(project.canonical_name)
        if project_config.exclude_unprotected_branches is not None:
            exclude_unprotected = project_config.exclude_unprotected_branches
        else:
            exclude_unprotected = self.exclude_unprotected_branches
        return exclude_unprotected

    def addConfigProject(self, tpc):
        # Register a trusted (config) project and index it.
        self.config_projects.append(tpc.project)
        self._addProject(tpc)

    def addUntrustedProject(self, tpc):
        # Register an untrusted (in-repo config) project and index it.
        self.untrusted_projects.append(tpc.project)
        self._addProject(tpc)

    def getSafeAttributes(self):
        # Attributes of the tenant that are safe to expose to
        # untrusted contexts.
        return Attributes(name=self.name)
  3362. class Abide(object):
  3363. def __init__(self):
  3364. self.tenants = OrderedDict()
  3365. # project -> branch -> UnparsedConfig
  3366. self.unparsed_project_branch_config = {}
  3367. def cacheUnparsedConfig(self, canonical_project_name, branch, conf):
  3368. self.unparsed_project_branch_config.setdefault(
  3369. canonical_project_name, {})[branch] = conf
  3370. def getUnparsedConfig(self, canonical_project_name, branch):
  3371. return self.unparsed_project_branch_config.get(
  3372. canonical_project_name, {}).get(branch)
  3373. def clearUnparsedConfigCache(self, canonical_project_name, branch=None):
  3374. if canonical_project_name in self.unparsed_project_branch_config:
  3375. project_branch_config = \
  3376. self.unparsed_project_branch_config[canonical_project_name]
  3377. if branch in project_branch_config:
  3378. del project_branch_config[branch]
  3379. if len(project_branch_config) == 0 or branch is None:
  3380. del self.unparsed_project_branch_config[canonical_project_name]
  3381. class JobTimeData(object):
  3382. format = 'B10H10H10B'
  3383. version = 0
  3384. def __init__(self, path):
  3385. self.path = path
  3386. self.success_times = [0 for x in range(10)]
  3387. self.failure_times = [0 for x in range(10)]
  3388. self.results = [0 for x in range(10)]
  3389. def load(self):
  3390. if not os.path.exists(self.path):
  3391. return
  3392. with open(self.path, 'rb') as f:
  3393. data = struct.unpack(self.format, f.read())
  3394. version = data[0]
  3395. if version != self.version:
  3396. raise Exception("Unkown data version")
  3397. self.success_times = list(data[1:11])
  3398. self.failure_times = list(data[11:21])
  3399. self.results = list(data[21:32])
  3400. def save(self):
  3401. tmpfile = self.path + '.tmp'
  3402. data = [self.version]
  3403. data.extend(self.success_times)
  3404. data.extend(self.failure_times)
  3405. data.extend(self.results)
  3406. data = struct.pack(self.format, *data)
  3407. with open(tmpfile, 'wb') as f:
  3408. f.write(data)
  3409. os.rename(tmpfile, self.path)
  3410. def add(self, elapsed, result):
  3411. elapsed = int(elapsed)
  3412. if result == 'SUCCESS':
  3413. self.success_times.append(elapsed)
  3414. self.success_times.pop(0)
  3415. result = 0
  3416. else:
  3417. self.failure_times.append(elapsed)
  3418. self.failure_times.pop(0)
  3419. result = 1
  3420. self.results.append(result)
  3421. self.results.pop(0)
  3422. def getEstimatedTime(self):
  3423. times = [x for x in self.success_times if x]
  3424. if times:
  3425. return float(sum(times)) / len(times)
  3426. return 0.0
  3427. class TimeDataBase(object):
  3428. def __init__(self, root):
  3429. self.root = root
  3430. def _getTD(self, build):
  3431. if hasattr(build.build_set.item.change, 'branch'):
  3432. branch = build.build_set.item.change.branch
  3433. else:
  3434. branch = ''
  3435. dir_path = os.path.join(
  3436. self.root,
  3437. build.build_set.item.pipeline.tenant.name,
  3438. build.build_set.item.change.project.canonical_name,
  3439. branch)
  3440. if not os.path.exists(dir_path):
  3441. os.makedirs(dir_path)
  3442. path = os.path.join(dir_path, build.job.name)
  3443. td = JobTimeData(path)
  3444. td.load()
  3445. return td
  3446. def getEstimatedTime(self, name):
  3447. return self._getTD(name).getEstimatedTime()
  3448. def update(self, build, elapsed, result):
  3449. td = self._getTD(build)
  3450. td.add(elapsed, result)
  3451. td.save()
  3452. class Capabilities(object):
  3453. """The set of capabilities this Zuul installation has.
  3454. Some plugins add elements to the external API. In order to
  3455. facilitate consumers knowing if functionality is available
  3456. or not, keep track of distinct capability flags.
  3457. """
  3458. def __init__(self, job_history=False):
  3459. self.job_history = job_history
  3460. def __repr__(self):
  3461. return '<Capabilities 0x%x %s>' % (id(self), self._renderFlags())
  3462. def _renderFlags(self):
  3463. d = self.toDict()
  3464. return " ".join(['{k}={v}'.format(k=k, v=v) for (k, v) in d.items()])
  3465. def copy(self):
  3466. return Capabilities(**self.toDict())
  3467. def toDict(self):
  3468. d = dict()
  3469. d['job_history'] = self.job_history
  3470. return d
  3471. class WebInfo(object):
  3472. """Information about the system needed by zuul-web /info."""
  3473. def __init__(self, websocket_url=None,
  3474. capabilities=None, stats_url=None,
  3475. stats_prefix=None, stats_type=None):
  3476. self.capabilities = capabilities or Capabilities()
  3477. self.stats_prefix = stats_prefix
  3478. self.stats_type = stats_type
  3479. self.stats_url = stats_url
  3480. self.tenant = None
  3481. self.websocket_url = websocket_url
  3482. def __repr__(self):
  3483. return '<WebInfo 0x%x capabilities=%s>' % (
  3484. id(self), str(self.capabilities))
  3485. def copy(self):
  3486. return WebInfo(
  3487. capabilities=self.capabilities.copy(),
  3488. stats_prefix=self.stats_prefix,
  3489. stats_type=self.stats_type,
  3490. stats_url=self.stats_url,
  3491. websocket_url=self.websocket_url)
  3492. @staticmethod
  3493. def fromConfig(config):
  3494. return WebInfo(
  3495. stats_prefix=get_default(config, 'statsd', 'prefix'),
  3496. stats_type=get_default(config, 'web', 'stats_type', 'graphite'),
  3497. stats_url=get_default(config, 'web', 'stats_url', None),
  3498. websocket_url=get_default(config, 'web', 'websocket_url', None),
  3499. )
  3500. def toDict(self):
  3501. d = dict()
  3502. d['capabilities'] = self.capabilities.toDict()
  3503. d['websocket_url'] = self.websocket_url
  3504. stats = dict()
  3505. stats['prefix'] = self.stats_prefix
  3506. stats['type'] = self.stats_type
  3507. stats['url'] = self.stats_url
  3508. d['stats'] = stats
  3509. if self.tenant:
  3510. d['tenant'] = self.tenant
  3511. return d