master.cfg (52748B)
1 # -*- python -*- 2 # ex: set syntax=python: 3 4 ## 5 # This file is part of TALER 6 # (C) 2016-2025 Taler Systems SA 7 # 8 # TALER is free software; you can redistribute it and/or 9 # modify it under the terms of the GNU Affero General Public 10 # License as published by the Free Software Foundation; either 11 # version 3, or (at your option) any later version. 12 # 13 # TALER is distributed in the hope that it will be useful, 14 # but WITHOUT ANY WARRANTY; without even the implied warranty 15 # of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 16 # See the GNU General Public License for more details. 17 # 18 # You should have received a copy of the GNU General Public 19 # License along with TALER; see the file COPYING. If not, 20 # see <http://www.gnu.org/licenses/> 21 # 22 # @author Florian Dold 23 # @author Marcello Stanisci 24 # @author ng0 25 # @author Christian Grothoff 26 # @author Devan Carpenter 27 import ast 28 import configparser 29 import glob 30 import os 31 import pathlib 32 import pwd 33 import re 34 import subprocess 35 36 from buildbot.changes.pb import PBChangeSource 37 from buildbot.steps.source.git import Git 38 from buildbot.steps.shell import ShellCommand 39 from buildbot.plugins import changes 40 from buildbot.plugins import reporters 41 from buildbot.plugins import schedulers 42 from buildbot.plugins import steps 43 from buildbot.plugins import secrets, util 44 from buildbot.process import buildstep, logobserver 45 from buildbot.process.results import SKIPPED 46 from buildbot.reporters.generators.build import BuildStatusGenerator 47 from buildbot.worker import Worker 48 from twisted.internet import defer 49 50 # This is a sample buildmaster config file. It must be 51 # installed as 'master.cfg' in your buildmaster's base 52 # directory. 
# This file has the following structure:
# - Globals: definition of global variables we use throughout
#   + Convenience functions: helper functions useful for many jobs
# - Jobs: actual job definitions
# - General purpose: triggers and alerts shared by various jobs
#   + general purpose notification (alerts)
#   + general purpose triggers (schedulers)
# - Actual buildbot configuration object initialization

#################################################################
######################### GLOBALS ###############################
#################################################################

# The 'workers' list defines the set of recognized workers.
# Each element is a Worker object, specifying a unique worker
# name and password.  The same worker name and password must
# be configured on the worker.
WORKERS = []

# 'services' is a list of BuildbotService items like reporter
# targets.  The status of each build will be pushed to these
# targets.  buildbot/reporters/*.py has a variety to choose from,
# like IRC bots.


class MessageFormatterWithStdout(reporters.MessageFormatter):
    """Message formatter that exposes a 'stdout' entry in the template
    context, assembled from the log lines the build's commands printed
    on standard output (buildbot prefixes stdout log lines with 'o')."""

    def buildAdditionalContext(self, master, ctx):
        stdout = []
        for step in ctx["build"]["steps"]:
            for log in step["logs"]:
                all_logs = log["content"]["content"].splitlines()
                # Including only what the script printed on stdout.
                for line in all_logs:
                    if re.search("^o", line):
                        stdout.append(line[1:])
        ctx.update(dict(stdout="\n".join(stdout)))


#####################################################
#  Commit message triggers                          #
#                                                   #
#  This checks for triggers in the commit messages  #
#  !tarball will trigger a release build            #
#  !coverity will trigger a coverity check          #
#####################################################
def checkForTarballTrigger(change):
    # True iff the commit message requests a release tarball build.
    return "!tarball" in change.comments


def checkForCoverityTrigger(change):
    # True iff the commit message requests a coverity scan.
    # FIX: previously this fell through and implicitly returned None
    # when the trigger was absent; now it returns an explicit bool,
    # consistent with checkForTarballTrigger above.
    return "!coverity" in change.comments


SERVICES = []

# The 'builders' list defines the Builders, which tell Buildbot
# how to perform a build: what steps, and which workers can execute
# them.  Note that any particular build will only take place on
# one worker.
BUILDERS = []

# Configures the Schedulers, which decide how to react to incoming
# changes.
SCHEDULERS = []

# Array of builders to be scheduled every night.
NIGHTLY_TRIGGERS = []

# Array of builders to be scheduled whenever any of the code Git repos change
CODECHANGE_TRIGGERS = []

# Array of builders to be scheduled whenever the taler-typescript-core or
# taler-deployment change
WALLETCHANGE_TRIGGERS = []

# Array of builder names for which build status reports should be sent
# via e-mail
EMAIL_ALERTS = []

# Array of email address for which build status reports should be sent
BUILDER_EMAIL_ADDRESSES = []

############ Convenience functions #################

# Create a FACTORY with a taler-deployment.git checkout as the first step.
def create_factory_with_deployment():
    f = util.BuildFactory()
    update_deployment(f)
    return f

# Convenience function that checks out a Git repository.
# First argument is the URL of the Git to clone, second
# the desired branch.
# Default is 'master'.
def git_step(repo, target_branch="master"):
    """Return a Git checkout step for 'repo' on 'target_branch'.

    The checkout is a full, fresh clone that always builds the latest
    revision and halts the build on failure."""
    return Git(
        repourl=repo,
        mode="full",
        method="fresh",
        logEnviron=False,
        alwaysUseLatest=True,
        haltOnFailure=True,
        branch=target_branch
    )

# Add a '<step>' stage to builder 'bldr', driven by scripts shipped in
# the checked-out project's own .buildbot/ directory:
# - if .buildbot/<workername>_<step>.sh exists, run that (worker-specific);
# - else if .buildbot/<step>.sh exists, run that (default);
# - else the stage is skipped and hidden in the UI.
def add_default_step(bldr, step):
    worker_script = ".buildbot/%(prop:workername)s_" + step + ".sh"
    default_script = ".buildbot/" + step + ".sh"
    # Probe for the worker-specific script; 'exit 0' keeps the probe
    # green when the script is absent (property stays empty).
    cmd = '[ -f %s ] && ls -1 %s || exit 0' % (worker_script, worker_script)
    bldr.addStep(steps.SetPropertyFromCommand(command=util.Interpolate(cmd),
                                              hideStepIf=True,
                                              property='buildbotScript_%s' % step,
                                              name="Checking for worker-specific %s script" % step))
    # Fall back to the default script only if the worker-specific one
    # was not found.
    cmd = '[ -f %s ] && ls -1 %s || exit 0' % (default_script, default_script)
    bldr.addStep(steps.SetPropertyFromCommand(doStepIf=(util.Property('buildbotScript_%s' % step) != util.Interpolate(worker_script)),
                                              hideStepIf=True,
                                              command=util.Interpolate(cmd),
                                              property='buildbotScript_%s' % step,
                                              name="Checking for %s script" % step))
    # Execute whichever script was found, if any.
    bldr.addStep(steps.ShellCommand(command=util.Property('buildbotScript_%s' % step),
                                    haltOnFailure=True,
                                    env={'PATH': "${HOME}/.local/bin:$HOME/bin:${PATH}"},
                                    doStepIf=(util.Property('buildbotScript_%s' % step) != None),
                                    hideStepIf=lambda results, s: results == SKIPPED,
                                    name="Executing %s step" % step))


# Convenience function that runs 'make check' in a
# directory of the code inside of a netjail.
def jailed_check(package, srcdirs):
    return steps.ShellSequence(
        name="Tests of " + package,
        description="Testing " + package,
        descriptionDone="Pass",
        # FIX: the inner command used to read 'cd src/<dir> make install
        # check', which passes the make targets as arguments to 'cd' and
        # never runs make; '&&' is required after the cd.  Also build a
        # real list instead of a lazy, one-shot map object.
        # NOTE(review): the embedded single quotes around the bash -c
        # payload assume netjail.sh re-feeds its arguments through a
        # shell -- confirm against that wrapper script.
        commands=[util.ShellArg(command=["sudo", "/usr/local/bin/netjail.sh",
                                         "/home/container-worker/taler-deployment/buildbot/with-postgres.sh",
                                         "bash", "-c",
                                         "'cd src/" + srcdir + " && make install check'"])
                  for srcdir in srcdirs],
        env={'PATH': "${HOME}/local/bin:${PATH}"},
        workdir="../../sources/" + package
    )

# Convenience function that checks out the deployment.
def update_deployment(factory):
    factory.addStep(steps.ShellSequence(
        name="update taler-deployment",
        description="removing old deployment and fetching fresh repository",
        descriptionDone="Deployment updated",
        commands=[
            util.ShellArg(command=["rm", "-rf", "taler-deployment"]),
            util.ShellArg(command=["git", "clone", "git://git.taler.net/taler-deployment"]),
        ],
        haltOnFailure=True,
        workdir="../.."
    ))

# Convenience function that builds and runs a container.
# Convenience function that builds and runs a container.
def container_add_step(HALT_ON_FAILURE,
                       WARN_ON_FAILURE,
                       CONTAINER_BUILD,
                       CONTAINER_NAME,
                       factory,
                       WORK_DIR,
                       repoName,
                       stepName,
                       CONTAINER_ARCH="amd64",
                       jobCmd="/workdir/contrib/ci/ci.sh",
                       containerFile="contrib/ci/Containerfile"):
    """Return a ShellSequence that runs a CI job inside a podman
    container; when CONTAINER_BUILD is true, the container image is
    (re)built from containerFile first."""
    print(f"HALT_ON_FAILURE: {HALT_ON_FAILURE}, WARN_ON_FAILURE: {WARN_ON_FAILURE}, CONTAINER_BUILD: {CONTAINER_BUILD}, CONTAINER_NAME: {CONTAINER_NAME}")

    # Base 'podman run' invocation: transient storage, CI metadata
    # exported via the environment, the checkout mounted at /workdir,
    # artifact directories and read-only SSH credentials mounted in.
    run_cmd = [
        "podman", "--transient-store", "run", "--rm",
        "--arch", CONTAINER_ARCH,
        "--log-driver=none",
        "--add-host", "taler.host.internal:10.0.2.2",
        "--network", "slirp4netns:allow_host_loopback=true",
        "--env", util.Interpolate("CI_COMMIT_REF=%(prop:got_revision:-%(src::revision:-unknown)s)s"),
        "--env", util.Interpolate("CI_GIT_BRANCH=%(src::branch)s"),
        "--env", util.Interpolate("CI_PROJECT_NAME=%(src::project)s"),
        "--cap-add", "SYS_ADMIN,CAP_SYS_CHROOT",
        "--volume", f"{WORK_DIR}:/workdir",
        "--volume", "/home/container-worker/ephemeral_ci_artifacts:/artifacts",
        "--volume", "/home/container-worker/persistent_ci_artifacts:/persistent_artifacts",
        "--volume", "/home/container-worker/mounted_files/ci_container_id_ed25519:/root/.ssh/id_ed25519:ro",
        "--volume", "/home/container-worker/mounted_files/container_known_hosts:/root/.ssh/known_hosts:ro",
        "--workdir", "/workdir",
    ]

    # Mount a per-repository inputs directory when one exists.
    inputs_path = f"/home/container-worker/container_inputs/{repoName}"
    print(f"Checking that {inputs_path} exists")
    if os.path.isdir(inputs_path):
        print(f"Adding {inputs_path}")
        run_cmd += ["--volume", f"{inputs_path}:/inputs:ro"]
    else:
        print(f"Inputs directory not found at {inputs_path}")

    # Image-building jobs additionally get the host podman socket.
    if CONTAINER_BUILD:
        run_cmd += ["--volume", f"/run/user/{pwd.getpwnam('container-worker').pw_uid}/podman/podman.sock:/run/podman/podman.sock",
                    "--security-opt", "label=disable"]

    run_cmd += [CONTAINER_NAME, jobCmd]

    run_shell_arg = util.ShellArg(command=run_cmd,
                                  logname='run inside container',
                                  warnOnFailure=WARN_ON_FAILURE,
                                  haltOnFailure=HALT_ON_FAILURE)

    build_shell_arg = util.ShellArg(command=["podman", "build", "-t", CONTAINER_NAME,
                                             "--arch", CONTAINER_ARCH,
                                             "-f", containerFile, "."],
                                    logname='build container', haltOnFailure=True)

    # With CONTAINER_BUILD the image is rebuilt before the job runs.
    sequence = [build_shell_arg, run_shell_arg] if CONTAINER_BUILD else [run_shell_arg]
    return steps.ShellSequence(
        name=stepName,
        commands=sequence,
        haltOnFailure=HALT_ON_FAILURE,
        warnOnFailure=WARN_ON_FAILURE,
        workdir=WORK_DIR
    )

##################################################################
######################## JOBS ####################################
##################################################################

# For every job, we have (in this order!):
# - worker(s): hosts/users that run the job
# - factory: list of steps that define what to do
# - builder: gives the job a name and binds it to the factory and worker
# - (OPTIONAL) alerts: notifications to trigger when the job fails
#     Pre-defined: EMAIL_ALERTS
# - scheduler: rules that define when to run the job
#     Pre-defined: NIGHTLY_TRIGGERS, CODECHANGE_TRIGGERS, WALLETCHANGE_TRIGGERS

################ 1: BUILDMASTER JOB ###################################

##
# This worker restarts the buildmaster itself on
# changes to this file.
# Location: /home/buildbot-master
WORKERS.append(Worker("buildmaster-worker", "buildmaster-pass"))

BUILDMASTER_FACTORY = create_factory_with_deployment()
BUILDMASTER_FACTORY.addStep(
    ShellCommand(
        name="restart buildmaster",
        description="trigger buildmaster restart with new configuration",
        descriptionDone="Buildmaster updated",
        # SIGHUP makes the running buildbot master re-read master.cfg.
        command=["pkill", "-HUP", "-A", "-u", "buildbot-master", "-f", "/usr/bin/python3"],
        workdir="../.."
    )
)

BUILDERS.append(util.BuilderConfig(
    name="buildmaster-builder",
    workernames=["buildmaster-worker"],
    factory=BUILDMASTER_FACTORY
))

EMAIL_ALERTS.append("buildmaster-builder")

# Buildmaster is notified whenever deployment.git changes
SCHEDULERS.append(schedulers.SingleBranchScheduler(
    name="buildmaster-scheduler",
    change_filter=util.ChangeFilter(
        branch="master",
        project_re="(taler-deployment)"
    ),
    treeStableTimer=None,
    builderNames=["buildmaster-builder"]
))

################ 2: WEBSITE JOB ###################################

##
# This worker builds Websites: www and stage.
#
WORKERS.append(Worker("sites-worker", "sites-pass"))

SITES_FACTORY = create_factory_with_deployment()
SITES_FACTORY.addStep(
    ShellCommand(
        name="build Web sites",
        description="Building all the Taler homepages",
        descriptionDone="Sites built.",
        command=["./build-sites.sh"],
        workdir="../../taler-deployment/worker-sites",
        haltOnFailure=True
    )
)

BUILDERS.append(util.BuilderConfig(
    name="sites-builder", workernames=["sites-worker"], factory=SITES_FACTORY
))

#EMAIL_ALERTS.append("sites-builder")


# The web page changed if 'taler-www', 'taler-tutorials',
# 'taler-docs' or 'twister' changed
def web_page(change):
    # Predicate for ChangeFilter(filter_fn=...): True when the change
    # belongs to one of the website-related repositories below.
    _change = change.asDict()
    repo = _change.get("project")
    if repo in ["taler-docs", "taler-tutorials", "taler-www", "twister", "taler-deployment", "buywith", "taler-ops-www", "taler-systems-www", "anastasis-www"]:
        return True
    return False


# Sites are re-built whenever taler-deployment, taler-www, or twister changes.
SCHEDULERS.append(schedulers.SingleBranchScheduler(
    name="sites-scheduler",
    builderNames=["sites-builder"],
    change_filter=util.ChangeFilter(
        branch_re="(master|stable)",
        filter_fn=web_page
    ),
    treeStableTimer=None
))


################ 3: 'check links' JOB ###################################

##
# linkchecker worker checks for dead links in the Website
# Location: /home/linkchecker-worker
WORKERS.append(Worker("linkchecker-worker", "linkchecker-pass"))

# linkchecker FACTORY
LINKCHECKER_FACTORY = create_factory_with_deployment()
LINKCHECKER_FACTORY.addStep(
    ShellCommand(
        name="linkchecker",
        description="Check taler.net website for broken links && Notify",
        descriptionDone="Results of wget in buildbot logs.",
        command=["/home/linkchecker-worker/taler-deployment/worker-linkchecker/linkchecker.sh"],
        workdir="/home/linkchecker-worker",
        haltOnFailure=True,
        timeout=7200  # 2 hours
    )
)

# linkchecker BUILDER
# worker at linkchecker@taler.net
# NOTE(review): workernames is a bare string here while every other
# BuilderConfig passes a list -- confirm buildbot canonicalizes this.
BUILDERS.append(util.BuilderConfig(
    name="linkchecker-builder",
    workernames="linkchecker-worker",
    factory=LINKCHECKER_FACTORY
))

# E-mail the linkchecker results on state change / problem / failure.
docs_generator = BuildStatusGenerator(
    mode=('change', 'problem', 'failing', 'exception',),
    builders=[
        'linkchecker-builder',
    ],
    message_formatter=reporters.MessageFormatter(
        template_type='plain',
        want_logs_content=True
    )
)


SERVICES.append(reporters.MailNotifier(
    fromaddr="bb@taler.net",
    generators=[docs_generator],
    sendToInterestedUsers=False,
    useTls=False,
    relayhost="localhost",
    smtpPort=25,
    dumpMailsToLog=True,
    extraRecipients=['linkcheck@taler.net']
))

# SERVICES.append(tipReserveEmails)

NIGHTLY_TRIGGERS.append("linkchecker-builder")

####################
## GNUnet workers ##
####################

# NOTE(review): worker credentials are kept in plaintext in this file;
# confirm this config is not published with real passwords.
WORKERS.append(Worker("firefly-x86_64-amdepyc", "pass"))
WORKERS.append(Worker("mp-amd64-openbsd", "Tai7zeic"))
WORKERS.append(Worker("schanzen-aarch64-fedora-meson", "eWi9keet"))


# CI build of gnunet.git master on both architectures.
SCHEDULERS.append(schedulers.AnyBranchScheduler(
    name="gnunet",
    change_filter=util.ChangeFilter(branch_re='master',
                                    repository='git://git.gnunet.org/gnunet.git'),
    treeStableTimer=None,
    builderNames=["gnunet-debian-x86_64",
                  "gnunet-fedora-aarch64"]))

# Same build for developer branches (dev/...).
SCHEDULERS.append(schedulers.AnyBranchScheduler(
    name="gnunet-dev",
    change_filter=util.ChangeFilter(branch_re='dev/.+',
                                    repository='git://git.gnunet.org/gnunet.git'),
    treeStableTimer=None,
    builderNames=["gnunet-debian-x86_64-dev",
                  "gnunet-fedora-aarch64-dev"]))

# Release build for pushed vX.Y.Z tags.
SCHEDULERS.append(schedulers.SingleBranchScheduler(
    name="tagged_release",
    change_filter=util.ChangeFilter(branch_re='.*v[0-9]*[.][0-9]*[.][0-9]*$',
                                    repository='git://git.gnunet.org/gnunet.git'),
    treeStableTimer=None,
    builderNames=["gnunet_release"]))

# Release build on a '!tarball' commit-message trigger.
# NOTE(review): fileIsImportant receives the Change object; the
# predicate inspects change.comments, which works, but the parameter
# name is misleading -- confirm against the buildbot API.
SCHEDULERS.append(schedulers.SingleBranchScheduler(
    name="tarball",
    onlyImportant=True,
    change_filter=util.ChangeFilter(branch='master',
                                    repository='git://git.gnunet.org/gnunet.git'),
    fileIsImportant=checkForTarballTrigger,
    treeStableTimer=None,
    builderNames=["gnunet_release"]))


# Build a tarball nightly
SCHEDULERS.append(schedulers.Nightly(name='nightly',
                                     change_filter=util.ChangeFilter(branch='master',
                                                                     repository='git://git.gnunet.org/gnunet.git'),
                                     builderNames=['gnunet_release'],
                                     onlyIfChanged=True,
                                     hour=6, minute=0))

# Coverity scan on a '!coverity' commit-message trigger.
SCHEDULERS.append(schedulers.SingleBranchScheduler(
    name="gnunet_cov",
    onlyImportant=True,
    change_filter=util.ChangeFilter(branch='master',
                                    repository='git://git.gnunet.org/gnunet.git'),
    fileIsImportant=checkForCoverityTrigger,
    treeStableTimer=None,
    builderNames=["gnunet_coverity"]))

SCHEDULERS.append(schedulers.SingleBranchScheduler(
    name="gnunet_rpm",
    change_filter=util.ChangeFilter(branch='dev/schanzen/copr',
                                    repository='git://git.gnunet.org/gnunet-rpm.git'),
    treeStableTimer=None,
    builderNames=["gnunet_rpm_copr"]))
SCHEDULERS.append(schedulers.SingleBranchScheduler(
    name="registrar_scheduler",
    change_filter=util.ChangeFilter(branch='master',
                                    repository='git://git.gnunet.org/gnunet-gns-registrar.git'),
    treeStableTimer=None,
    builderNames=["gnunet-gns-registrar"]))


# Schedule a coverity pass monthly
# NOTE(review): buildbot documents dayOfMonth values 1-31; confirm
# that dayOfMonth=0 actually fires on the 1st as the comment intends.
SCHEDULERS.append(schedulers.Nightly(name='nightly_cov',
                                     change_filter=util.ChangeFilter(branch='master',
                                                                     repository='git://git.gnunet.org/gnunet.git'),
                                     builderNames=['gnunet_coverity'],
                                     onlyIfChanged=True,
                                     dayOfMonth=0, hour=3, minute=0))




#
# WEBSITES
#
SCHEDULERS.append(schedulers.SingleBranchScheduler(
    name="www_master_scheduler",
    change_filter=util.ChangeFilter(branch='master',
                                    repository='git://git.gnunet.org/www.git'),
    treeStableTimer=None,
    builderNames=["stage.gnunet.org"]))

SCHEDULERS.append(schedulers.SingleBranchScheduler(
    name="www_stable_scheduler",
    change_filter=util.ChangeFilter(branch='stable',
                                    repository='git://git.gnunet.org/www.git'),
    treeStableTimer=None,
    builderNames=["www.gnunet.org"]))
SCHEDULERS.append(schedulers.SingleBranchScheduler(
    name="bib_scheduler",
    change_filter=util.ChangeFilter(branch='master',
                                    repository='git://git.gnunet.org/gnunetbib.git'),
    treeStableTimer=None,
    builderNames=["bib.gnunet.org"]))
SCHEDULERS.append(schedulers.SingleBranchScheduler(
    name="reclaim_www_scheduler",
    change_filter=util.ChangeFilter(branch='master',
                                    repository='git://git.gnunet.org/www-reclaim.git'),
    treeStableTimer=None,
    builderNames=["reclaim.gnunet.org"]))
SCHEDULERS.append(schedulers.SingleBranchScheduler(
    name="rest_scheduler",
    change_filter=util.ChangeFilter(branch='master',
                                    repository='git://git.gnunet.org/gnunet-rest-api.git'),
    treeStableTimer=None,
    builderNames=["rest.gnunet.org"]))
# All lsd* repositories share one builder (repourl comes from the
# 'repository' change property, see lsd_factory below).
SCHEDULERS.append(schedulers.SingleBranchScheduler(
    name="lsd_scheduler",
    change_filter=util.ChangeFilter(branch='master',
                                    repository_re='git://git.gnunet.org/lsd.*'),
    treeStableTimer=None,
    builderNames=["lsd.gnunet.org"]))
SCHEDULERS.append(schedulers.SingleBranchScheduler(
    name="gana_scheduler",
    change_filter=util.ChangeFilter(branch='master',
                                    repository='git://git.gnunet.org/gana.git'),
    treeStableTimer=None,
    builderNames=["gana.gnunet.org"]))
SCHEDULERS.append(schedulers.SingleBranchScheduler(
    name="doc_scheduler",
    change_filter=util.ChangeFilter(branch='master',
                                    repository='git://git.gnunet.org/gnunet-handbook.git'),
    treeStableTimer=None,
    builderNames=["doc.gnunet.org"]))


#
# Buildbot self-reconfigure
#
SCHEDULERS.append(schedulers.SingleBranchScheduler(
    name="bb_reload_scheduler",
    change_filter=util.ChangeFilter(branch='master',
                                    repository='git://git.gnunet.org/buildbot-ci.git'),
    treeStableTimer=None,
    builderNames=["buildbot"]))


# Standard script-driven pipeline: each stage runs the project's own
# .buildbot/<stage>.sh (see add_default_step).
def add_default_pipeline(bldr):
    add_default_step(bldr, "build")
    add_default_step(bldr, "install")
    add_default_step(bldr, "test")
    add_default_step(bldr, "deploy")


factory = util.BuildFactory()


###########################
#        GNUnet           #
###########################
# bootstrap/configure/make/install/uninstall cycle against a
# throw-away prefix in /tmp.
gnunet_make_step = steps.ShellSequence(
    name=util.Interpolate("GNUnet build"),
    env={'GNUNET_PREFIX': '/tmp/gnunet-buildbot/lib',
         'PATH': ["/tmp/gnunet-buildbot/bin", "${PATH}"],
         'TMPDIR': '/tmp/gnunet/'},
    haltOnFailure=True,
    commands=[
        util.ShellArg(command=['./bootstrap'], logname='bootstrap', haltOnFailure=True),
        util.ShellArg(command=['./configure',
                               "--prefix=/tmp/gnunet-buildbot",
                               "--enable-experimental",
                               "--enable-logging=verbose"],
                      logname='configure',
                      haltOnFailure=True),
        util.ShellArg(command=['make'],
                      logname='make',
                      haltOnFailure=True),
        util.ShellArg(command=['make', 'install'],
                      logname='make install',
                      haltOnFailure=True),
        #util.ShellArg(command=['sudo', '/home/buildbot/bin/netjail.sh', 'bash', '-c', "'make check'"],
        #              logname='make check',
        #              warnOnFailure=True,
        #              flunkOnFailure=False), # make check has issues.
        util.ShellArg(command=['make', 'uninstall'],
                      logname='make uninstall',
                      haltOnFailure=True)]
)

gnunet_build_steps = [
    steps.Git(repourl='git://git.gnunet.org/gnunet.git',
              mode='full', method='fresh'),
    gnunet_make_step
]

factory.addSteps(gnunet_build_steps)

############################
#         COVERITY         #
# Occurs: 1st day of month #
############################
cov_factory = util.BuildFactory()
cov_factory.addStep(steps.Git(repourl='git://git.gnunet.org/gnunet.git', mode='full', method='fresh'))
# Instrumented build via cov-build, producing the cov-int/ directory.
cov_factory.addStep(steps.ShellSequence(
    name=util.Interpolate("Git rev. %(prop:got_revision)s build"),
    env={'PATH': "${HOME}/.local/bin:${HOME}/bin:${PATH}"},
    commands=[
        util.ShellArg(command=['./bootstrap'], logname='bootstrap'),
        util.ShellArg(command=['./configure',
                               "--prefix=/tmp/gnunet-buildbot",
                               "--enable-experimental=true"],
                      logname="configure"),
        util.ShellArg(command=['cov-build', '--dir', 'cov-int', 'make'], logname='cov-build'),
    ]))
cov_factory.addStep(steps.ShellCommand(command=['tar', 'czf', 'coverity.tar.gz', 'cov-int/'],
                                       haltOnFailure=True,
                                       name="Packing up"))
# Upload the results to scan.coverity.com (token via buildbot secret).
cov_factory.addStep(steps.ShellCommand(command=['curl',
                                                '--form', util.Interpolate('token=%(secret:coverity_token)s'),
                                                '--form', 'email=mschanzenbach@posteo.de',
                                                '--form', 'version="git master"',
                                                '--form', 'file=@./coverity.tar.gz',
                                                '--form', 'description="Buildbot triggered build"',
                                                'https://scan.coverity.com/builds?project=GNUnet%2Fgnunet'],
                                       haltOnFailure=True,
                                       name="Sending"))


#################################################
#              RELEASE BUILD                    #
# Occurs:                                       #
#  1. Nightly                                   #
#  2. Upon a pushed release tag (vX.Y.Z)        #
#  3. With a commit message containing !tarball #
#################################################
dist_factory = util.BuildFactory()
# https://github.com/buildbot/buildbot/issues/6539 change to "copy" when fixed
dist_factory.addStep(steps.Git(repourl='git://git.gnunet.org/gnunet.git', mode='full', method='clobber'))

# Build the distribution tarball and the doxygen documentation.
dist_factory.addStep(steps.ShellSequence(
    name=util.Interpolate("Git rev. %(prop:got_revision)s build"),
    haltOnFailure=True,
    commands=[
        util.ShellArg(command=['./bootstrap'],
                      logname='bootstrap',
                      haltOnFailure=True),
        util.ShellArg(command=['./configure'],
                      logname='configure',
                      haltOnFailure=True),
        util.ShellArg(command=['git', 'status'],
                      logname='status before dist',
                      haltOnFailure=True),
        util.ShellArg(command=['make', 'dist'],
                      logname="dist",
                      haltOnFailure=True),
        util.ShellArg(command=['make', 'doxygen'],
                      haltOnFailure=True,
                      logname="Doxygen")
    ]))

# Get version number of tarball
cmdmeson = r'ls -1 build/meson-dist/gnunet-*.tar.gz | sed "s/build\/meson-dist\/gnunet-//" | sed "s/.tar.gz//" | tail -n1'
#cmd = 'git describe --tags | sed "s/^v//"'
dist_factory.addStep(steps.SetPropertyFromCommand(hideStepIf=False,
                                                  command=cmdmeson,
                                                  property='gnunet_meson_releasever',
                                                  name="Getting release version"))
dist_factory.addStep(steps.ShellCommand(command=["tar",
                                                 "xf",
                                                 util.Interpolate('build/meson-dist/gnunet-%(prop:gnunet_meson_releasever)s.tar.gz'),
                                                 '-C', 'build'],
                                        haltOnFailure=True,
                                        name="Extracting tarball"))

# Make doxygen
# Try to build dist package
dist_factory.addStep(steps.ShellSequence(
    workdir=util.Interpolate('build/build/gnunet-%(prop:gnunet_meson_releasever)s'),
    name=util.Interpolate("GNUnet %(prop:gnunet_meson_releasever)s tarball build"),
    env={'GNUNET_PREFIX': '/tmp/gnunet-buildbot/lib',
         'PATH': ["/tmp/gnunet-buildbot/bin", "${PATH}"]},
    commands=[
        # NOTE(review): exec-style commands do not expand '$TMPDIR';
        # confirm this mkdir creates the intended directory.
        util.ShellArg(command=['mkdir', '-p', '$TMPDIR'], logname='tmpdir', haltOnFailure=True),
        util.ShellArg(command=['./configure',
                               "--prefix=/tmp/gnunet-buildbot",
                               "--enable-experimental=true",
                               "--enable-logging=verbose",
                               "--mesonbuilddir=tarball_build"],
                      logname='setup',
                      haltOnFailure=True),
        util.ShellArg(command=['make'],
                      logname='compile',
                      haltOnFailure=True),
        util.ShellArg(command=['make', 'install'],
                      logname='install',
                      haltOnFailure=True),
        util.ShellArg(command=['make', 'uninstall'],
                      logname='uninstall',
                      haltOnFailure=True)]
))

# Upload artifact to https://buildbot.gnunet.org/artifacts
dist_factory.addStep(steps.FileUpload(workersrc=util.Interpolate('build/meson-dist/gnunet-%(prop:gnunet_meson_releasever)s.tar.gz'),
                                      mode=0o644,
                                      masterdest=util.Interpolate("~/artifacts/gnunet-%(prop:gnunet_meson_releasever)s.tar.gz"),
                                      url=util.Interpolate("https://buildbot.gnunet.org/artifacts/gnunet-%(prop:gnunet_meson_releasever)s.tar.gz")))


# Update doxygen (TODO skip on nightly?)
dist_factory.addStep(steps.ShellSequence(
    name=util.Interpolate("Deploy Doxygen"),
    workdir=util.Interpolate('build/doc'),
    commands=[
        util.ShellArg(command=['chmod', '-R', 'ag+rX', '../doc'],
                      logname='Permissions',
                      haltOnFailure=True),
        util.ShellArg(command=['rsync', '-a', '--delete',
                               '../doc/doxygen',
                               'handbook@firefly.gnunet.org:~/doc_deployment/'],
                      logname='Deploy',
                      haltOnFailure=True),
    ]))




###########################
#    Fedora COPR build    #
###########################
# Just pokes the COPR webhook; the actual build happens on COPR.
copr_factory = util.BuildFactory()
copr_factory.addStep(steps.ShellCommand(command=["curl",
                                                 "-H", "Content-Type: application/json",
                                                 "--data", "{}",
                                                 "-X", "POST",
                                                 "https://copr.fedorainfracloud.org/webhooks/custom/36992/d316c169-1482-4508-a765-cfb5c0efeb67/gnunet/"],
                                        haltOnFailure=True,
                                        name="Triggering Copr build"))


###########################
#  gnunet-gns-registrar   #
###########################
registrar_factory = util.BuildFactory()
registrar_factory.addStep(steps.Git(repourl='git://git.gnunet.org/gnunet-gns-registrar.git', mode='incremental'))
add_default_pipeline(registrar_factory)

###########################
#    stage.gnunet.org     #
###########################
www_factory = util.BuildFactory()
www_factory.addStep(steps.Git(repourl='git://git.gnunet.org/www.git', mode='incremental'))
add_default_pipeline(www_factory)

###########################
#     www.gnunet.org      #
###########################
www_stable_factory = util.BuildFactory()
www_stable_factory.addStep(steps.Git(repourl='git://git.gnunet.org/www.git', mode='incremental', branch='stable'))
add_default_pipeline(www_stable_factory)

###########################
#     bib.gnunet.org      #
###########################
bib_factory = util.BuildFactory()
bib_factory.addStep(steps.Git(repourl='git://git.gnunet.org/gnunetbib.git', mode='incremental'))
add_default_pipeline(bib_factory)


###########################
#   reclaim.gnunet.org    #
###########################
reclaim_www_stable_factory = util.BuildFactory()
reclaim_www_stable_factory.addStep(steps.Git(repourl='git://git.gnunet.org/www-reclaim.git', mode='incremental', branch='master'))
add_default_pipeline(reclaim_www_stable_factory)


###########################
#     rest.gnunet.org     #
###########################
rest_factory = util.BuildFactory()
rest_factory.addStep(steps.Git(repourl='git://git.gnunet.org/gnunet-rest-api.git', mode='incremental', branch='master'))
add_default_pipeline(rest_factory)

###########################
#     lsd.gnunet.org      #
###########################
# Checks out whichever lsd* repository triggered the build (the
# repourl comes from the change's 'repository' property).
lsd_factory = util.BuildFactory()
lsd_factory.addStep(steps.Git(repourl=util.Property('repository'), mode='full', method="clobber", branch='master'))
add_default_pipeline(lsd_factory)

###########################
#     gana.gnunet.org     #
###########################
gana_factory = util.BuildFactory()
gana_factory.addStep(steps.Git(repourl='git://git.gnunet.org/gana.git', mode='full', method="fresh", branch='master'))
add_default_pipeline(gana_factory)

##############################
#  doc.gnunet.org (Doc-NG)   #
##############################
doc_factory = util.BuildFactory()
doc_factory.addStep(steps.Git(repourl='git://git.gnunet.org/gnunet-handbook.git', alwaysUseLatest=True, mode='full', method="clobber", branch='master'))
add_default_pipeline(doc_factory)



###########################
# Buildbot TODO delete at some point #
###########################
bb_factory = util.BuildFactory()
bb_factory.addStep(steps.Git(repourl='ssh://git@git.gnunet.org/buildbot-ci.git', mode='incremental'))
bb_factory.addStep(steps.ShellCommand(command=["./reload_bb-master.sh"], name="Reload configuration"))


# Bind every factory above to a builder name and worker(s).
BUILDERS.append(
    util.BuilderConfig(name="gnunet-debian-x86_64",
                       workernames=["firefly-x86_64-amdepyc"],
                       factory=factory))

BUILDERS.append(
    util.BuilderConfig(name="gnunet-fedora-aarch64",
                       workernames=["schanzen-aarch64-fedora-meson"],
                       factory=factory))

BUILDERS.append(
    util.BuilderConfig(name="gnunet-debian-x86_64-dev",
                       workernames=["firefly-x86_64-amdepyc"],
                       factory=factory))

BUILDERS.append(
    util.BuilderConfig(name="gnunet-fedora-aarch64-dev",
                       workernames=["schanzen-aarch64-fedora-meson"],
                       factory=factory))


BUILDERS.append(
    util.BuilderConfig(name="gnunet_release",
                       workernames=["firefly-x86_64-amdepyc"],
                       factory=dist_factory))
BUILDERS.append(
    util.BuilderConfig(name="gnunet_coverity",
                       workernames=["firefly-x86_64-amdepyc"],
                       factory=cov_factory))

BUILDERS.append(
    util.BuilderConfig(name="gnunet-gns-registrar",
                       workernames=["firefly-x86_64-amdepyc", "schanzen-aarch64-fedora-meson"],
                       factory=registrar_factory))
BUILDERS.append(
    util.BuilderConfig(name="stage.gnunet.org",
                       workernames=["firefly-x86_64-amdepyc"],
                       factory=www_factory))
BUILDERS.append(
    util.BuilderConfig(name="www.gnunet.org",
                       workernames=["firefly-x86_64-amdepyc"],
                       factory=www_stable_factory))
BUILDERS.append(
    util.BuilderConfig(name="bib.gnunet.org",
                       workernames=["firefly-x86_64-amdepyc"],
                       factory=bib_factory))
BUILDERS.append(
    util.BuilderConfig(name="reclaim.gnunet.org",
                       workernames=["firefly-x86_64-amdepyc"],
                       factory=reclaim_www_stable_factory))
BUILDERS.append(
    util.BuilderConfig(name="rest.gnunet.org",
                       workernames=["firefly-x86_64-amdepyc"],
                       factory=rest_factory))
BUILDERS.append(
    util.BuilderConfig(name="lsd.gnunet.org",
                       workernames=["firefly-x86_64-amdepyc"],
                       factory=lsd_factory))
BUILDERS.append(
    util.BuilderConfig(name="gana.gnunet.org",
                       workernames=["firefly-x86_64-amdepyc"],
                       factory=gana_factory))
BUILDERS.append(
    util.BuilderConfig(name="doc.gnunet.org",
                       workernames=["firefly-x86_64-amdepyc"],
                       factory=doc_factory))
BUILDERS.append(
    util.BuilderConfig(name="buildbot",
                       workernames=["firefly-x86_64-amdepyc"],
                       factory=bb_factory))
BUILDERS.append(
    util.BuilderConfig(name="gnunet_rpm_copr",
                       workernames=["firefly-x86_64-amdepyc"],
                       factory=copr_factory))


# Mail failures of the core GNUnet CI builders to the CI list.
bsg = reporters.BuildStatusGenerator(mode=["exception", "failing", "problem"],
                                     builders=["gnunet-debian-x86_64", "gnunet-fedora-aarch64"])

mn = reporters.MailNotifier(fromaddr="buildbot@firefly.gnunet.org",
                            sendToInterestedUsers=False,
                            extraRecipients=["gnunet-ci@gnunet.org"],
                            lookup="gnunet.org",
                            generators=[bsg])
SERVICES.append(mn)




#############################################
# 19: CONTAINER FACTORY #####################
#############################################
##
# These factories use the standard container worker.
WORKERS.append(Worker("container-worker", "container-pass"))


# Container Job Generator Functions
def ingest_job_config(configPath, jobName):
    """Parse a job's config.ini content and return ``{jobName: {key: value}}``.

    Despite the name, *configPath* is the *content* of the config file
    (captured from the remote ``cat`` step's stdout), fed to the
    module-level ConfigParser ``ini`` via ``read_string()``.  Only the
    [build] section is read.
    """
    configDict = {jobName: {}}
    # NOTE(review): 'ini' is a module-level parser shared across calls;
    # read_string() accumulates sections between invocations -- confirm
    # that later reads are not polluted by earlier jobs.
    ini.read_string(configPath)
    for key in ini["build"]:
        configDict[jobName][key] = ini['build'][key]
    print(configDict)
    # Fix: dropped the former 'configDict.update(configDict)' -- updating
    # a dict with itself is a no-op.
    return configDict


# Search for configs, and ingest
def handle_job_config(jobDirPath, jobName, repoName, configPath, configExist):
    """Return the configuration dict for one CI stage.

    *configExist* is the result code of the remote ``cat config.ini``
    command: 0 (SUCCESS) means a per-job config exists and is parsed,
    any other value selects the built-in defaults below.
    """
    if configExist == 0:
        print(f"Ingesting Job Config: {configPath}")
        return ingest_job_config(configPath, jobName)
    print("No job config; Using default params")
    # Default job config parameters used when the repo ships no config.ini.
    return {jobName: {"HALT_ON_FAILURE": True,
                      "WARN_ON_FAILURE": False,
                      "CONTAINER_BUILD": True,
                      "CONTAINER_NAME": repoName,
                      "CONTAINER_ARCH": "amd64"}}


class GenerateStagesCommand(buildstep.ShellMixin, steps.BuildStep):
    """Discover a repo's CI stages on the worker and add one step per stage.

    Runs the configured command (an ``ls`` of contrib/ci/jobs), fetches
    each stage's config.ini with a second remote command, and appends a
    container build step for every stage found.
    """

    def __init__(self, REPO_NAME, **kwargs):
        self.REPO_NAME = REPO_NAME
        kwargs = self.setupShellMixin(kwargs)
        super().__init__(**kwargs)
        # Captures the stdout of the 'ls' so the stage list can be parsed.
        self.observer = logobserver.BufferLogObserver()
        self.addLogObserver('stdio', self.observer)

    def extract_stages(self, stdout):
        """Return the non-empty, stripped lines of *stdout* as the stage list."""
        stages = []
        for line in stdout.split('\n'):
            stage = str(line.strip())
            if stage:
                stages.append(stage)
        return stages

    @defer.inlineCallbacks
    def run(self):
        CONTAINER_WORKDIR = f"/home/container-worker/workspace/{self.REPO_NAME}"
        CI_JOBS_PATH = f"{CONTAINER_WORKDIR}/contrib/ci/jobs"
        # run 'ls <project_root>/contrib/ci/jobs/' to get the list of stages
        cmd = yield self.makeRemoteShellCommand()
        yield self.runCommand(cmd)
        jobDirs = []

        # if the command passes extract the list of stages
        result = cmd.results()
        if result == util.SUCCESS:
            jobDirs = self.extract_stages(self.observer.getStdout())
            print(f"this is jobDirs list: {jobDirs}")
            self.configDict = {}
            print(f"Remote cmd stdout: {self.observer.getStdout()}")
            print(f"cmd.results: {cmd.results()}")
            for stage in jobDirs:
                jobDirPath = f"{CI_JOBS_PATH}/{stage}"
                # Fresh observer per stage so we capture this config.ini.
                observer = logobserver.BufferLogObserver()
                self.addLogObserver('stdio', observer)
                cmd1 = yield self.makeRemoteShellCommand(
                    command=["cat", f"{jobDirPath}/config.ini"])
                yield self.runCommand(cmd1)
                print(f"cmd1.results: {cmd1.results()}")
                print(f"Second command stdout: {observer.getStdout()}")
                print(f"Current stage: {stage}")
                print(jobDirPath)
                self.configDict.update(
                    handle_job_config(
                        jobDirPath, stage, self.REPO_NAME,
                        observer.getStdout(), cmd1.results()))
            print(self.configDict)
            # create a container step for each stage and
            # add them to the build
            convstr2bool = ast.literal_eval
            self.build.addStepsAfterCurrentStep([
                container_add_step(
                    convstr2bool(
                        str(self.configDict[stage]["HALT_ON_FAILURE"])),
                    convstr2bool(
                        str(self.configDict[stage]["WARN_ON_FAILURE"])),
                    convstr2bool(
                        str(self.configDict[stage]["CONTAINER_BUILD"])),
                    self.configDict[stage]["CONTAINER_NAME"],
                    container_factory,
                    CONTAINER_WORKDIR,
                    self.REPO_NAME,
                    stage,
                    self.configDict[stage]["CONTAINER_ARCH"],
                    f"contrib/ci/jobs/{stage}/job.sh")
                for stage in jobDirs
            ])

        return result

# List of repos to add to container factory.
# Repositories whose CI runs through the generic container worker.
# Format: "<git host>/<repo name>"; the trailing repo name doubles as
# the builder/container name below.
container_repos = ["git.gnunet.org/gnunet",
                   "git.taler.net/challenger",
                   "git.taler.net/donau",
                   "git.taler.net/exchange",
                   "git.taler.net/libeufin",
                   "git.taler.net/taler-rust",
                   "git.taler.net/depolymerization",
                   "git.taler.net/merchant",
                   "git.taler.net/sandcastle-ng",
                   "git.taler.net/sync",
                   "git.taler.net/taler-android",
                   "git.taler.net/taler-mailbox",
                   "git.taler.net/taldir",
                   "git.taler.net/taler-typescript-core",]

for repo in container_repos:

    # Prepare to read job configs
    # NOTE(review): 'ini' and 'container_factory' are module-level names
    # rebound on every loop iteration, but they are consumed later at
    # *build* time (by ingest_job_config / GenerateStagesCommand.run), so
    # all builds see the values from the final iteration -- confirm this
    # is intended.
    ini = configparser.ConfigParser()
    ini.optionxform = str  # keep option names case-sensitive

    # Factory-wide variables
    REPO_NAME = repo.rsplit('/', 1)[1]
    REPO_URL = "git://" + repo + ".git"
    CONTAINER_WORKDIR = f"/home/container-worker/workspace/{REPO_NAME}"
    CI_JOBS_PATH = f"{CONTAINER_WORKDIR}/contrib/ci/jobs"

    # Create a factory
    container_factory = util.BuildFactory()
    container_factory.workdir = CONTAINER_WORKDIR

    # Setup workspace: if the workdir exists, fix its permissions via a
    # throwaway container, then recreate it from scratch either way.
    container_factory.addStep(ShellCommand(
        name="workspace",
        descriptionDone="Workspace directory check",
        command=f"test -d {CONTAINER_WORKDIR} && podman run --log-driver=none --rm --volume {CONTAINER_WORKDIR}:/workdir docker.io/library/debian:bookworm-slim chmod -R 777 /workdir ; rm -rf {CONTAINER_WORKDIR} && mkdir -p {CONTAINER_WORKDIR} || mkdir -p {CONTAINER_WORKDIR}",
        haltOnFailure=True,
    ))

    # Ensure repo is cloned or clean.
    # Git() will clone repo if it doesn't exist.
    # Method clobber removes directory and makes a fresh clone.
    # Shallow set to "True" defaults to a depth of 1.
    # Will checkout value of "branch" property from job properties.
    # https://docs.buildbot.net/latest/manual/configuration/steps/source_git.html
    container_factory.addStep(Git(
        name="git",
        repourl=REPO_URL,
        branch=util.Interpolate('%(src::branch)s'),
        mode='full',
        method='clobber',
        shallow=True,
        submodules=True,
        haltOnFailure=True,
    ))

    # Discover the repo's CI stages and expand them into build steps.
    container_factory.addStep(GenerateStagesCommand(
        REPO_NAME,
        name="Generate build stages",
        command=f"ls {CI_JOBS_PATH}",
        haltOnFailure=True))

    BUILDERS.append(util.BuilderConfig(
        name=f"{REPO_NAME}-builder",
        workernames=["container-worker"],
        factory=container_factory
    ))

    # Only enable this scheduler for debugging!
    # Will run builders with 1 minute of waiting inbetween builds
    # SCHEDULERS.append(schedulers.Periodic(
    #     name=f"{REPO_NAME}-minutely",
    #     builderNames=[f"{REPO_NAME}-builder"],
    #     periodicBuildTimer=60
    # ))

    # Build on every push to master, once the tree is stable for 30s.
    SCHEDULERS.append(schedulers.SingleBranchScheduler(
        name=f"{REPO_NAME}-container-scheduler",
        change_filter=util.ChangeFilter(
            branch="master",
            project_re=f"({REPO_NAME})"
        ),
        treeStableTimer=30,
        builderNames=[f"{REPO_NAME}-builder"]
    ))

    # Per-repo status mail to ci-<repo>@taler.net via local SMTP.
    SERVICES.append(reporters.MailNotifier(
        fromaddr="buildbot@taler.net",
        # notify from pass to fail, and viceversa.
        generators=[BuildStatusGenerator(
            mode=('change','problem','failing','exception',),
            builders=[f"{REPO_NAME}-builder",],
            message_formatter=reporters.MessageFormatter(
                template_type='plain',
                want_logs_content=True,
            ),
        )],
        sendToInterestedUsers=False,
        useTls=False,
        relayhost="localhost",
        smtpPort=25,
        dumpMailsToLog=True,
        extraRecipients=[f"ci-{REPO_NAME}@taler.net"]
    ))


############## sandcastle-ng Scheduler #################################


# Periodic scheduler for sandcastle-ng.
# Runs every 2 hours (60 seconds * 60 * 2)
SCHEDULERS.append(schedulers.Periodic(
    name="sandcastle-ng-periodic-scheduler",
    builderNames=["sandcastle-ng-builder"],
    change_filter=util.ChangeFilter(branch="master"),
    periodicBuildTimer=60*60*2
))


################ 99: debug stuff JOB ###################################

# This does nothing, just a starting point for a factory.
DEBUG_FACTORY = util.BuildFactory()
DEBUG_FACTORY.addStep(
    ShellCommand(
        name="echo debug",
        description="just echoing a word",
        descriptionDone="builder responded",
        command=["echo", "I'm here!"]
    )
)


##################################################################
#################### General purpose #############################
##################################################################

# Compute array of the names of all of our builders.
# Fix: use a list comprehension instead of map() -- a map object is a
# lazy, single-use iterator, so any second consumer would see it empty;
# a list matches the "array" this is documented to be.
BUILDER_LIST = [builder.name for builder in BUILDERS]

####### GENERAL PURPOSE BUILDBOT SERVICES #######################

# Alert the addresses in BUILDER_EMAIL_ADDRESSES about state changes of
# the builders listed in EMAIL_ALERTS, via the local SMTP relay.
SERVICES.append(reporters.MailNotifier(
    fromaddr="testbuild@taler.net",
    # notify from pass to fail, and viceversa.
    generators=[BuildStatusGenerator(
        mode=('change','problem','failing','exception',),
        builders=EMAIL_ALERTS,
        message_formatter=reporters.MessageFormatter(
            template_type='plain',
            want_logs_content=True,
        ),
    )],
    sendToInterestedUsers=False,
    useTls=False,
    relayhost="localhost",
    smtpPort=25,
    dumpMailsToLog=True,
    extraRecipients=BUILDER_EMAIL_ADDRESSES
))


############# GENERAL PURPOSE SCHEDULERS ##########################

# Workers that are done on wallet or deployment changes to master
SCHEDULERS.append(schedulers.SingleBranchScheduler(
    name="taler-healthcheck-scheduler",
    change_filter=util.ChangeFilter(
        branch="master",
        project_re="(taler-typescript-core|taler-deployment)"
    ),
    treeStableTimer=None,
    builderNames=WALLETCHANGE_TRIGGERS
))

# Code-change builds for the master/stable branches of the core repos.
SCHEDULERS.append(schedulers.SingleBranchScheduler(
    name="all-scheduler",
    change_filter=util.ChangeFilter(
        branch_re="(master|stable)",
        project_re="(taler-typescript-core|exchange|"
                   "merchant|taler-deployment|twister|sync|"
                   "taler-merchant-demos)"
    ),
    treeStableTimer=None,
    builderNames=CODECHANGE_TRIGGERS
))

# Scheduler for all nightly builds (daily at 06:00).
SCHEDULERS.append(schedulers.Nightly(
    name="nightly-scheduler",
    builderNames=list(NIGHTLY_TRIGGERS),
    branch="master",
    hour=6,
    minute=0
))

# Provide "force" button in the web UI for every builder.
SCHEDULERS.append(schedulers.ForceScheduler(
    name="force-scheduler",
    buttonName="Force build",
    builderNames=list(BUILDER_LIST)
))

#########################################################
####### Actual configuration initialization #############
#########################################################

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}

# Secrets: files under this directory become named secrets.
c['secretsProviders'] = [secrets.SecretInAFile(dirname="/home/buildbot-master/secrets")]

# Wire up everything accumulated above.
c["workers"] = WORKERS
c["builders"] = BUILDERS
c["schedulers"] = SCHEDULERS
c["services"] = SERVICES

# Silence warning and allow very basic phoning home.
c["buildbotNetUsageData"] = "basic"

c["title"] = "GNUnet Builder"
c["titleURL"] = "https://buildbot.gnunet.org"

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server is visible.
c['buildbotURL'] = "https://buildbot.gnunet.org/"

# This specifies what database buildbot uses to store its
# state. You can leave this at its default for all but the
# largest installations.
c["db"] = {
    "db_url": "sqlite:///state.sqlite",
}

# the 'change_source' setting tells the buildmaster how it should
# find out about source code changes.
# NOTE(review): this password is hard-coded even though a SecretInAFile
# provider is configured above -- consider moving it into the secrets
# directory.
pbSource = changes.PBChangeSource(port="tcp:19990:interface=127.0.0.1",
                                  user="git-buildbot",
                                  passwd="Aer3eari")

# Poll the GNUnet repository directly: all branches, every 300 seconds,
# with an initial poll at startup.
pollGnunetSource = changes.GitPoller(repourl='git://git.gnunet.org/gnunet.git',
                                     branches=True,
                                     pollInterval=300,
                                     pollAtLaunch=True,
                                     project="gnunet")
c["change_source"] = [pollGnunetSource, pbSource]

# 'protocols' contains information about protocols which master
# will use for communicating with workers. You must define at
# least 'port' option that workers could connect to your master
# with this protocol.
# 'port' must match the value configured into
# the workers (with their --master option)
c["protocols"] = {"pb": {"port": "tcp:19989"}}

# Load admin password for the web UI from the secrets directory.
with open("/home/buildbot-master/secrets/admin") as fh:
    www_pass = fh.read().strip()

# minimalistic config to activate new web UI
# -- formerly commented out as not packaged properly in Debian and others, see
# https://bugzilla.redhat.com/show_bug.cgi?id=1557687
c["www"] = {
    "port": "tcp:8009:interface=127.0.0.1",
    "default_page": 'builders',
    "plugins": {
        "waterfall_view": True,
        "console_view": True,
        "grid_view": True,
    },
    "auth": util.UserPasswordAuth([('admin',www_pass)]),
    "allowed_origins": ["https://*.taler.net","https://*.gnunet.org"],
    "avatar_methods": [],
}