[153421] contrib/buildbot-test/master.cfg
larryv@macports.org
larryv@macports.org
Fri Sep 30 03:05:43 CEST 2016
Revision: 153421
https://trac.macports.org/changeset/153421
Author: larryv@macports.org
Date: 2016-09-29 18:05:42 -0700 (Thu, 29 Sep 2016)
Log Message:
-----------
buildbot: Standardize on PEP 8 quoting
Use single quotes throughout, except for triple-quoted strings and
strings that themselves contain single quotes.
https://www.python.org/dev/peps/pep-0008/#string-quotes
Modified Paths:
--------------
contrib/buildbot-test/master.cfg
Modified: contrib/buildbot-test/master.cfg
===================================================================
--- contrib/buildbot-test/master.cfg 2016-09-30 00:19:41 UTC (rev 153420)
+++ contrib/buildbot-test/master.cfg 2016-09-30 01:05:42 UTC (rev 153421)
@@ -28,16 +28,16 @@
config = {
'production': False,
- 'privkey': "",
+ 'privkey': '',
'slaveport': 9989,
'httpport': 8010,
- 'buildboturl': "http://localhost:8010/",
- 'htpasswdfile': "htpasswd",
- 'mpbbsvnurl': "https://svn.macports.org/repository/macports/contrib/mp-buildbot",
- 'svnurl': "https://svn.macports.org/repository/macports/trunk",
- 'archivesite': "https://packages.macports.org",
- 'slaveprefix': "/opt/local",
- 'toolsprefix': "/opt/mports",
+ 'buildboturl': 'http://localhost:8010/',
+ 'htpasswdfile': 'htpasswd',
+ 'mpbbsvnurl': 'https://svn.macports.org/repository/macports/contrib/mp-buildbot',
+ 'svnurl': 'https://svn.macports.org/repository/macports/trunk',
+ 'archivesite': 'https://packages.macports.org',
+ 'slaveprefix': '/opt/local',
+ 'toolsprefix': '/opt/mports',
'deploy': {}
}
@@ -48,8 +48,8 @@
if configdata[key]:
config[key] = configdata[key]
-path_base = "/usr/bin:/bin:/usr/sbin:/sbin"
-path_ports = os.path.join(config['toolsprefix'], "bin") + ":" + path_base
+path_base = '/usr/bin:/bin:/usr/sbin:/sbin'
+path_ports = os.path.join(config['toolsprefix'], 'bin') + ':' + path_base
path_docs = path_ports
# Allow spaces and tabs in property values
@@ -111,11 +111,11 @@
def change_has_ports(change):
for f in change.files:
- if "_resources" in f:
+ if '_resources' in f:
continue
# should actually skip changes to files/ only, but only if we know the
# last build of the port succeeded
- if "dports" in f and ("Portfile" in f or "files" in f):
+ if 'dports' in f and ('Portfile' in f or 'files' in f):
return True
return False
@@ -143,10 +143,10 @@
wwwfilter = ChangeFilter(filter_fn=change_has_www)
guidefilter = ChangeFilter(filter_fn=change_has_guide)
-base_buildernames = ["base-"+plat for plat in build_platforms if 'legacy' not in plat and '10.6_i386' not in plat]
-portwatcher_buildernames = ["ports-"+plat+"-watcher" for plat in build_platforms if 'linux' not in plat and '10.5_ppc' != plat]
-portbuilder_buildernames = ["ports-"+plat+"-builder" for plat in build_platforms if 'linux' not in plat and '10.5_ppc' != plat]
-portbuilder_triggerables = ["ports-"+plat+"-trigger" for plat in build_platforms if 'linux' not in plat and '10.5_ppc' != plat]
+base_buildernames = ['base-'+plat for plat in build_platforms if 'legacy' not in plat and '10.6_i386' not in plat]
+portwatcher_buildernames = ['ports-'+plat+'-watcher' for plat in build_platforms if 'linux' not in plat and '10.5_ppc' != plat]
+portbuilder_buildernames = ['ports-'+plat+'-builder' for plat in build_platforms if 'linux' not in plat and '10.5_ppc' != plat]
+portbuilder_triggerables = ['ports-'+plat+'-trigger' for plat in build_platforms if 'linux' not in plat and '10.5_ppc' != plat]
from buildbot.schedulers.basic import SingleBranchScheduler
from buildbot.schedulers.forcesched import ForceScheduler
@@ -155,34 +155,34 @@
c['schedulers'] = [
SingleBranchScheduler(
- name="base",
+ name='base',
treeStableTimer=None,
change_filter = basefilter,
builderNames=base_buildernames),
SingleBranchScheduler(
- name="ports",
+ name='ports',
treeStableTimer=None,
change_filter = portsfilter,
builderNames=portwatcher_buildernames),
ForceScheduler(
- name="base_force",
+ name='base_force',
builderNames=base_buildernames),
# ForceScheduler(
-# name="portbuilder_force",
+# name='portbuilder_force',
# builderNames=portbuilder_buildernames,
# properties=[StringParameter(
-# name="portname",
-# label="Port name:",
-# default="",
+# name='portname',
+# label='Port name:',
+# default='',
# required=True)
# ]),
ForceScheduler(
- name="portwatcher_force",
+ name='portwatcher_force',
builderNames=portwatcher_buildernames,
properties=[StringParameter(
- name="portlist",
- label="Port list:",
- default="",
+ name='portlist',
+ label='Port list:',
+ default='',
size=30,
required=True)
])
@@ -192,24 +192,24 @@
if 'www' in config['deploy']:
c['schedulers'] += [
SingleBranchScheduler(
- name="www",
+ name='www',
treeStableTimer=300,
change_filter = wwwfilter,
- builderNames=["docs-www"]),
+ builderNames=['docs-www']),
ForceScheduler(
- name="www_force",
- builderNames=["docs-www"])]
+ name='www_force',
+ builderNames=['docs-www'])]
if 'guide' in config['deploy']:
c['schedulers'] += [
SingleBranchScheduler(
- name="guide",
+ name='guide',
treeStableTimer=300,
change_filter = guidefilter,
- builderNames=["docs-guide"]),
+ builderNames=['docs-guide']),
ForceScheduler(
- name="guide_force",
- builderNames=["docs-www"])]
+ name='guide_force',
+ builderNames=['docs-www'])]
for i in range(len(portbuilder_buildernames)):
c['schedulers'].append(Triggerable(
@@ -237,31 +237,31 @@
# repourl=Interpolate('https://svn.macports.org/repository/macports/%(src::branch:-trunk)s/base'),
base_factory.addStep(SVN(
repourl='https://svn.macports.org/repository/macports/trunk/base',
- method="copy",
- env={"PATH": path_ports}))
+ method='copy',
+ env={'PATH': path_ports}))
base_factory.addStep(Configure(command=WithProperties("""
env PATH=/usr/bin:/bin:/usr/sbin:/sbin ./configure --enable-readline \
--prefix=%(workdir)s/opt/local \
--with-applications-dir=%(workdir)s/opt/local/Applications \
--with-install-user=`id -un` \
--with-install-group=`id -gn` \
-"""),logfiles={"config.log": "config.log"}))
-base_factory.addStep(Compile(command="make -j`sysctl -n hw.activecpu`"))
+"""),logfiles={'config.log': 'config.log'}))
+base_factory.addStep(Compile(command='make -j`sysctl -n hw.activecpu`'))
base_factory.addStep(ShellCommand(
- command="make install",
- name="install",
- description=["installing"],
- descriptionDone=["install"]))
+ command='make install',
+ name='install',
+ description=['installing'],
+ descriptionDone=['install']))
base_factory.addStep(ShellCommand(
- command="make test",
- name="test",
- description=["testing"],
- descriptionDone=["test"]))
+ command='make test',
+ name='test',
+ description=['testing'],
+ descriptionDone=['test']))
base_factory.addStep(ShellCommand(
- command=WithProperties("make distclean; rm -rf %(workdir)s/opt/local"),
- name="clean",
- description=["cleaning"],
- descriptionDone=["clean"]))
+ command=WithProperties('make distclean; rm -rf %(workdir)s/opt/local'),
+ name='clean',
+ description=['cleaning'],
+ descriptionDone=['clean']))
# custom class to make the file list available on the slave...
class SetPropertyFromCommandWithPortlist(SetPropertyFromCommand):
@@ -284,7 +284,7 @@
def getText(self, cmd, results):
if self.hasProperty('subportlist'):
- return ["Port list: %s" % (self.getProperty('subportlist'))]
+ return ['Port list: %s' % (self.getProperty('subportlist'))]
else:
# let ShellCommand describe
return ShellCommand.getText(self, cmd, results)
@@ -317,16 +317,16 @@
url = c['buildbotURL']
if not url.endswith('/'):
url += '/'
- url += "builders/%s/builds/%s" % (buildername, buildnumber)
+ url += 'builders/%s/builds/%s' % (buildername, buildnumber)
return url
class TriggerWithPortlist(Trigger):
def getSchedulersAndProperties(self):
sp = []
for scheduler in self.schedulerNames:
- for port in self.build.getProperty("subportlist").split():
+ for port in self.build.getProperty('subportlist').split():
props = self.set_properties.copy()
- props["portname"] = port
+ props['portname'] = port
sp.append([scheduler, props])
return sp
@@ -341,32 +341,32 @@
# get mp-buildbot; we'll do the checkout of base and dports via these scripts
portwatcher_factory.addStep(SVN(
repourl=config['mpbbsvnurl'],
- env={"PATH": path_ports},
+ env={'PATH': path_ports},
alwaysUseLatest=True,
preferLastChangedRev=True,
- mode="incremental",
- workdir=os.path.join(portwatcher_factory.workdir, "mpbb"),
+ mode='incremental',
+ workdir=os.path.join(portwatcher_factory.workdir, 'mpbb'),
haltOnFailure=True))
portwatcher_factory.addStep(ShellCommand(
command=['./mpbb/mpbb', '--prefix', WithProperties(prefix), 'cleanup'],
- name="cleanup",
- description=["cleaning"],
- descriptionDone=["clean"]))
+ name='cleanup',
+ description=['cleaning'],
+ descriptionDone=['clean']))
portwatcher_factory.addStep(ShellCommand(
command=['./mpbb/mpbb', '--prefix', WithProperties(prefix), 'selfupdate'],
- name="selfupdate",
- description=["updating", "MacPorts"],
- descriptionDone=["update", "MacPorts"],
+ name='selfupdate',
+ description=['updating', 'MacPorts'],
+ descriptionDone=['update', 'MacPorts'],
haltOnFailure=True))
portwatcher_factory.addStep(ShellCommand(
command=['./mpbb/mpbb', '--prefix', WithProperties(prefix), 'checkout', '--svn-url', config['svnurl']],
timeout=3600,
- name="checkout",
- description=["syncing", "ports"],
- descriptionDone=["sync", "ports"],
+ name='checkout',
+ description=['syncing', 'ports'],
+ descriptionDone=['sync', 'ports'],
haltOnFailure=True))
def extract_subportlist(rc, stdout, stderr):
@@ -382,19 +382,19 @@
"""
if rc != 0:
# Set an empty subport list on error
- return {"subportlist": ""}
+ return {'subportlist': ''}
subports = [x.strip() for x in stdout.splitlines()]
- return {"subportlist": " ".join(sorted(subports))}
+ return {'subportlist': ' '.join(sorted(subports))}
portwatcher_factory.addStep(SetPropertyFromCommandWithPortlist(
command=WithProperties('./mpbb/mpbb list-subports %(fullportlist)s'),
extract_fn=extract_subportlist,
- name="subports",
- description=["listing", "subports"]))
+ name='subports',
+ description=['listing', 'subports']))
portwatcher_factory.addStep(TriggerWithPortlist(
schedulerNames=[triggerable],
- set_properties={"triggered_by": make_build_url},
+ set_properties={'triggered_by': make_build_url},
waitForFinish=True,
updateSourceStamp=True))
@@ -402,9 +402,9 @@
# (Current approach is not so useful as it is not incremental;
# ideally this would already be displayed during the Trigger step.)
portwatcher_factory.addStep(ShellCommand(
- command=["cat", os.path.join(logdir, "ports-progress.txt")],
- name="summary",
- description=["summary"]))
+ command=['cat', os.path.join(logdir, 'ports-progress.txt')],
+ name='summary',
+ description=['summary']))
return portwatcher_factory
@@ -413,33 +413,33 @@
portbuilder_factory = BuildFactory()
portbuilder_factory.useProgress = False
portbuilder_factory.workdir = '../build'
-logdir = os.path.join(portbuilder_factory.workdir, "logs")
+logdir = os.path.join(portbuilder_factory.workdir, 'logs')
portbuilder_factory.addStep(Compile(
command=['./mpbb/mpbb', '--prefix', WithProperties(prefix), 'install-dependencies', WithProperties('%(portname)s')],
- name="install-dependencies",
- description=["installing", "dependencies", "of", WithProperties("%(portname)s")],
- descriptionDone=["install", "dependencies", "of", WithProperties("%(portname)s")],
- logfiles={"dependencies": os.path.join(logdir, "dependencies-progress.txt")},
+ name='install-dependencies',
+ description=['installing', 'dependencies', 'of', WithProperties('%(portname)s')],
+ descriptionDone=['install', 'dependencies', 'of', WithProperties('%(portname)s')],
+ logfiles={'dependencies': os.path.join(logdir, 'dependencies-progress.txt')},
haltOnFailure=True))
portbuilder_factory.addStep(Compile(
command=['./mpbb/mpbb', '--prefix', WithProperties(prefix), 'install-port', WithProperties('%(portname)s')],
- name="install-port",
- description=["installing", WithProperties("%(portname)s")],
- descriptionDone=["install", WithProperties("%(portname)s")],
+ name='install-port',
+ description=['installing', WithProperties('%(portname)s')],
+ descriptionDone=['install', WithProperties('%(portname)s')],
logfiles={
- "files": os.path.join(logdir, "port-contents.txt"),
- "statistics": os.path.join(logdir, "port-statistics.txt"),
- "main.log": os.path.join(logdir, "main.log")
+ 'files': os.path.join(logdir, 'port-contents.txt'),
+ 'statistics': os.path.join(logdir, 'port-statistics.txt'),
+ 'main.log': os.path.join(logdir, 'main.log')
},
haltOnFailure=True))
portbuilder_factory.addStep(ShellCommand(
command=['./mpbb/mpbb', '--prefix', WithProperties(prefix), 'gather-archives', '--archive-site', config['archivesite'], '--staging-dir', ulpath],
- name="gather-archives",
- description=["gathering", "archives"],
- descriptionDone=["gather", "archives"],
+ name='gather-archives',
+ description=['gathering', 'archives'],
+ descriptionDone=['gather', 'archives'],
haltOnFailure=True))
# upload archives from build slave to master
@@ -450,10 +450,10 @@
# XXX: move deploy_archives.sh functionality to mp-buildbot
# sign generated binaries and sync to download server (if distributable)
if config['production']:
- portbuilder_factory.addStep(MasterShellCommand(command=["./deploy_archives.sh", WithProperties(ulpath_unique)],
- name="deploy-archives",
- description=["deploying", "archives"],
- descriptionDone=["deploy", "archives"],
+ portbuilder_factory.addStep(MasterShellCommand(command=['./deploy_archives.sh', WithProperties(ulpath_unique)],
+ name='deploy-archives',
+ description=['deploying', 'archives'],
+ descriptionDone=['deploy', 'archives'],
env={'PRIVKEY': config['privkey'],
'DLHOST': dlhost,
'DLPATH': dlpath}))
@@ -461,31 +461,31 @@
# TODO: do we want to upload the individual logs so maintainers can review them?
portbuilder_factory.addStep(ShellCommand(
command=['./mpbb/mpbb', '--prefix', WithProperties(prefix), 'cleanup'],
- name="cleanup",
- description=["cleaning"],
- descriptionDone=["clean"],
+ name='cleanup',
+ description=['cleaning'],
+ descriptionDone=['clean'],
alwaysRun=True))
def make_rsync_deploy_steps(host, user, sshkeyfile, sshknownhostsfile, srcpath, destpath):
return [
- FileDownload(name="ssh key",
- description="transferring",
- descriptionDone="transfer",
+ FileDownload(name='ssh key',
+ description='transferring',
+ descriptionDone='transfer',
mastersrc=sshkeyfile,
- slavedest="ssh_key",
+ slavedest='ssh_key',
mode=0600),
- FileDownload(name="ssh known_hosts",
- description="transferring",
- descriptionDone="transfer",
+ FileDownload(name='ssh known_hosts',
+ description='transferring',
+ descriptionDone='transfer',
mastersrc=sshknownhostsfile,
- slavedest="ssh_known_hosts",
+ slavedest='ssh_known_hosts',
mode=0600),
- ShellCommand(name="rsync",
- description="deploying",
- descriptionDone="deploy",
- command="rsync -avzhC --delay-updates --delete-delay %s/ %s@%s:%s/" % (srcpath, user, host, destpath),
- env={'RSYNC_RSH': "ssh -i ssh_key -oUserKnownHostsFile=ssh_known_hosts"})
+ ShellCommand(name='rsync',
+ description='deploying',
+ descriptionDone='deploy',
+ command='rsync -avzhC --delay-updates --delete-delay %s/ %s@%s:%s/' % (srcpath, user, host, destpath),
+ env={'RSYNC_RSH': 'ssh -i ssh_key -oUserKnownHostsFile=ssh_known_hosts'})
]
if 'www' in config['deploy']:
@@ -493,9 +493,9 @@
# TODO: incremental mode with cleanup?
docs_www_factory.addStep(SVN(
repourl='https://svn.macports.org/repository/macports/trunk/www',
- mode="full",
- method="copy",
- workdir="www"))
+ mode='full',
+ method='copy',
+ workdir='www'))
# TODO: validate/lint files
docs_www_factory.addSteps(
make_rsync_deploy_steps(
@@ -503,7 +503,7 @@
user=config['deploy']['www']['user'],
sshkeyfile=config['deploy']['www']['sshkeyfile'],
sshknownhostsfile=config['deploy']['www']['sshknownhostsfile'],
- srcpath="www",
+ srcpath='www',
destpath=config['deploy']['www']['destpath']))
if 'guide' in config['deploy']:
@@ -511,26 +511,26 @@
# TODO: incremental mode with cleanup?
docs_guide_factory.addStep(SVN(
repourl='https://svn.macports.org/repository/macports/trunk/doc-new',
- mode="full",
- method="copy",
- workdir="guide"))
+ mode='full',
+ method='copy',
+ workdir='guide'))
# TODO: check for existence of tools in toolsprefix
docs_guide_factory.addStep(Compile(
- name="validate",
- description="validating",
- descriptionDone="validate",
- command="make validate",
- workdir="guide"))
+ name='validate',
+ description='validating',
+ descriptionDone='validate',
+ command='make validate',
+ workdir='guide'))
docs_guide_factory.addStep(Compile(
- command="make all",
- workdir="guide"))
+ command='make all',
+ workdir='guide'))
docs_guide_factory.addSteps(
make_rsync_deploy_steps(
host=config['deploy']['guide']['host'],
user=config['deploy']['guide']['user'],
sshkeyfile=config['deploy']['guide']['sshkeyfile'],
sshknownhostsfile=config['deploy']['guide']['sshknownhostsfile'],
- srcpath="guide",
+ srcpath='guide',
destpath=config['deploy']['guide']['destpath']))
@@ -543,59 +543,59 @@
baseslaves = {}
slavenames = slavedata['slaves'].keys()
for plat in build_platforms:
- baseslaves[plat] = filter(lambda x: x.endswith(plat+"-base"), slavenames)
- portsslaves[plat] = filter(lambda x: x.endswith(plat+"-ports"), slavenames)
+ baseslaves[plat] = filter(lambda x: x.endswith(plat+'-base'), slavenames)
+ portsslaves[plat] = filter(lambda x: x.endswith(plat+'-ports'), slavenames)
env_buildinfo = {
- "BUILDBOT_BUILDERNAME": WithProperties("%(buildername)s"),
- "BUILDBOT_BUILDNUMBER": WithProperties("%(buildnumber)s"),
- "BUILDBOT_BUILDURL": make_build_url
+ 'BUILDBOT_BUILDERNAME': WithProperties('%(buildername)s'),
+ 'BUILDBOT_BUILDNUMBER': WithProperties('%(buildnumber)s'),
+ 'BUILDBOT_BUILDURL': make_build_url
}
c['builders']=[]
-extract_os = re.compile("10\.\d+")
+extract_os = re.compile('10\.\d+')
for plat in build_platforms:
os_match = extract_os.search(plat)
os_version = os_match.group(0) if os_match else plat
if 'legacy' not in plat and '10.6_i386' not in plat:
c['builders'] += [
BuilderConfig(
- name="base-" + plat,
- slavenames=["base-" + plat],
+ name='base-' + plat,
+ slavenames=['base-' + plat],
factory=base_factory,
- tags=["base", os_version],
- env=merge_dicts(env_buildinfo, {"PATH": path_base}))]
+ tags=['base', os_version],
+ env=merge_dicts(env_buildinfo, {'PATH': path_base}))]
if 'linux' not in plat and '10.5_ppc' != plat:
c['builders'] += [
BuilderConfig(
- name="ports-" + plat + "-watcher",
- slavenames=["ports-" + plat],
- factory=make_portwatcher_factory("ports-" + plat + "-trigger"),
- tags=["portwatcher", os_version],
- env=merge_dicts(env_buildinfo, {"PATH": path_ports})),
+ name='ports-' + plat + '-watcher',
+ slavenames=['ports-' + plat],
+ factory=make_portwatcher_factory('ports-' + plat + '-trigger'),
+ tags=['portwatcher', os_version],
+ env=merge_dicts(env_buildinfo, {'PATH': path_ports})),
BuilderConfig(
- name="ports-" + plat + "-builder",
- slavenames=["ports-" + plat],
+ name='ports-' + plat + '-builder',
+ slavenames=['ports-' + plat],
factory=portbuilder_factory,
- tags=["portbuilder", os_version],
- env=merge_dicts(env_buildinfo, {"PATH": path_ports}))]
+ tags=['portbuilder', os_version],
+ env=merge_dicts(env_buildinfo, {'PATH': path_ports}))]
if 'www' in config['deploy']:
c['builders'].append(
BuilderConfig(
- name="docs-www",
- slavenames=["docs"],
+ name='docs-www',
+ slavenames=['docs'],
factory=docs_www_factory,
- tags=["docs", "www"],
- env=merge_dicts(env_buildinfo, {"PATH": path_ports})))
+ tags=['docs', 'www'],
+ env=merge_dicts(env_buildinfo, {'PATH': path_ports})))
if 'guide' in config['deploy']:
c['builders'].append(
BuilderConfig(
- name="docs-guide",
- slavenames=["docs"],
+ name='docs-guide',
+ slavenames=['docs'],
factory=docs_guide_factory,
- tags=["docs", "guide"],
- env=merge_dicts(env_buildinfo, {"PATH": path_ports})))
+ tags=['docs', 'guide'],
+ env=merge_dicts(env_buildinfo, {'PATH': path_ports})))
####### STATUS TARGETS
@@ -642,11 +642,11 @@
interestedUsers = set()
# XXX: needs to be rewritten for the new steps of mp-buildbot
- statusStep = [x for x in build.getSteps() if x.getName() == "status"][0]
- statusLog = [x for x in statusStep.getLogs() if x.getName() == "portstatus"][0]
+ statusStep = [x for x in build.getSteps() if x.getName() == 'status'][0]
+ statusLog = [x for x in statusStep.getLogs() if x.getName() == 'portstatus'][0]
for line in statusLog.getText().splitlines():
halves = line.split()
- if halves[0] == "[FAIL]":
+ if halves[0] == '[FAIL]':
failedPorts.add(halves[1])
fakeAddresses = {'nomaintainer', 'nomaintainer@macports.org', 'openmaintainer', 'openmaintainer@macports.org'}
@@ -707,21 +707,21 @@
def portWatcherMessageFormatter(mode, name, build, results, master_status, interested_users):
result = util.Results[results]
text = list()
- text.append("Status: {:s}".format(result.title()))
- text.append("Build slave: {:s}".format(build.getSlavename()))
+ text.append('Status: {:s}'.format(result.title()))
+ text.append('Build slave: {:s}'.format(build.getSlavename()))
if master_status.getURLForThing(build):
- text.append("Full logs: {:s}".format(master_status.getURLForThing(build)))
- text.append("Build reason: {:s}".format(build.getReason()))
- text.append("Port list: {:s}".format(build.getProperty('fullportlist')))
- text.append("Subport list:\n\t- {:s}".format(build.getProperty('subportlist').replace(" ", "\n\t- ")))
- text.append("Variants: {:s}".format(build.getProperty('variants')))
- text.append("Revision: {:s}".format(build.getProperty('revision')))
- text.append("Build time: {:s}".format(datetime.timedelta(seconds=int(round(build.getTimes()[1] - build.getTimes()[0])))))
- text.append(u"Committer: {:s}".format(",".join(build.getResponsibleUsers())))
+ text.append('Full logs: {:s}'.format(master_status.getURLForThing(build)))
+ text.append('Build reason: {:s}'.format(build.getReason()))
+ text.append('Port list: {:s}'.format(build.getProperty('fullportlist')))
+ text.append('Subport list:\n\t- {:s}'.format(build.getProperty('subportlist').replace(' ', '\n\t- ')))
+ text.append('Variants: {:s}'.format(build.getProperty('variants')))
+ text.append('Revision: {:s}'.format(build.getProperty('revision')))
+ text.append('Build time: {:s}'.format(datetime.timedelta(seconds=int(round(build.getTimes()[1] - build.getTimes()[0])))))
+ text.append(u'Committer: {:s}'.format(','.join(build.getResponsibleUsers())))
- text.append("\nLog from failed builds:")
- summary_step = [x for x in build.getSteps() if x.getName() == "summary"][0]
- summary_log = [x for x in summary_step.getLogs() if x.getName() == "stdio"][0]
+ text.append('\nLog from failed builds:')
+ summary_step = [x for x in build.getSteps() if x.getName() == 'summary'][0]
+ summary_log = [x for x in summary_step.getLogs() if x.getName() == 'stdio'][0]
failed_ports = set()
maintainers_to_notify = set()
pattern = re.compile("^Building '(?P<port>.*?)'.*?(\(failed to install dependency '(?P<dependency>.*?)'\))?( maintainers: (?P<maintainers>.*?)[.])?$")
@@ -730,7 +730,7 @@
# in case of a build error, print the error and add the broken port(s) to the list
if 'ERROR' in line:
line = line.replace(';', '@')
- text.append("\t" + line.replace(" maintainers:", "\n\t> maintainers:"))
+ text.append('\t' + line.replace(' maintainers:', '\n\t> maintainers:'))
match = pattern.match(line)
if match:
for key in ['port', 'dependency']:
@@ -742,33 +742,33 @@
for maintainer in maintainers.split(','):
maintainers_to_notify.add(maintainer)
if len(failed_ports) > 0:
- text.append("\nBroken ports:\n\t- {:s}".format("\n\t- ".join(sorted(failed_ports))))
+ text.append('\nBroken ports:\n\t- {:s}'.format('\n\t- '.join(sorted(failed_ports))))
if len(maintainers_to_notify) > 0:
- text.append("\nResponsible maintainers:\n\t- {}".format("\n\t- ".join(sorted(maintainers_to_notify))))
+ text.append('\nResponsible maintainers:\n\t- {}'.format('\n\t- '.join(sorted(maintainers_to_notify))))
for user in maintainers_to_notify:
interested_users.add(user)
# links to individual builds
- text.append("\nLinks to individual build jobs:")
- trigger_step = [x for x in build.getSteps() if x.getName() == "trigger"][0]
+ text.append('\nLinks to individual build jobs:')
+ trigger_step = [x for x in build.getSteps() if x.getName() == 'trigger'][0]
build_urls_dict = trigger_step.getURLs()
# TODO; sorting won't work properly for
# - ports-10.11-x86_64-builder #99
# - ports-10.11-x86_64-builder #100
build_urls_keys = sorted(build_urls_dict.keys())
for k in build_urls_keys:
- text.append("- {:s}\n {:s}".format(k, build_urls_dict[k]))
- text.append("\n-- \nBest regards,\nMacPorts Buildbot\n{:s}".format(c['buildbotURL']))
+ text.append('- {:s}\n {:s}'.format(k, build_urls_dict[k]))
+ text.append('\n-- \nBest regards,\nMacPorts Buildbot\n{:s}'.format(c['buildbotURL']))
- subject = "Build {:s}".format(result.title())
+ subject = 'Build {:s}'.format(result.title())
if failed_ports:
- subject += ": "
- subject += ", ".join(sorted(failed_ports)[:10])
+ subject += ': '
+ subject += ', '.join(sorted(failed_ports)[:10])
if len(failed_ports) > 10:
- subject += ", and {} more".format(len(failed_ports) - 10)
+ subject += ', and {} more'.format(len(failed_ports) - 10)
return {
- 'body' : "\n".join(text),
+ 'body' : '\n'.join(text),
'type' : 'plain',
'subject' : subject
}
@@ -776,38 +776,38 @@
if config['production']:
# send mail about base failures to users on the blamelist
mn = MailNotifier(
- fromaddr="buildbot@macports.org",
- extraHeaders={"Reply-To": "noreply@macports.org"},
+ fromaddr='buildbot@macports.org',
+ extraHeaders={'Reply-To': 'noreply@macports.org'},
# unless lookup is defined, users have to be configured locally
- # maybe a smarter function is needed, but lookup="" does it for now
- lookup="",
- mode=("problem"),
+ # maybe a smarter function is needed, but lookup='' does it for now
+ lookup='',
+ mode=('problem'),
builders=base_buildernames,
- #extraRecipients=["..."],
+ #extraRecipients=['...'],
#smtpPort=25,
- #relayhost="localhost",
+ #relayhost='localhost',
sendToInterestedUsers=True)
c['status'].append(mn)
mn = PortsMailNotifier(
- fromaddr="buildbot@macports.org",
- extraHeaders={"Reply-To": "noreply@macports.org"},
- lookup="",
- mode=("failing"),
+ fromaddr='buildbot@macports.org',
+ extraHeaders={'Reply-To': 'noreply@macports.org'},
+ lookup='',
+ mode=('failing'),
builders=portwatcher_buildernames,
- #extraRecipients=["..."],
+ #extraRecipients=['...'],
#smtpPort=25,
- #relayhost="localhost",
+ #relayhost='localhost',
sendToInterestedUsers=True,
portMessageFormatter=portWatcherMessageFormatter)
c['status'].append(mn)
# notifications about exceptions
mn = MailNotifier(
- fromaddr="buildbot@macports.org",
- extraHeaders={"Reply-To": "noreply@macports.org"},
- mode=("exception"),
- extraRecipients=["admin@macports.org"],
+ fromaddr='buildbot@macports.org',
+ extraHeaders={'Reply-To': 'noreply@macports.org'},
+ mode=('exception'),
+ extraRecipients=['admin@macports.org'],
sendToInterestedUsers=False)
c['status'].append(mn)
@@ -819,14 +819,14 @@
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.
-c['title'] = "MacPorts"
-c['titleURL'] = "https://www.macports.org/"
+c['title'] = 'MacPorts'
+c['titleURL'] = 'https://www.macports.org/'
c['buildbotURL'] = config['buildboturl']
c['status'].append(html.WebStatus(
http_port=config['httpport'],
authz=authz_cfg,
- changecommentlink=(r"#(\d+)", r"https://trac.macports.org/ticket/\1", r"Ticket \g<0>")
+ changecommentlink=(r'#(\d+)', r'https://trac.macports.org/ticket/\1', r'Ticket \g<0>')
))
c['revlink'] = util.RevlinkMatch([r'https://svn.macports.org/repository/macports/(.*)'],
@@ -838,7 +838,7 @@
c['db'] = {
# This specifies what database buildbot uses to store its state. You can leave
# this at its default for all but the largest installations.
- 'db_url' : "sqlite:///state.sqlite",
+ 'db_url' : 'sqlite:///state.sqlite',
}
-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://lists.macports.org/pipermail/macports-changes/attachments/20160929/ef93e483/attachment-0002.html>
More information about the macports-changes
mailing list