id
int64 0
458k
| file_name
stringlengths 4
119
| file_path
stringlengths 14
227
| content
stringlengths 24
9.96M
| size
int64 24
9.96M
| language
stringclasses 1
value | extension
stringclasses 14
values | total_lines
int64 1
219k
| avg_line_length
float64 2.52
4.63M
| max_line_length
int64 5
9.91M
| alphanum_fraction
float64 0
1
| repo_name
stringlengths 7
101
| repo_stars
int64 100
139k
| repo_forks
int64 0
26.4k
| repo_open_issues
int64 0
2.27k
| repo_license
stringclasses 12
values | repo_extraction_date
stringclasses 433
values |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4,800 | indent.py | buildbot_buildbot/master/buildbot/spec/indent.py | #!/usr/bin/python
import re
import sys
spaces = re.compile("^ +")
def indent_file(fn: str) -> None:
    """Rewrite *fn* in place so every line's indentation is a multiple of 4.

    Whenever a line is indented by a non-multiple of four spaces, that line
    and all following lines that are at least as deeply indented are shifted
    right together until the first line lands on a 4-space boundary.
    """
    with open(fn) as src:
        lines = src.readlines()

    def _indent_of(idx):
        # Number of leading spaces on line idx (tabs intentionally not
        # counted, matching the original "^ +" regex behaviour).
        text = lines[idx]
        return len(text) - len(text.lstrip(' '))

    def _shift_block(idx, pad):
        # Prepend `pad` spaces to every line from idx onward, for as long as
        # the indentation stays at or beyond the block's starting indent.
        start = _indent_of(idx)
        prefix = " " * pad
        while idx < len(lines) and _indent_of(idx) >= start:
            lines[idx] = prefix + lines[idx]
            idx += 1

    for idx in range(len(lines)):
        shortfall = 4 - (_indent_of(idx) % 4)
        if shortfall != 4:
            _shift_block(idx, shortfall)

    with open(fn, 'w') as dst:
        dst.writelines(lines)
def main() -> None:
    """Re-indent every file named on the command line, in place."""
    for path in sys.argv[1:]:
        indent_file(path)
| 851 | Python | .py | 29 | 21.758621 | 60 | 0.552404 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,801 | remotetransfer.py | buildbot_buildbot/master/buildbot/process/remotetransfer.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
"""
module for regrouping all FileWriterImpl and FileReaderImpl away from steps
"""
import os
import shutil
import tarfile
import tempfile
from io import BytesIO
from buildbot.util import bytes2unicode
from buildbot.util import unicode2bytes
from buildbot.worker.protocols import base
class FileWriter(base.FileWriterImpl):
    """
    File-like object with write access, fed by a remote worker.

    Data is first written into a temporary file in the destination
    directory, then atomically renamed into place on close.
    """

    def __init__(self, destfile, maxsize, mode):
        # Make sure the destination directory exists before creating the
        # temporary file inside it.
        destfile = os.path.abspath(destfile)
        parent = os.path.dirname(destfile)
        if not os.path.exists(parent):
            os.makedirs(parent)

        self.destfile = destfile
        self.mode = mode
        fd, self.tmpname = tempfile.mkstemp(dir=parent, prefix='buildbot-transfer-')
        self.fp = os.fdopen(fd, 'wb')
        self.remaining = maxsize

    def remote_write(self, data):
        """
        Called from remote worker to write L{data} to L{fp} within boundaries
        of L{maxsize}

        @type data: C{string}
        @param data: String of data to write
        """
        chunk = unicode2bytes(data)
        if self.remaining is None:
            self.fp.write(chunk)
            return
        # Enforce the size cap: truncate the chunk to what is still allowed.
        if len(chunk) > self.remaining:
            chunk = chunk[: self.remaining]
        self.fp.write(chunk)
        self.remaining -= len(chunk)

    def remote_utime(self, accessed_modified):
        # Propagate the worker's (atime, mtime) pair onto the destination.
        os.utime(self.destfile, accessed_modified)

    def remote_close(self):
        """
        Called by remote worker to state that no more data will be transferred
        """
        self.fp.close()
        self.fp = None
        # on windows, os.rename does not automatically unlink, so do it
        # manually
        if os.path.exists(self.destfile):
            os.unlink(self.destfile)
        os.rename(self.tmpname, self.destfile)
        self.tmpname = None
        if self.mode is not None:
            os.chmod(self.destfile, self.mode)

    def cancel(self):
        # unclean shutdown: the file is probably truncated, so delete it
        # altogether rather than deliver a corrupted file
        fp = getattr(self, "fp", None)
        if fp:
            fp.close()
        self.purge()

    def purge(self):
        # Remove both the destination and the temporary file, if present.
        for path in (self.destfile, self.tmpname):
            if path and os.path.exists(path):
                os.unlink(path)
class DirectoryWriter(FileWriter):
    """
    A FileWriter specialised for directory transfers: the worker sends a
    tar archive, which is unpacked into the destination once complete.
    """

    def __init__(self, destroot, maxsize, compress, mode):
        self.destroot = destroot
        self.compress = compress
        # Reserve a temporary file to receive the archive; FileWriter will
        # stream into it and rename it into place on close.
        self.fd, self.tarname = tempfile.mkstemp(prefix='buildbot-transfer-')
        os.close(self.fd)
        super().__init__(self.tarname, maxsize, mode)

    def remote_unpack(self):
        """
        Called by remote worker to state that no more data will be transferred
        """
        # Make sure remote_close is called, otherwise atomic rename won't happen
        self.remote_close()

        # Map the configured compression onto a streaming TarFile mode.
        tar_mode = {'bz2': 'r|bz2', 'gz': 'r|gz'}.get(self.compress, 'r')

        # Unpack the archive and clean up after ourselves; use the 'data'
        # filter when available to guard against malicious members.
        with tarfile.open(name=self.tarname, mode=tar_mode) as archive:
            if hasattr(tarfile, 'data_filter'):
                archive.extractall(path=self.destroot, filter='data')
            else:
                archive.extractall(path=self.destroot)
        os.remove(self.tarname)

    def purge(self):
        super().purge()
        if os.path.isdir(self.destroot):
            shutil.rmtree(self.destroot)
class FileReader(base.FileReaderImpl):
    """
    File-like wrapper giving a remote worker read access to a local file.
    """

    def __init__(self, fp):
        self.fp = fp

    def remote_read(self, maxlength):
        """
        Called from remote worker to read at most L{maxlength} bytes of data

        @type maxlength: C{integer}
        @param maxlength: Maximum number of data bytes that can be returned

        @return: Data read from L{fp}
        @rtype: C{string} of bytes read from file
        """
        # After close, serve an empty string rather than failing.
        if self.fp is None:
            return ''
        return self.fp.read(maxlength)

    def remote_close(self):
        """
        Called by remote worker to state that no more data will be transferred
        """
        if self.fp is None:
            return
        self.fp.close()
        self.fp = None
class StringFileWriter(base.FileWriterImpl):
    """
    FileWriter that accumulates received data into an in-memory string.

    Used to upload a file from a worker for inline processing rather than
    writing it to a file on the master.
    """

    def __init__(self):
        self.buffer = ""

    def remote_write(self, data):
        # Decode incoming data and append it to the in-memory buffer.
        self.buffer = self.buffer + bytes2unicode(data)

    def remote_close(self):
        # Nothing to release; the buffer simply stays available.
        pass
class StringFileReader(FileReader):
    """
    FileReader that serves data from an in-memory string.

    Used to download a file to a worker from a local string rather than
    first writing it to a file on the master.
    """

    def __init__(self, s):
        # Normalise to bytes, then expose them through a BytesIO stream.
        super().__init__(BytesIO(unicode2bytes(s)))
| 6,168 | Python | .py | 164 | 30.073171 | 85 | 0.646043 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,802 | project.py | buildbot_buildbot/master/buildbot/process/project.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from typing import ClassVar
from typing import Sequence
from buildbot import util
from buildbot.config.checks import check_markdown_support
from buildbot.config.checks import check_param_str
from buildbot.config.checks import check_param_str_none
from buildbot.config.errors import error
class Project(util.ComparableMixin):
    """A named project, with validated name/slug/description fields.

    The optional description format must be None or "markdown"; anything
    else is reported as a configuration error and discarded.
    """

    compare_attrs: ClassVar[Sequence[str]] = (
        "name",
        "slug",
        "description",
        "description_format",
    )

    def __init__(self, name, slug=None, description=None, description_format=None):
        # The slug defaults to the project name.
        self.name = check_param_str(name, self.__class__, "name")
        self.slug = check_param_str(slug if slug is not None else name, self.__class__, "slug")
        self.description = check_param_str_none(description, self.__class__, "description")
        self.description_format = check_param_str_none(
            description_format, self.__class__, "description_format"
        )

        fmt = self.description_format
        if fmt == "markdown":
            # Fall back to plain text if markdown rendering is unavailable.
            if not check_markdown_support(self.__class__):  # pragma: no cover
                self.description_format = None
        elif fmt is not None:
            error('project description format must be None or "markdown"')
            self.description_format = None
| 2,039 | Python | .py | 45 | 39.644444 | 91 | 0.705734 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,803 | subunitlogobserver.py | buildbot_buildbot/master/buildbot/process/subunitlogobserver.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
# this used to be referenced here, so we keep a link for old time's sake
import buildbot.steps.subunit

# Backward-compatibility re-export: the class now lives in
# buildbot.steps.subunit; old configs importing it from here keep working.
SubunitShellCommand = buildbot.steps.subunit.SubunitShellCommand
| 876 | Python | .py | 17 | 50.352941 | 79 | 0.797897 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,804 | logobserver.py | buildbot_buildbot/master/buildbot/process/logobserver.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from typing import TYPE_CHECKING
from zope.interface import implementer
from buildbot import interfaces
if TYPE_CHECKING:
from buildbot.process.build import Build
from buildbot.process.log import Log
@implementer(interfaces.ILogObserver)
class LogObserver:
    """Base class for observers that receive raw log chunks from a step."""

    def setStep(self, step):
        # Remember the owning step so subclasses can talk back to it.
        self.step = step

    def setLog(self, loog):
        loog.subscribe(self.gotData)

    def gotData(self, stream, data):
        # Dispatch a raw chunk to the per-stream handler; a None payload
        # signals the end of the log.
        if data is None:
            self.finishReceived()
            return
        if stream is None or stream == 'o':
            self.outReceived(data)
        elif stream == 'e':
            self.errReceived(data)
        elif stream == 'h':
            self.headerReceived(data)

    def finishReceived(self):
        pass

    def outReceived(self, data):
        pass

    def errReceived(self, data):
        pass

    def headerReceived(self, data):
        pass

    def logChunk(
        self, build: "Build", step: interfaces.IBuildStep, log: "Log", channel: str, text: str
    ) -> None:
        pass
class LogLineObserver(LogObserver):
    """LogObserver that splits raw chunks into complete lines before dispatch."""

    stdoutDelimiter = "\n"
    stderrDelimiter = "\n"
    headerDelimiter = "\n"

    def __init__(self):
        super().__init__()
        # Lines longer than this many bytes are silently dropped.
        self.max_length = 16384

    def setMaxLineLength(self, max_length):
        """
        Set the maximum line length: lines longer than max_length are
        dropped. Default is 16384 bytes. Use sys.maxint for effective
        infinity.
        """
        self.max_length = max_length

    def _lineReceived(self, data, delimiter, funcReceived):
        # Forward each complete line that fits within the length cap.
        limit = self.max_length
        for piece in data.rstrip().split(delimiter):
            if len(piece) <= limit:
                funcReceived(piece)

    def outReceived(self, data):
        self._lineReceived(data, self.stdoutDelimiter, self.outLineReceived)

    def errReceived(self, data):
        self._lineReceived(data, self.stderrDelimiter, self.errLineReceived)

    def headerReceived(self, data):
        self._lineReceived(data, self.headerDelimiter, self.headerLineReceived)

    def outLineReceived(self, line):
        """Called with each complete stdout line (delimiter stripped).
        Override this in your observer."""

    def errLineReceived(self, line):
        """Called with each complete stderr line (delimiter stripped).
        Override this in your observer."""

    def headerLineReceived(self, line):
        """Called with each complete header line (delimiter stripped).
        Override this in your observer."""
class LineConsumerLogObserver(LogLineObserver):
    """Feeds ('stream', line) tuples into a consumer generator.

    consumerFunction is a zero-argument callable returning a generator;
    it is primed on the first line and then fed via .send(), and closed
    when the log finishes.
    """

    def __init__(self, consumerFunction):
        super().__init__()
        self.generator = None
        self.consumerFunction = consumerFunction

    def feed(self, input):
        # note that we defer starting the generator until the first bit of
        # data, since the observer may be instantiated during configuration as
        # well as for each execution of the step.
        self.generator = self.consumerFunction()
        next(self.generator)
        # shortcut all remaining feed operations: rebinding the instance
        # attribute shadows this method, so later calls go straight to send()
        self.feed = self.generator.send
        self.feed(input)

    def outLineReceived(self, line):
        self.feed(('o', line))

    def errLineReceived(self, line):
        self.feed(('e', line))

    def headerLineReceived(self, line):
        self.feed(('h', line))

    def finishReceived(self):
        # Close the generator only if it was ever started (i.e. data arrived).
        if self.generator:
            self.generator.close()
class OutputProgressObserver(LogObserver):
    """Reports the cumulative size of a log as a named progress metric."""

    length = 0

    def __init__(self, name):
        # Name under which progress is reported to the step.
        self.name = name

    def gotData(self, stream, data):
        # Count every byte regardless of stream, then update the step.
        if data:
            self.length = self.length + len(data)
        self.step.setProgress(self.name, self.length)
class BufferLogObserver(LogObserver):
    """Accumulates stdout and/or stderr chunks for later retrieval."""

    def __init__(self, wantStdout=True, wantStderr=False):
        super().__init__()
        # A None buffer means "don't collect this stream".
        self.stdout = [] if wantStdout else None
        self.stderr = [] if wantStderr else None

    def outReceived(self, data):
        if self.stdout is not None:
            self.stdout.append(data)

    def errReceived(self, data):
        if self.stderr is not None:
            self.stderr.append(data)

    def _get(self, chunks):
        # Join collected chunks; an unused (None) or empty buffer yields ''.
        return ''.join(chunks) if chunks else ''

    def getStdout(self):
        return self._get(self.stdout)

    def getStderr(self):
        return self._get(self.stderr)
| 5,149 | Python | .py | 131 | 32.21374 | 94 | 0.669815 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,805 | factory.py | buildbot_buildbot/master/buildbot/process/factory.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import annotations
from contextlib import contextmanager
from typing import TYPE_CHECKING
from typing import ClassVar
from typing import Sequence
from twisted.python import deprecate
from twisted.python import versions
from buildbot import interfaces
from buildbot import util
from buildbot.process import buildstep
from buildbot.process.build import Build
from buildbot.steps.download_secret_to_worker import DownloadSecretsToWorker
from buildbot.steps.download_secret_to_worker import RemoveWorkerFileSecret
from buildbot.steps.shell import Compile
from buildbot.steps.shell import Configure
from buildbot.steps.shell import PerlModuleTest
from buildbot.steps.shell import ShellCommand
from buildbot.steps.shell import Test
from buildbot.steps.source.cvs import CVS
from buildbot.steps.source.svn import SVN
if TYPE_CHECKING:
from buildbot.process.builder import Builder
# deprecated, use BuildFactory.addStep
@deprecate.deprecated(versions.Version("buildbot", 0, 8, 6))
def s(steptype, **kwargs):
    """Deprecated master.cfg convenience: build a step factory from a step
    class plus its constructor keyword arguments."""
    step = steptype(**kwargs)
    return buildstep.get_factory_from_step_or_factory(step)
class BuildFactory(util.ComparableMixin):
    """
    Knows how to assemble Build objects from a list of step factories.

    @cvar buildClass: class to use when creating builds
    @type buildClass: L{buildbot.process.build.Build}
    """

    buildClass: type[Build] = Build
    useProgress = True
    workdir = "build"
    compare_attrs: ClassVar[Sequence[str]] = ('buildClass', 'steps', 'useProgress', 'workdir')

    def __init__(self, steps=None):
        self.steps = []
        if steps:
            self.addSteps(steps)

    def newBuild(self, requests, builder: Builder) -> Build:
        """Create a new Build instance.

        @param requests: a list of buildrequest dictionaries describing what is
        to be built
        """
        build = self.buildClass(requests, builder)
        build.useProgress = self.useProgress
        build.workdir = self.workdir
        build.setStepFactories(self.steps)
        return build

    def addStep(self, step):
        # Accept either a step instance or a step factory; reject anything else.
        is_step = interfaces.IBuildStep.providedBy(step)
        is_factory = interfaces.IBuildStepFactory.providedBy(step)
        if not is_step and not is_factory:
            raise TypeError('step must be an instance of a BuildStep')
        self.steps.append(buildstep.get_factory_from_step_or_factory(step))

    def addSteps(self, steps, withSecrets=None):
        # Optionally bracket the added steps with secret download/removal.
        secrets = withSecrets or []
        if secrets:
            self.addStep(DownloadSecretsToWorker(secrets))
        for step in steps:
            self.addStep(step)
        if secrets:
            self.addStep(RemoveWorkerFileSecret(secrets))

    @contextmanager
    def withSecrets(self, secrets):
        # Steps added inside the `with` body run with the secrets in place.
        self.addStep(DownloadSecretsToWorker(secrets))
        yield self
        self.addStep(RemoveWorkerFileSecret(secrets))
# BuildFactory subclasses for common build tools
class _DefaultCommand:
# Used to indicate a default command to the step.
pass
class GNUAutoconf(BuildFactory):
    """Factory for classic autoconf projects.

    Assembles, in order: optional autoreconf, configure, compile, test,
    distcheck. Pass None for configure/compile/test/distcheck to skip
    that step; leave the default to get the conventional command.
    """

    def __init__(
        self,
        source,
        configure="./configure",
        configureEnv=None,
        configureFlags=None,
        reconf=None,
        compile=_DefaultCommand,
        test=_DefaultCommand,
        distcheck=_DefaultCommand,
    ):
        # _DefaultCommand is a sentinel so callers can pass None to disable
        # a step while still getting a sensible default otherwise.
        if configureEnv is None:
            configureEnv = {}
        if configureFlags is None:
            configureFlags = []
        if compile is _DefaultCommand:
            compile = ["make", "all"]
        if test is _DefaultCommand:
            test = ["make", "check"]
        if distcheck is _DefaultCommand:
            distcheck = ["make", "distcheck"]

        super().__init__([source])

        # reconf=True selects the standard autoreconf invocation.
        if reconf is True:
            reconf = ["autoreconf", "-si"]
        if reconf is not None:
            self.addStep(ShellCommand(name="autoreconf", command=reconf, env=configureEnv))

        if configure is not None:
            # we either need to wind up with a string (which will be
            # space-split), or with a list of strings (which will not). The
            # list of strings is the preferred form.
            if isinstance(configure, str):
                if configureFlags:
                    assert " " not in configure  # please use list instead
                    command = [configure, *configureFlags]
                else:
                    command = configure
            else:
                assert isinstance(configure, (list, tuple))
                command = configure + configureFlags
            self.addStep(Configure(command=command, env=configureEnv))
        if compile is not None:
            self.addStep(Compile(command=compile, env=configureEnv))
        if test is not None:
            self.addStep(Test(command=test, env=configureEnv))
        if distcheck is not None:
            self.addStep(Test(command=distcheck, env=configureEnv))
class CPAN(BuildFactory):
    """Factory for Perl modules built with MakeMaker (Makefile.PL)."""

    def __init__(self, source, perl="perl"):
        super().__init__([source])
        # Standard CPAN build sequence: Makefile.PL, make, make test.
        for step in (
            Configure(command=[perl, "Makefile.PL"]),
            Compile(command=["make"]),
            PerlModuleTest(command=["make", "test"]),
        ):
            self.addStep(step)
# deprecated since buildbot 4.0; use a plain BuildFactory with explicit steps
@deprecate.deprecated(versions.Version("buildbot", 4, 0, 0))
class Distutils(BuildFactory):
    """Deprecated factory for distutils-based Python projects."""

    def __init__(self, source, python="python", test=None):
        super().__init__([source])
        self.addStep(Compile(command=[python, "./setup.py", "build"]))
        # A test step is only added when a test command is supplied.
        if test is not None:
            self.addStep(Test(command=test))
class Trial(BuildFactory):
    """Build a python module that uses distutils and trial. Set 'tests' to
    the module in which the tests can be found, or set useTestCaseNames=True
    to always have trial figure out which tests to run (based upon which
    files have been changed).

    See docs/factories.xhtml for usage samples. Not all of the Trial
    BuildStep options are available here, only the most commonly used ones.
    To get complete access, you will need to create a custom
    BuildFactory."""

    # Class-level defaults; instance attributes shadow them when the
    # corresponding constructor argument is given.
    trial = "trial"
    randomly = False
    recurse = False

    def __init__(
        self,
        source,
        buildpython=None,
        trialpython=None,
        trial=None,
        testpath=".",
        randomly=None,
        recurse=None,
        tests=None,
        useTestCaseNames=False,
        env=None,
    ):
        super().__init__([source])
        # At least one of `tests` / `useTestCaseNames` must be supplied so
        # trial knows what to run.
        assert tests or useTestCaseNames, "must use one or the other"
        if buildpython is None:
            buildpython = ["python"]
        if trialpython is None:
            trialpython = []
        if trial is not None:
            self.trial = trial
        if randomly is not None:
            self.randomly = randomly
        if recurse is not None:
            self.recurse = recurse

        # NOTE(review): local import — presumably to avoid an import cycle at
        # module load time; it shadows this factory's class name with the
        # Trial *step* for the rest of this method.
        from buildbot.steps.python_twisted import Trial

        buildcommand = [*buildpython, "./setup.py", "build"]
        self.addStep(Compile(command=buildcommand, env=env))
        self.addStep(
            Trial(
                python=trialpython,
                trial=self.trial,
                testpath=testpath,
                tests=tests,
                testChanges=useTestCaseNames,
                randomly=self.randomly,
                recurse=self.recurse,
                env=env,
            )
        )
# compatibility classes, will go away. Note that these only offer
# compatibility at the constructor level: if you have subclassed these
# factories, your subclasses are unlikely to still work correctly.

# Old name kept as an alias so existing master.cfg files keep working.
ConfigurableBuildFactory = BuildFactory
class BasicBuildFactory(GNUAutoconf):
    """GNU Autoconf-created tarball in a CVS tree: checkout then autoconf build."""

    def __init__(
        self,
        cvsroot,
        cvsmodule,
        configure=None,
        configureEnv=None,
        compile="make all",
        test="make check",
        cvsCopy=False,
    ):
        if configureEnv is None:
            configureEnv = {}
        # cvsCopy selects a "copy" checkout instead of a full clobber.
        method = "copy" if cvsCopy else "clobber"
        checkout = CVS(cvsroot=cvsroot, cvsmodule=cvsmodule, mode="full", method=method)
        super().__init__(
            checkout, configure=configure, configureEnv=configureEnv, compile=compile, test=test
        )
class QuickBuildFactory(BasicBuildFactory):
    """BasicBuildFactory variant for quick turnaround: incremental CVS
    checkouts and no progress tracking.

    Bug fix: this used to call ``super().__init__(source, ...)``, but the
    direct parent ``BasicBuildFactory.__init__`` takes ``(cvsroot,
    cvsmodule, ...)``, so instantiating this class raised ``TypeError``
    (missing 'cvsmodule'). Since this class builds its own source step, it
    must initialise the grandparent ``GNUAutoconf`` directly, whose first
    parameter is a source step.
    """

    useProgress = False

    def __init__(
        self,
        cvsroot,
        cvsmodule,
        configure=None,
        configureEnv=None,
        compile="make all",
        test="make check",
        cvsCopy=False,
    ):
        if configureEnv is None:
            configureEnv = {}
        source = CVS(cvsroot=cvsroot, cvsmodule=cvsmodule, mode="incremental")
        # Deliberately bypass BasicBuildFactory.__init__ (wrong signature
        # for a pre-built source step) and go straight to GNUAutoconf.
        GNUAutoconf.__init__(
            self,
            source,
            configure=configure,
            configureEnv=configureEnv,
            compile=compile,
            test=test,
        )
class BasicSVN(GNUAutoconf):
    """GNU Autoconf tree checked out of Subversion (incremental updates)."""

    def __init__(
        self, svnurl, configure=None, configureEnv=None, compile="make all", test="make check"
    ):
        env = {} if configureEnv is None else configureEnv
        checkout = SVN(svnurl=svnurl, mode="incremental")
        super().__init__(
            checkout, configure=configure, configureEnv=env, compile=compile, test=test
        )
| 9,965 | Python | .py | 257 | 30.867704 | 94 | 0.656868 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,806 | measured_service.py | buildbot_buildbot/master/buildbot/process/measured_service.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from twisted.internet import defer
from buildbot.process import metrics
from buildbot.util.service import BuildbotServiceManager
class MeasuredBuildbotServiceManager(BuildbotServiceManager):
    """BuildbotServiceManager that reports reconfiguration metrics.

    Times each reconfiguration pass and logs the number of managed child
    services after it completes.
    """

    # Label used in the emitted metric name (e.g. "num_services").
    managed_services_name = "services"

    @defer.inlineCallbacks
    def reconfigServiceWithBuildbotConfig(self, new_config):
        # Time the whole (asynchronous) reconfiguration pass.
        timer = metrics.Timer(f"{self.name}.reconfigServiceWithBuildbotConfig")
        timer.start()
        yield super().reconfigServiceWithBuildbotConfig(new_config)
        # Record the current number of managed services as an absolute count.
        metrics.MetricCountEvent.log(
            f"num_{self.managed_services_name}", len(list(self)), absolute=True
        )
        timer.stop()
| 1,348 | Python | .py | 28 | 44.357143 | 79 | 0.773973 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,807 | properties.py | buildbot_buildbot/master/buildbot/process/properties.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import collections
import json
import re
import weakref
from typing import ClassVar
from typing import Sequence
from twisted.internet import defer
from twisted.python.components import registerAdapter
from zope.interface import implementer
from buildbot import config
from buildbot import util
from buildbot.interfaces import IProperties
from buildbot.interfaces import IRenderable
from buildbot.util import flatten
@implementer(IProperties)
class Properties(util.ComparableMixin):
    """
    I represent a set of properties that can be interpolated into various
    strings in buildsteps.

    @ivar properties: dictionary mapping property values to tuples
        (value, source), where source is a string identifying the source
        of the property.

    Objects of this class can be read like a dictionary -- in this case,
    only the property value is returned.

    As a special case, a property value of None is returned as an empty
    string when used as a mapping.
    """

    compare_attrs: ClassVar[Sequence[str]] = ('properties',)

    def __init__(self, **kwargs):
        """
        @param kwargs: initial property values (for testing)
        """
        self.properties = {}
        # Track keys which are 'runtime', and should not be
        # persisted if a build is rebuilt
        self.runtime = set()
        self.build = None  # will be set by the Build when starting
        # Maps secret value -> "<secret name>", used for log scrubbing.
        self._used_secrets = {}
        if kwargs:
            self.update(kwargs, "TEST")
        self._master = None
        self._sourcestamps = None
        self._changes = None

    @property
    def master(self):
        # Prefer the build's master when attached to a build.
        if self.build is not None:
            return self.build.master
        return self._master

    @master.setter
    def master(self, value):
        self._master = value

    @property
    def sourcestamps(self):
        # Sourcestamp dicts come from the build when attached, otherwise
        # from an explicitly assigned list.
        if self.build is not None:
            return [b.asDict() for b in self.build.getAllSourceStamps()]
        elif self._sourcestamps is not None:
            return self._sourcestamps
        raise AttributeError('neither build nor _sourcestamps are set')

    @sourcestamps.setter
    def sourcestamps(self, value):
        self._sourcestamps = value

    def getSourceStamp(self, codebase=''):
        # Return the sourcestamp dict for the given codebase, or None.
        for source in self.sourcestamps:
            if source['codebase'] == codebase:
                return source
        return None

    @property
    def changes(self):
        if self.build is not None:
            return [c.asChDict() for c in self.build.allChanges()]
        elif self._changes is not None:
            return self._changes
        raise AttributeError('neither build nor _changes are set')

    @changes.setter
    def changes(self, value):
        self._changes = value

    @property
    def files(self):
        if self.build is not None:
            return self.build.allFiles()
        files = []
        # self.changes, not self._changes to raise AttributeError if unset
        for chdict in self.changes:
            files.extend(chdict['files'])
        return files

    @classmethod
    def fromDict(cls, propDict):
        """Alternate constructor from a {name: (value, source)} mapping."""
        properties = cls()
        for name, (value, source) in propDict.items():
            properties.setProperty(name, value, source)
        return properties

    def __getstate__(self):
        # Drop the build reference when pickling; it is reattached later.
        d = self.__dict__.copy()
        d['build'] = None
        return d

    def __setstate__(self, d):
        self.__dict__ = d
        # Older pickles may predate the 'runtime' attribute.
        if not hasattr(self, 'runtime'):
            self.runtime = set()

    def __contains__(self, name):
        return name in self.properties

    def __getitem__(self, name):
        """Just get the value for this property."""
        rv = self.properties[name][0]
        return rv

    def __bool__(self):
        return bool(self.properties)

    def getPropertySource(self, name):
        # Return only the source string recorded for the property.
        return self.properties[name][1]

    def asList(self):
        """Return the properties as a sorted list of (name, value, source)"""
        ret = sorted([(k, v[0], v[1]) for k, v in self.properties.items()])
        return ret

    def asDict(self):
        """Return the properties as a simple key:value dictionary,
        properly unicoded"""
        return self.properties.copy()

    def __repr__(self):
        return 'Properties(**' + repr(dict((k, v[0]) for k, v in self.properties.items())) + ')'

    def update(self, dict, source, runtime=False):
        """Update this object from a dictionary, with an explicit source specified."""
        for k, v in dict.items():
            self.setProperty(k, v, source, runtime=runtime)

    def updateFromProperties(self, other):
        """Update this object based on another object; the other object's
        values take precedence over ours."""
        self.properties.update(other.properties)
        self.runtime.update(other.runtime)

    def updateFromPropertiesNoRuntime(self, other):
        """Update this object based on another object, but don't
        include properties that were marked as runtime."""
        for k, v in other.properties.items():
            if k not in other.runtime:
                self.properties[k] = v

    # IProperties methods

    def getProperty(self, name, default=None):
        return self.properties.get(name, (default,))[0]

    def hasProperty(self, name):
        return name in self.properties

    has_key = hasProperty

    def setProperty(self, name, value, source, runtime=False):
        name = util.bytes2unicode(name)
        # Non-renderable values must be JSON-serializable.
        if not IRenderable.providedBy(value):
            json.dumps(value)  # Let the exception propagate ...
        source = util.bytes2unicode(source)

        self.properties[name] = (value, source)
        if runtime:
            self.runtime.add(name)

    def getProperties(self):
        return self

    def getBuild(self):
        return self.build

    def render(self, value):
        # Adapt the value to IRenderable and resolve it asynchronously.
        renderable = IRenderable(value)
        return defer.maybeDeferred(renderable.getRenderingFor, self)

    # as the secrets are used in the renderable, they can pretty much arrive anywhere
    # in the log of state strings
    # so we have the renderable record here which secrets are used that we must remove
    def useSecret(self, secret_value, secret_name):
        if secret_value.strip():
            self._used_secrets[secret_value] = "<" + secret_name + ">"

    # This method shall then be called to remove secrets from any text that could be logged
    # somewhere and that could contain secrets
    def cleanupTextFromSecrets(self, text):
        # Better be correct and inefficient than efficient and wrong
        secrets = self._used_secrets
        # Replace longer secrets first so a shorter secret that is a
        # substring of another does not leave partial values behind.
        for k in sorted(secrets, key=len, reverse=True):
            text = text.replace(k, secrets[k])
        return text
class PropertiesMixin:
    """
    Adds L{IProperties}-style helpers to a class that only implements
    getProperties(); every method simply delegates to that object.

    This is useful because L{IProperties} methods are often called on
    L{Build} objects without first coercing them.

    @ivar set_runtime_properties: the default value for the C{runtime}
        parameter of L{setProperty}.
    """

    set_runtime_properties = False

    def getProperty(self, propname, default=None):
        return self.getProperties().getProperty(propname, default)

    def hasProperty(self, propname):
        return self.getProperties().hasProperty(propname)

    has_key = hasProperty

    def setProperty(self, propname, value, source='Unknown', runtime=None):
        # source is not optional in IProperties, but is optional here to avoid
        # breaking user-supplied code that fails to specify a source
        props = self.getProperties()
        effective_runtime = self.set_runtime_properties if runtime is None else runtime
        props.setProperty(propname, value, source, runtime=effective_runtime)

    def render(self, value):
        return self.getProperties().render(value)
@implementer(IRenderable)
class RenderableOperatorsMixin:
    """
    Overloads the standard Python operators on renderables, so Properties
    and Interpolate instances can be combined into deferred expressions.
    Each operator returns an _OperatorRenderer that applies the operation
    once both operands have been rendered.
    """

    def __eq__(self, other):
        return _OperatorRenderer(self, other, "==", lambda a, b: a == b)

    def __ne__(self, other):
        return _OperatorRenderer(self, other, "!=", lambda a, b: a != b)

    def __lt__(self, other):
        return _OperatorRenderer(self, other, "<", lambda a, b: a < b)

    def __le__(self, other):
        return _OperatorRenderer(self, other, "<=", lambda a, b: a <= b)

    def __gt__(self, other):
        return _OperatorRenderer(self, other, ">", lambda a, b: a > b)

    def __ge__(self, other):
        return _OperatorRenderer(self, other, ">=", lambda a, b: a >= b)

    def __add__(self, other):
        return _OperatorRenderer(self, other, "+", lambda a, b: a + b)

    def __sub__(self, other):
        return _OperatorRenderer(self, other, "-", lambda a, b: a - b)

    def __mul__(self, other):
        return _OperatorRenderer(self, other, "*", lambda a, b: a * b)

    def __truediv__(self, other):
        return _OperatorRenderer(self, other, "/", lambda a, b: a / b)

    def __floordiv__(self, other):
        return _OperatorRenderer(self, other, "//", lambda a, b: a // b)

    def __mod__(self, other):
        return _OperatorRenderer(self, other, "%", lambda a, b: a % b)

    # we cannot use this trick to overload the 'in' operator, as python will
    # force the result of __contains__ to a boolean, forcing it to True all
    # the time; so we mimic sqlalchemy and provide an in_ method instead
    def in_(self, other):
        return _OperatorRenderer(self, other, "in", lambda a, b: a in b)

    def getRenderingFor(self, iprops: IProperties) -> defer.Deferred:
        raise NotImplementedError
@implementer(IRenderable)
class _OperatorRenderer(RenderableOperatorsMixin, util.ComparableMixin):
    """
    An instance of this class renders a comparison given by a operator
    function with v1 and v2
    """

    # NOTE(review): 'fn' is not an attribute of this class (the attributes
    # are v1/v2/comparator/cstr), so comparison ignores the operands; this
    # looks copied from _Renderer -- confirm whether it is intentional.
    compare_attrs: ClassVar[Sequence[str]] = ('fn',)

    def __init__(self, v1, v2, cstr, comparator):
        # v1/v2: the operands; either may itself be renderable
        self.v1 = v1
        self.v2 = v2
        # comparator: two-argument callable applied to the rendered operands
        self.comparator = comparator
        # cstr: the operator symbol, used only by __repr__
        self.cstr = cstr

    @defer.inlineCallbacks
    def getRenderingFor(self, props):
        # render both operands, then combine the results
        v1, v2 = yield props.render((self.v1, self.v2))
        return self.comparator(v1, v2)

    def __repr__(self):
        return f'{self.v1!r} {self.cstr!s} {self.v2!r}'
class _PropertyMap:
    """
    Privately-used mapping object to implement WithProperties' substitutions,
    including the rendering of None as ''.
    """

    # recognize the three "name:<op>replacement" key forms
    colon_minus_re = re.compile(r"(.*):-(.*)")
    colon_tilde_re = re.compile(r"(.*):~(.*)")
    colon_plus_re = re.compile(r"(.*):\+(.*)")

    def __init__(self, properties):
        # use weakref here to avoid a reference loop
        self.properties = weakref.ref(properties)
        # keyword arguments given to WithProperties; they shadow real
        # properties during lookup
        self.temp_vals = {}

    def __getitem__(self, key):
        """Resolve one %(key)s reference from a WithProperties format string."""
        properties = self.properties()
        assert properties is not None

        def colon_minus(mo):
            # %(prop:-repl)s
            # if prop exists, use it; otherwise, use repl
            prop, repl = mo.group(1, 2)
            if prop in self.temp_vals:
                return self.temp_vals[prop]
            elif prop in properties:
                return properties[prop]
            return repl

        def colon_tilde(mo):
            # %(prop:~repl)s
            # if prop exists and is true (nonempty), use it; otherwise, use
            # repl
            prop, repl = mo.group(1, 2)
            if self.temp_vals.get(prop):
                return self.temp_vals[prop]
            elif prop in properties and properties[prop]:  # noqa: RUF019
                return properties[prop]
            return repl

        def colon_plus(mo):
            # %(prop:+repl)s
            # if prop exists, use repl; otherwise, an empty string
            prop, repl = mo.group(1, 2)
            if prop in properties or prop in self.temp_vals:
                return repl
            return ''

        # try the three operator forms in order; fall through to a plain
        # lookup when none of them match
        for regexp, fn in [
            (self.colon_minus_re, colon_minus),
            (self.colon_tilde_re, colon_tilde),
            (self.colon_plus_re, colon_plus),
        ]:
            mo = regexp.match(key)
            if mo:
                rv = fn(mo)
                break
        else:
            # If explicitly passed as a kwarg, use that,
            # otherwise, use the property value.
            if key in self.temp_vals:
                rv = self.temp_vals[key]
            else:
                rv = properties[key]

        # translate 'None' to an empty string
        if rv is None:
            rv = ''
        return rv

    def add_temporary_value(self, key, val):
        "Add a temporary value (to support keyword arguments to WithProperties)"
        self.temp_vals[key] = val
@implementer(IRenderable)
class WithProperties(util.ComparableMixin):
    """
    This is a marker class, used fairly widely to indicate that we
    want to interpolate build properties.

    Takes either positional substitution names or keyword lambda
    substitutions (callables invoked with the build), never both.
    """

    compare_attrs: ClassVar[Sequence[str]] = ('fmtstring', 'args', 'lambda_subs')

    def __init__(self, fmtstring, *args, **lambda_subs):
        self.fmtstring = fmtstring
        self.args = args
        if self.args:
            # positional mode: keyword substitutions are not allowed
            if lambda_subs:
                raise ValueError(
                    'WithProperties takes either positional or keyword substitutions, not both.'
                )
        else:
            # keyword mode: every substitution value must be callable
            self.lambda_subs = lambda_subs
            for key, val in self.lambda_subs.items():
                if not callable(val):
                    raise ValueError(f'Value for lambda substitution "{key}" must be callable.')

    def getRenderingFor(self, build):
        pmap = _PropertyMap(build.getProperties())
        if self.args:
            # positional: look every requested name up in the property map
            return self.fmtstring % tuple(pmap[name] for name in self.args)
        # keyword: evaluate each lambda against the build, then let the
        # format string pull values out of the map by name
        for key, func in self.lambda_subs.items():
            pmap.add_temporary_value(key, func(build))
        return self.fmtstring % pmap
class _NotHasKey(util.ComparableMixin):
    """A marker for missing ``hasKey`` parameter.

    To withstand ``deepcopy``, ``reload`` and pickle serialization round trips,
    check it with ``==`` or ``!=``.
    """

    # no attributes: all instances compare equal
    compare_attrs: ClassVar[Sequence[str]] = ()


# any instance of _NotHasKey would do, yet we don't want to create and delete
# them all the time
_notHasKey = _NotHasKey()
@implementer(IRenderable)
class _Lookup(util.ComparableMixin):
    """
    Renders ``value[index]`` (both themselves renderable) with configurable
    fallbacks; backs Interpolate's ``:-``, ``:~``, ``:+`` and ternary forms.
    """

    compare_attrs: ClassVar[Sequence[str]] = (
        'value',
        'index',
        'default',
        'defaultWhenFalse',
        'hasKey',
        'elideNoneAs',
    )

    def __init__(
        self, value, index, default=None, defaultWhenFalse=True, hasKey=_notHasKey, elideNoneAs=None
    ):
        # value: the mapping to look into (renderable)
        self.value = value
        # index: the key to look up (renderable)
        self.index = index
        # default: used when the key is absent (or present-but-falsy when
        # defaultWhenFalse is set)
        self.default = default
        # defaultWhenFalse: treat a falsy value like a missing key
        self.defaultWhenFalse = defaultWhenFalse
        # hasKey: if given (i.e. not the _notHasKey sentinel), rendered
        # instead of the looked-up value whenever the key counts as present
        self.hasKey = hasKey
        # elideNoneAs: substituted when the final result is None
        self.elideNoneAs = elideNoneAs

    def __repr__(self):
        # show only the non-default constructor arguments
        parts = [repr(self.index)]
        if self.default is not None:
            parts.append(f', default={self.default!r}')
        if not self.defaultWhenFalse:
            parts.append(', defaultWhenFalse=False')
        if self.hasKey != _notHasKey:
            parts.append(f', hasKey={self.hasKey!r}')
        if self.elideNoneAs is not None:
            parts.append(f', elideNoneAs={self.elideNoneAs!r}')
        parts_str = ''.join(parts)
        return f'_Lookup({self.value!r}, {parts_str})'

    @defer.inlineCallbacks
    def getRenderingFor(self, build):
        value = build.render(self.value)
        index = build.render(self.index)
        value, index = yield defer.gatherResults([value, index])
        if index not in value:
            # missing key: always render the default
            rv = yield build.render(self.default)
        else:
            if self.defaultWhenFalse:
                rv = yield build.render(value[index])
                if not rv:
                    # present but falsy: fall back to the default
                    rv = yield build.render(self.default)
                # inner branch: value was truthy and a hasKey replacement
                # was supplied -- render that instead of the value
                elif self.hasKey != _notHasKey:
                    rv = yield build.render(self.hasKey)
            # outer branch (defaultWhenFalse=False): key present at all and a
            # hasKey replacement was supplied -- render the replacement
            elif self.hasKey != _notHasKey:
                rv = yield build.render(self.hasKey)
            else:
                rv = yield build.render(value[index])
        if rv is None:
            # None never appears in the output; substitute elideNoneAs
            rv = yield build.render(self.elideNoneAs)
        return rv
def _getInterpolationList(fmtstring):
# TODO: Verify that no positional substitutions are requested
dd = collections.defaultdict(str)
fmtstring % dd
return list(dd)
@implementer(IRenderable)
class _PropertyDict:
    # Renders to the full Properties object of the current build; used as
    # the lookup dict behind Interpolate's "prop:" selector.
    def getRenderingFor(self, build):
        return build.getProperties()


# shared stateless instance
_thePropertyDict = _PropertyDict()
@implementer(IRenderable)
class _WorkerPropertyDict:
    # Renders to the worker-info dict of the build's worker; used as the
    # lookup dict behind Interpolate's "worker:" selector.
    def getRenderingFor(self, build):
        return build.getBuild().getWorkerInfo()


# shared stateless instance
_theWorkerPropertyDict = _WorkerPropertyDict()
@implementer(IRenderable)
class _SecretRenderer:
    """Renders to the value of the named secret.

    The secret is fetched from the master's configured ``secrets`` service;
    the rendered value is also registered with the properties object so it
    can later be censored out of logged text.
    """

    def __init__(self, secret_name):
        self.secret_name = secret_name

    @defer.inlineCallbacks
    def getRenderingFor(self, properties):
        secrets_service = properties.master.namedServices.get("secrets")
        if not secrets_service:
            # Fix: the original message concatenated "'secrets'" directly
            # against "in Interpolate", yielding "'secrets'in Interpolate".
            error_message = (
                "secrets service not started, need to configure"
                " SecretManager in c['services'] to use 'secrets'"
                " in Interpolate"
            )
            raise KeyError(error_message)
        # reuse the service we already looked up instead of fetching
        # master.namedServices['secrets'] a second time
        secret_detail = yield secrets_service.get(self.secret_name)
        if secret_detail is None:
            raise KeyError(f"secret key {self.secret_name} is not found in any provider")
        properties.useSecret(secret_detail.value, self.secret_name)
        return secret_detail.value
class Secret(_SecretRenderer):
    # public alias of _SecretRenderer: renders to the named secret's value
    def __repr__(self):
        return f"Secret({self.secret_name})"
class _SecretIndexer:
def __contains__(self, password):
return True
def __getitem__(self, password):
return _SecretRenderer(password)
@implementer(IRenderable)
class _SourceStampDict(util.ComparableMixin):
    """Renders to the sourcestamp dict of one codebase, or {} if the build
    has no stamp for that codebase."""

    compare_attrs: ClassVar[Sequence[str]] = ('codebase',)

    def __init__(self, codebase):
        self.codebase = codebase

    def getRenderingFor(self, props):
        # empty dict fallback keeps attribute lookups from exploding when
        # the codebase is absent
        return props.getSourceStamp(self.codebase) or {}
@implementer(IRenderable)
class _Lazy(util.ComparableMixin):
    # Renders to a constant value, shielding it from any further rendering;
    # used to pass Interpolate's kwargs dict through untouched.
    compare_attrs: ClassVar[Sequence[str]] = ('value',)

    def __init__(self, value):
        self.value = value

    def getRenderingFor(self, build):
        return self.value

    def __repr__(self):
        return f'_Lazy({self.value!r})'
@implementer(IRenderable)
class Interpolate(RenderableOperatorsMixin, util.ComparableMixin):
    """
    This is a marker class, used fairly widely to indicate that we
    want to interpolate build properties.

    Supports ``%(selector:name)s`` references with the selectors prop/,
    secret/, src/, worker/ and kw/, plus the default operators ``:-``,
    ``:~``, ``:+``, ``:?`` and ``:#?``.
    """

    compare_attrs: ClassVar[Sequence[str]] = ('fmtstring', 'args', 'kwargs')

    # legal characters for property/codebase/attribute/keyword names
    identifier_re = re.compile(r'^[\w._-]*$')

    def __init__(self, fmtstring, *args, **kwargs):
        self.fmtstring = fmtstring
        self.args = args
        self.kwargs = kwargs
        if self.args and self.kwargs:
            config.error("Interpolate takes either positional or keyword substitutions, not both.")
        if not self.args:
            # parse eagerly so configuration errors surface at config-load
            # time rather than at render time
            self.interpolations = {}
            self._parse(fmtstring)

    def __repr__(self):
        if self.args:
            return f'Interpolate({self.fmtstring!r}, *{self.args!r})'
        elif self.kwargs:
            return f'Interpolate({self.fmtstring!r}, **{self.kwargs!r})'
        return f'Interpolate({self.fmtstring!r})'

    # Each _parse_substitution_* helper returns a (lookup-dict-renderable,
    # key, replacement-spec) triple for one selector; 'repl' is the raw text
    # after the second colon, or None when no replacement was given.

    def _parse_substitution_prop(self, arg):
        try:
            prop, repl = arg.split(":", 1)
        except ValueError:
            prop = arg
            repl = None
        if not Interpolate.identifier_re.match(prop):
            config.error(f"Property name must be alphanumeric for prop Interpolation '{arg}'")
            prop = None
            repl = None
        return _thePropertyDict, prop, repl

    def _parse_substitution_secret(self, arg):
        try:
            secret, repl = arg.split(":", 1)
        except ValueError:
            secret = arg
            repl = None
        return _SecretIndexer(), secret, repl

    def _parse_substitution_src(self, arg):
        # TODO: Handle changes
        try:
            codebase, attr, repl = arg.split(":", 2)
        except ValueError:
            try:
                codebase, attr = arg.split(":", 1)
                repl = None
            except ValueError:
                config.error(
                    "Must specify both codebase and attribute for " f"src Interpolation '{arg}'"
                )
                return {}, None, None
        if not Interpolate.identifier_re.match(codebase):
            config.error(f"Codebase must be alphanumeric for src Interpolation '{arg}'")
            codebase = attr = repl = None
        # NOTE(review): if the codebase was invalid, attr is now None and
        # match(None) would raise; presumably unreachable because
        # config.error raises -- confirm.
        if not Interpolate.identifier_re.match(attr):
            config.error(f"Attribute must be alphanumeric for src Interpolation '{arg}'")
            codebase = attr = repl = None
        return _SourceStampDict(codebase), attr, repl

    def _parse_substitution_worker(self, arg):
        try:
            prop, repl = arg.split(":", 1)
        except ValueError:
            prop = arg
            repl = None
        return _theWorkerPropertyDict, prop, repl

    def _parse_substitution_kw(self, arg):
        try:
            kw, repl = arg.split(":", 1)
        except ValueError:
            kw = arg
            repl = None
        if not Interpolate.identifier_re.match(kw):
            config.error(f"Keyword must be alphanumeric for kw Interpolation '{arg}'")
            kw = repl = None
        return _Lazy(self.kwargs), kw, repl

    # selector name -> unbound parser method; invoked as fn(self, arg)
    _substitutions = {
        "prop": _parse_substitution_prop,
        "secret": _parse_substitution_secret,
        "src": _parse_substitution_src,
        "worker": _parse_substitution_worker,
        "kw": _parse_substitution_kw,
    }

    def _parseSubstitution(self, fmt):
        try:
            key, arg = fmt.split(":", 1)
        except ValueError:
            config.error(f"invalid Interpolate substitution without selector '{fmt}'")
            return None
        fn = self._substitutions.get(key, None)
        if not fn:
            config.error(f"invalid Interpolate selector '{key}'")
            return None
        return fn(self, arg)

    @staticmethod
    def _splitBalancedParen(delim, arg):
        """Split *arg* at the first *delim* not nested in parentheses;
        raises ValueError on unbalanced parens, returns *arg* unchanged
        when the delimiter does not occur."""
        parenCount = 0
        for i, val in enumerate(arg):
            if val == "(":
                parenCount += 1
            if val == ")":
                parenCount -= 1
                if parenCount < 0:
                    raise ValueError
            if parenCount == 0 and val == delim:
                return arg[0:i], arg[i + 1 :]
        return arg

    def _parseColon_minus(self, d, kw, repl):
        # %(...:-repl)s -- use the value if the key exists, else repl
        return _Lookup(
            d, kw, default=Interpolate(repl, **self.kwargs), defaultWhenFalse=False, elideNoneAs=''
        )

    def _parseColon_tilde(self, d, kw, repl):
        # %(...:~repl)s -- use the value if it exists and is truthy, else repl
        return _Lookup(
            d, kw, default=Interpolate(repl, **self.kwargs), defaultWhenFalse=True, elideNoneAs=''
        )

    def _parseColon_plus(self, d, kw, repl):
        # %(...:+repl)s -- use repl if the key exists, else an empty string
        return _Lookup(
            d,
            kw,
            hasKey=Interpolate(repl, **self.kwargs),
            default='',
            defaultWhenFalse=False,
            elideNoneAs='',
        )

    def _parseColon_ternary(self, d, kw, repl, defaultWhenFalse=False):
        # %(...:?|true|false)s -- first character of repl is the delimiter
        delim = repl[0]
        if delim == '(':
            config.error("invalid Interpolate ternary delimiter '('")
            return None
        try:
            truePart, falsePart = self._splitBalancedParen(delim, repl[1:])
        except ValueError:
            config.error(
                f"invalid Interpolate ternary expression '{repl[1:]}' "
                f"with delimiter '{repl[0]}'"
            )
            return None
        return _Lookup(
            d,
            kw,
            hasKey=Interpolate(truePart, **self.kwargs),
            default=Interpolate(falsePart, **self.kwargs),
            defaultWhenFalse=defaultWhenFalse,
            elideNoneAs='',
        )

    def _parseColon_ternary_hash(self, d, kw, repl):
        # %(...:#?|true|false)s -- like :? but tests truthiness, not presence
        return self._parseColon_ternary(d, kw, repl, defaultWhenFalse=True)

    def _parse(self, fmtstring):
        keys = _getInterpolationList(fmtstring)
        for key in keys:
            if key not in self.interpolations:
                d, kw, repl = self._parseSubstitution(key)
                if repl is None:
                    # no explicit operator: behave like ':-' with empty repl
                    repl = '-'
                for pattern, fn in [
                    ("-", self._parseColon_minus),
                    ("~", self._parseColon_tilde),
                    ("+", self._parseColon_plus),
                    ("?", self._parseColon_ternary),
                    ("#?", self._parseColon_ternary_hash),
                ]:
                    junk, matches, tail = repl.partition(pattern)
                    # operator must be the very first character(s) of repl
                    if not junk and matches:
                        self.interpolations[key] = fn(d, kw, tail)
                        break
                if key not in self.interpolations:
                    config.error(f"invalid Interpolate default type '{repl[0]}'")

    def getRenderingFor(self, build):
        props = build.getProperties()
        if self.args:
            # positional: render the args then %-format them as a tuple
            d = props.render(self.args)
            d.addCallback(lambda args: self.fmtstring % tuple(args))
        else:
            # keyword/selector: render the parsed interpolation mapping
            d = props.render(self.interpolations)
            d.addCallback(lambda res: self.fmtstring % res)
        return d
@implementer(IRenderable)
class Property(RenderableOperatorsMixin, util.ComparableMixin):
    """
    An instance of this class renders a property of a build.
    """

    compare_attrs: ClassVar[Sequence[str]] = ('key', 'default', 'defaultWhenFalse')

    def __init__(self, key, default=None, defaultWhenFalse=True):
        """
        @param key: Property to render.
        @param default: Value to use if property isn't set.
        @param defaultWhenFalse: When true (default), use default value
            if property evaluates to False. Otherwise, use default value
            only when property isn't set.
        """
        self.key = key
        self.default = default
        self.defaultWhenFalse = defaultWhenFalse

    def __repr__(self):
        return f"Property({self.key})"

    def getRenderingFor(self, props):
        if not self.defaultWhenFalse:
            # only substitute the default when the property is truly unset
            if props.hasProperty(self.key):
                return props.render(props.getProperty(self.key))
            return props.render(self.default)

        d = props.render(props.getProperty(self.key))

        def fall_back_when_falsy(rendered):
            # a falsy rendered value (None, '', 0, ...) falls back to default
            return rendered if rendered else props.render(self.default)

        d.addCallback(fall_back_when_falsy)
        return d
@implementer(IRenderable)
class FlattenList(RenderableOperatorsMixin, util.ComparableMixin):
    """
    An instance of this class flattens all nested lists in a list
    """

    compare_attrs: ClassVar[Sequence[str]] = ('nestedlist',)

    def __init__(self, nestedlist, types=(list, tuple)):
        """
        @param nestedlist: a list of values to render
        @param types: only flatten these types. defaults to (list, tuple)
        """
        self.nestedlist = nestedlist
        self.types = types

    def getRenderingFor(self, props):
        # render first, then flatten the rendered result
        d = props.render(self.nestedlist)
        d.addCallback(lambda rendered: flatten(rendered, self.types))
        return d

    def __add__(self, b):
        # concatenating two FlattenLists merges their underlying lists
        other = b.nestedlist if isinstance(b, FlattenList) else b
        return FlattenList(self.nestedlist + other, self.types)
@implementer(IRenderable)
class _Renderer(util.ComparableMixin):
    """Wraps a user callable (see the ``renderer`` decorator): rendering
    calls ``fn(props, *args, **kwargs)`` and renders its result again."""

    compare_attrs: ClassVar[Sequence[str]] = ('fn',)

    def __init__(self, fn):
        self.fn = fn
        self.args = []
        self.kwargs = {}

    def withArgs(self, *args, **kwargs):
        """Return a copy with extra positional/keyword args appended."""
        clone = _Renderer(self.fn)
        clone.args = [*self.args, *args]
        clone.kwargs = {**self.kwargs, **kwargs}
        return clone

    @defer.inlineCallbacks
    def getRenderingFor(self, props):
        rendered_args = yield props.render(self.args)
        rendered_kwargs = yield props.render(self.kwargs)
        # We allow the renderer fn to return a renderable for convenience
        result = yield self.fn(props, *rendered_args, **rendered_kwargs)
        result = yield props.render(result)
        return result

    def __repr__(self):
        if self.args or self.kwargs:
            return f'renderer({self.fn!r}, args={self.args!r}, kwargs={self.kwargs!r})'
        return f'renderer({self.fn!r})'
def renderer(fn):
    """Decorator marking *fn* as a renderer: it will be called at render
    time with the properties object (plus any args added via withArgs)."""
    return _Renderer(fn)
@implementer(IRenderable)
class _DefaultRenderer:
    """
    Default IRenderable adaptor. Calls .getRenderingFor if available, otherwise
    returns argument unchanged.
    """

    def __init__(self, value):
        try:
            self.renderer = value.getRenderingFor
        except AttributeError:
            # not renderable: render to the value itself
            self.renderer = lambda _: value

    def getRenderingFor(self, build):
        return self.renderer(build)


registerAdapter(_DefaultRenderer, object, IRenderable)
@implementer(IRenderable)
class _ListRenderer:
    """
    List IRenderable adaptor. Maps Build.render over the list.
    """

    def __init__(self, value):
        self.value = value

    def getRenderingFor(self, build):
        # render every element and gather the results back into a list
        deferreds = [build.render(element) for element in self.value]
        return defer.gatherResults(deferreds)


registerAdapter(_ListRenderer, list, IRenderable)
@implementer(IRenderable)
class _TupleRenderer:
    """
    Tuple IRenderable adaptor. Maps Build.render over the tuple.
    """

    def __init__(self, value):
        self.value = value

    def getRenderingFor(self, build):
        # render every element, then convert the gathered list back to a tuple
        d = defer.gatherResults([build.render(element) for element in self.value])
        d.addCallback(tuple)
        return d


registerAdapter(_TupleRenderer, tuple, IRenderable)
@implementer(IRenderable)
class _DictRenderer:
    """
    Dict IRenderable adaptor. Maps Build.render over the keys and values in the dict.
    """

    def __init__(self, value):
        # reuse the list/tuple adaptors: render each (key, value) pair
        pairs = [_TupleRenderer((k, v)) for k, v in value.items()]
        self.value = _ListRenderer(pairs)

    def getRenderingFor(self, build):
        # rebuild a dict from the rendered (key, value) pairs
        d = self.value.getRenderingFor(build)
        d.addCallback(dict)
        return d


registerAdapter(_DictRenderer, dict, IRenderable)
@implementer(IRenderable)
class Transform:
    """
    A renderable that combines other renderables' results using an arbitrary function.
    """

    def __init__(self, function, *args, **kwargs):
        # the function may itself be a renderable resolving to a callable
        if not callable(function) and not IRenderable.providedBy(function):
            config.error("function given to Transform neither callable nor renderable")
        self._function = function
        self._args = args
        self._kwargs = kwargs

    @defer.inlineCallbacks
    def getRenderingFor(self, iprops):
        # render the function and both argument collections, then apply
        func = yield iprops.render(self._function)
        positional = yield iprops.render(self._args)
        keyword = yield iprops.render(self._kwargs)
        return func(*positional, **keyword)
| 32,103 | Python | .py | 802 | 31.285536 | 100 | 0.62137 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,808 | buildrequest.py | buildbot_buildbot/master/buildbot/process/buildrequest.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import annotations
import calendar
from typing import TYPE_CHECKING
from twisted.internet import defer
from buildbot.data import resultspec
from buildbot.process import properties
from buildbot.process.results import SKIPPED
if TYPE_CHECKING:
from buildbot.db.buildrequests import BuildRequestModel
from buildbot.master import BuildMaster
class BuildRequestCollapser:
    # brids is a list of the new added buildrequests id
    # This class is called before generated the 'new' event for the
    # buildrequest

    # Before adding new buildset/buildrequests, we must examine each unclaimed
    # buildrequest.
    # EG:
    #  1. get the list of all unclaimed buildrequests:
    #     - We must exclude all buildsets which have at least 1 claimed buildrequest
    #  2. For each unclaimed buildrequests, if compatible with the new request
    #     (sourcestamps match, except for revision) Then:
    #     2.1. claim it
    #     2.2. complete it with result SKIPPED

    def __init__(self, master, brids):
        self.master = master
        # ids of the newly-added build requests driving the collapse
        self.brids = brids

    @defer.inlineCallbacks
    def _getUnclaimedBrs(self, builderid):
        # Retrieve the list of Brs for all unclaimed builds
        unclaim_brs = yield self.master.data.get(
            ('builders', builderid, 'buildrequests'), [resultspec.Filter('claimed', 'eq', [False])]
        )
        # sort by submitted_at, so the first is the oldest
        unclaim_brs.sort(key=lambda brd: brd['submitted_at'])
        return unclaim_brs

    @defer.inlineCallbacks
    def collapse(self):
        """Collapse compatible unclaimed requests into the new ones; returns
        the ids of the requests that were claimed and marked SKIPPED."""
        brids_to_collapse = set()

        for brid in self.brids:
            # Get the BuildRequest object
            br = yield self.master.data.get(('buildrequests', brid))
            # Retrieve the buildername
            builderid = br['builderid']
            bldrdict = yield self.master.data.get(('builders', builderid))
            # Get the builder object
            bldr = self.master.botmaster.builders.get(bldrdict['name'])
            if not bldr:
                continue
            # Get the Collapse BuildRequest function (from the configuration)
            collapseRequestsFn = bldr.getCollapseRequestsFn()
            unclaim_brs = yield self._getUnclaimedBrs(builderid)

            # short circuit if there is no merging to do
            if not collapseRequestsFn or not unclaim_brs:
                continue

            for unclaim_br in unclaim_brs:
                # never collapse a request into itself
                if unclaim_br['buildrequestid'] == br['buildrequestid']:
                    continue

                canCollapse = yield collapseRequestsFn(self.master, bldr, br, unclaim_br)

                if canCollapse is True:
                    brids_to_collapse.add(unclaim_br['buildrequestid'])

        collapsed_brids = []
        for brid in brids_to_collapse:
            # claim-then-complete; skip requests another master claimed first
            claimed = yield self.master.data.updates.claimBuildRequests([brid])
            if claimed:
                yield self.master.data.updates.completeBuildRequests([brid], SKIPPED)
                collapsed_brids.append(brid)

        return collapsed_brids
class TempSourceStamp:
    """Lightweight stand-in for a sourcestamp, wrapping the data-API dict
    and exposing its fields as attributes (temporary fake sourcestamp)."""

    ATTRS = ('branch', 'revision', 'repository', 'project', 'codebase')

    # (flattened-dict key, key inside the nested 'patch' dict)
    PATCH_ATTRS = (
        ('patch_level', 'level'),
        ('patch_body', 'body'),
        ('patch_subdir', 'subdir'),
        ('patch_author', 'author'),
        ('patch_comment', 'comment'),
    )

    changes: list[TempChange]

    def __init__(self, ssdict):
        self._ssdict = ssdict

    def __getattr__(self, attr):
        patch = self._ssdict.get('patch')
        if attr == 'patch':
            # legacy 3-tuple form, or None when there is no patch
            return (patch['level'], patch['body'], patch['subdir']) if patch else None
        if attr == 'patch_info':
            return (patch['author'], patch['comment']) if patch else (None, None)
        if attr in self.ATTRS or attr == 'ssid':
            return self._ssdict[attr]
        raise AttributeError(attr)

    def asSSDict(self):
        return self._ssdict

    def asDict(self):
        # This return value should match the kwargs to
        # SourceStampsConnectorComponent.findSourceStampId
        result = {attr: self._ssdict.get(attr) for attr in self.ATTRS}
        patch = self._ssdict.get('patch') or {}
        result.update((flat_key, patch.get(attr)) for flat_key, attr in self.PATCH_ATTRS)
        assert all(
            isinstance(val, (str, int, bytes, type(None))) for attr, val in result.items()
        ), result
        return result
class TempChange:
    """Lightweight stand-in for a change object, wrapping the data-API dict
    and exposing its fields as attributes (temporary fake change)."""

    def __init__(self, d):
        self._chdict = d

    def __getattr__(self, attr):
        # 'who' is the legacy alias for the change author
        if attr == 'who':
            return self._chdict['author']
        if attr == 'properties':
            return properties.Properties.fromDict(self._chdict['properties'])
        return self._chdict[attr]

    def asChDict(self):
        return self._chdict
class BuildRequest:
    """
    A rolled-up encapsulation of all of the data relevant to a build request.

    This class is used by the C{nextBuild} and C{collapseRequests} configuration
    parameters, as well as in starting a build.  Construction of a BuildRequest
    object is a heavyweight process involving a lot of database queries, so
    it should be avoided where possible.  See bug #1894.

    @type reason: string
    @ivar reason: the reason this Build is being requested. Schedulers provide
    this, but for forced builds the user requesting the build will provide a
    string.  It comes from the buildsets table.

    @type properties: L{properties.Properties}
    @ivar properties: properties that should be applied to this build, taken
    from the buildset containing this build request

    @ivar submittedAt: a timestamp (seconds since epoch) when this request was
    submitted to the Builder. This is used by the CVS step to compute a
    checkout timestamp, as well as by the master to prioritize build requests
    from oldest to newest.

    @ivar buildername: name of the requested builder

    @ivar priority: request priority

    @ivar id: build request ID

    @ivar bsid: ID of the parent buildset
    """

    submittedAt: None | int = None
    # NOTE(review): mutable class-level default shared by all instances until
    # _make_br assigns a fresh dict per instance -- confirm nothing mutates
    # the class attribute itself.
    sources: dict[str, TempSourceStamp] = {}
    id: int
    bsid: int
    buildername: str
    builderid: int
    priority: int
    master: BuildMaster
    waitedFor: int
    reason: str
    properties: properties.Properties

    @classmethod
    def fromBrdict(cls, master, brdict: BuildRequestModel):
        """
        Construct a new L{BuildRequest} from a L{BuildRequestModel} as returned by
        L{BuildRequestsConnectorComponent.getBuildRequest}.

        This method uses a cache, which may result in return of stale objects;
        for the most up-to-date information, use the database connector
        methods.

        @param master: current build master
        @param brdict: build request dictionary

        @returns: L{BuildRequest}, via Deferred
        """
        cache = master.caches.get_cache("BuildRequests", cls._make_br)
        return cache.get(brdict.buildrequestid, brdict=brdict, master=master)

    @classmethod
    @defer.inlineCallbacks
    def _make_br(cls, brid: int, brdict: BuildRequestModel, master: BuildMaster):
        # cache miss factory for fromBrdict: builds the full object from the db
        buildrequest = cls()
        buildrequest.id = brid
        buildrequest.bsid = brdict.buildsetid
        buildrequest.buildername = brdict.buildername
        buildrequest.builderid = brdict.builderid
        buildrequest.priority = brdict.priority
        dt = brdict.submitted_at
        if dt:
            # convert the db datetime to a POSIX timestamp
            buildrequest.submittedAt = calendar.timegm(dt.utctimetuple())
        buildrequest.master = master
        buildrequest.waitedFor = brdict.waited_for

        # fetch the buildset to get the reason
        buildset = yield master.db.buildsets.getBuildset(brdict.buildsetid)
        assert buildset  # schema should guarantee this
        buildrequest.reason = buildset.reason

        # fetch the buildset properties, and convert to Properties
        buildset_properties = yield master.db.buildsets.getBuildsetProperties(brdict.buildsetid)
        buildrequest.properties = properties.Properties.fromDict(buildset_properties)

        # make a fake sources dict (temporary)
        bsdata = yield master.data.get(('buildsets', str(buildrequest.bsid)))
        assert bsdata['sourcestamps'], "buildset must have at least one sourcestamp"
        buildrequest.sources = {}
        for ssdata in bsdata['sourcestamps']:
            ss = buildrequest.sources[ssdata['codebase']] = TempSourceStamp(ssdata)
            changes = yield master.data.get(("sourcestamps", ss.ssid, "changes"))
            ss.changes = [TempChange(change) for change in changes]

        return buildrequest

    @staticmethod
    def filter_buildset_props_for_collapsing(bs_props):
        # keep only the properties injected by a scheduler (excluding the
        # scheduler's own name), for comparison in canBeCollapsed
        return {
            name: value
            for name, (value, source) in bs_props.items()
            if name != 'scheduler' and source == 'Scheduler'
        }

    @staticmethod
    @defer.inlineCallbacks
    def canBeCollapsed(master, new_br, old_br):
        """
        Returns true if both buildrequest can be merged, via Deferred.

        This implements Buildbot's default collapse strategy.
        """
        # short-circuit: if these are for the same buildset, collapse away
        if new_br['buildsetid'] == old_br['buildsetid']:
            return True

        # the new buildrequest must actually be newer than the old build request, otherwise we
        # may end up with situations where two build requests submitted at the same time will
        # cancel each other.
        if new_br['buildrequestid'] < old_br['buildrequestid']:
            return False

        # get the buildsets for each buildrequest
        selfBuildsets = yield master.data.get(('buildsets', str(new_br['buildsetid'])))
        otherBuildsets = yield master.data.get(('buildsets', str(old_br['buildsetid'])))

        # extract sourcestamps, as dictionaries by codebase
        selfSources = dict((ss['codebase'], ss) for ss in selfBuildsets['sourcestamps'])
        otherSources = dict((ss['codebase'], ss) for ss in otherBuildsets['sourcestamps'])

        # if the sets of codebases do not match, we can't collapse
        if set(selfSources) != set(otherSources):
            return False

        for c, selfSS in selfSources.items():
            otherSS = otherSources[c]
            if selfSS['repository'] != otherSS['repository']:
                return False

            if selfSS['branch'] != otherSS['branch']:
                return False

            if selfSS['project'] != otherSS['project']:
                return False

            # anything with a patch won't be collapsed
            if selfSS['patch'] or otherSS['patch']:
                return False

            # get changes & compare
            selfChanges = yield master.data.get(('sourcestamps', selfSS['ssid'], 'changes'))
            otherChanges = yield master.data.get(('sourcestamps', otherSS['ssid'], 'changes'))
            # if both have changes, proceed, else fail - if no changes check revision instead
            if selfChanges and otherChanges:
                continue

            if selfChanges and not otherChanges:
                return False

            if not selfChanges and otherChanges:
                return False

            # else check revisions
            if selfSS['revision'] != otherSS['revision']:
                return False

        # don't collapse build requests if the properties injected by the scheduler differ
        new_bs_props = yield master.data.get(('buildsets', str(new_br['buildsetid']), 'properties'))
        old_bs_props = yield master.data.get(('buildsets', str(old_br['buildsetid']), 'properties'))

        new_bs_props = BuildRequest.filter_buildset_props_for_collapsing(new_bs_props)
        old_bs_props = BuildRequest.filter_buildset_props_for_collapsing(old_bs_props)
        if new_bs_props != old_bs_props:
            return False

        return True

    def mergeSourceStampsWith(self, others):
        """Returns one merged sourcestamp for every codebase"""
        # get all codebases from all requests
        all_codebases = set(self.sources)
        for other in others:
            all_codebases |= set(other.sources)

        all_merged_sources = {}
        # walk along the codebases
        for codebase in all_codebases:
            all_sources = []
            if codebase in self.sources:
                all_sources.append(self.sources[codebase])
            for other in others:
                if codebase in other.sources:
                    all_sources.append(other.sources[codebase])
            assert all_sources, "each codebase should have at least one sourcestamp"

            # TODO: select the sourcestamp that best represents the merge,
            # preferably the latest one.  This used to be accomplished by
            # looking at changeids and picking the highest-numbered.
            all_merged_sources[codebase] = all_sources[-1]

        return list(all_merged_sources.values())

    def mergeReasons(self, others):
        """Return a reason for the merged build request."""
        # deduplicate while preserving first-appearance order
        reasons = []
        for req in [self, *others]:
            if req.reason and req.reason not in reasons:
                reasons.append(req.reason)
        return ", ".join(reasons)

    def getSubmitTime(self):
        """Return the POSIX timestamp the request was submitted (or None)."""
        return self.submittedAt
| 14,266 | Python | .py | 305 | 37.754098 | 100 | 0.654974 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,809 | results.py | buildbot_buildbot/master/buildbot/process/results.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
ALL_RESULTS = list(range(7))
SUCCESS, WARNINGS, FAILURE, SKIPPED, EXCEPTION, RETRY, CANCELLED = ALL_RESULTS
Results = ["success", "warnings", "failure", "skipped", "exception", "retry", "cancelled"]
MultipleResults = [
"successes",
"warnings",
"failures",
"skipped",
"exceptions",
"retries",
"cancelled",
]
def statusToString(status, count=1):
if status is None:
return "not finished"
if status < 0 or status >= len(Results):
return "Invalid status"
if count > 1:
return MultipleResults[status]
return Results[status]
def worst_status(a, b):
    """Return the more severe of two result codes.

    Severity order, worst first: CANCELLED, RETRY, EXCEPTION, FAILURE,
    WARNINGS, SUCCESS, SKIPPED — CANCELLED is always considered the worst.
    """
    severity_order = (CANCELLED, RETRY, EXCEPTION, FAILURE, WARNINGS, SUCCESS, SKIPPED)
    for candidate in severity_order:
        if candidate in (a, b):
            return candidate
    return None
def computeResultAndTermination(obj, result, previousResult):
    """Combine a step result with the previous overall result.

    *obj* supplies the haltOnFailure / flunkOn* / warnOn* flags (see
    ResultComputingConfigMixin).  Returns (result, terminate): the new
    overall result and whether the build should stop now.
    """
    overall = result
    terminate = False
    if result == FAILURE:
        # flag precedence: flunkOnFailure overrides warnOnFailure
        if not obj.flunkOnFailure:
            overall = SUCCESS
        if obj.warnOnFailure:
            overall = WARNINGS
        if obj.flunkOnFailure:
            overall = FAILURE
        if obj.haltOnFailure:
            terminate = True
    elif result == WARNINGS:
        if obj.warnOnWarnings:
            overall = WARNINGS
        else:
            overall = SUCCESS
        if obj.flunkOnWarnings:
            overall = FAILURE
    elif result in (EXCEPTION, RETRY, CANCELLED):
        terminate = True
    return worst_status(previousResult, overall), terminate
class ResultComputingConfigMixin:
    """Mixin holding the flags consulted by computeResultAndTermination."""

    # stop the build immediately when this step fails
    haltOnFailure = False
    # treat warnings from this step as an overall FAILURE
    flunkOnWarnings = False
    # treat a failure of this step as an overall FAILURE
    flunkOnFailure = True
    # treat warnings from this step as an overall WARNINGS result
    warnOnWarnings = False
    # treat a failure of this step as an overall WARNINGS result
    warnOnFailure = False

    # names of the flag attributes above, for configuration handling
    resultConfig = [
        "haltOnFailure",
        "flunkOnWarnings",
        "flunkOnFailure",
        "warnOnWarnings",
        "warnOnFailure",
    ]
| 2,779 | Python | .py | 77 | 30.428571 | 90 | 0.692679 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,810 | cache.py | buildbot_buildbot/master/buildbot/process/cache.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from buildbot.util import lru
from buildbot.util import service
class CacheManager(service.ReconfigurableServiceMixin, service.AsyncService):
    """
    A manager for a collection of caches, each for different types of objects
    and with potentially-overlapping key spaces.

    There is generally only one instance of this class, available at
    C{master.caches}.
    """

    # a cache of length one still has many benefits: it collects objects that
    # remain referenced elsewhere; it collapses simultaneous misses into one
    # miss function; and it will optimize repeated fetches of the same object.
    DEFAULT_CACHE_SIZE = 1

    def __init__(self):
        self.setName('caches')
        self.config = {}
        self._caches = {}

    def get_cache(self, cache_name, miss_fn):
        """
        Get an L{AsyncLRUCache} object with the given name, creating it on
        first use.  Since the cache is permanent, this method is normally
        called once (e.g. in C{startService}) and the value stored.

        @param cache_name: name of the cache (usually the name of the type of
        object it stores)
        @param miss_fn: miss function for the cache; see L{AsyncLRUCache}
        constructor.
        @returns: L{AsyncLRUCache} instance
        """
        if cache_name not in self._caches:
            size = self.config.get(cache_name, self.DEFAULT_CACHE_SIZE)
            assert size >= 1
            self._caches[cache_name] = lru.AsyncLRUCache(miss_fn, size)
        return self._caches[cache_name]

    def reconfigServiceWithBuildbotConfig(self, new_config):
        # adopt the new per-cache size limits, resizing existing caches
        self.config = new_config.caches
        for cache_name, cache in self._caches.items():
            cache.set_max_size(new_config.caches.get(cache_name, self.DEFAULT_CACHE_SIZE))

        return super().reconfigServiceWithBuildbotConfig(new_config)

    def get_metrics(self):
        """Return hit/miss statistics for every cache, keyed by cache name."""
        metrics = {}
        for cache_name, cache in self._caches.items():
            metrics[cache_name] = {
                'hits': cache.hits,
                'refhits': cache.refhits,
                'misses': cache.misses,
                'max_size': cache.max_size,
            }
        return metrics
| 2,832 | Python | .py | 60 | 40.683333 | 97 | 0.693589 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,811 | metrics.py | buildbot_buildbot/master/buildbot/process/metrics.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
r"""
Buildbot metrics module
Keeps track of counts and timings of various internal buildbot
activities.
Basic architecture:
MetricEvent.log(...)
||
\/
MetricLogObserver
||
\/
MetricHandler
||
\/
MetricWatcher
"""
from __future__ import annotations

import functools
import gc
import os
import sys
from collections import defaultdict
from collections import deque

from twisted.application import service
from twisted.internet import reactor
from twisted.internet.base import ReactorBase
from twisted.internet.task import LoopingCall
from twisted.python import log

from buildbot import util
from buildbot.util import service as util_service
# Make use of the resource module if we can
try:
import resource
assert resource
except ImportError:
resource = None # type: ignore[assignment]
class MetricEvent:
    """Base class for metric events; log() emits an instance via the twisted log."""

    @classmethod
    def log(cls, *args, **kwargs):
        # the instance travels under the 'metric' key so that
        # MetricLogObserver.emit can pick it out of the log event dict
        log.msg(metric=cls(*args, **kwargs))
class MetricCountEvent(MetricEvent):
    """Event that adds to (or, with absolute=True, overwrites) a named counter."""

    def __init__(self, counter, count=1, absolute=False):
        self.counter = counter
        self.count = count
        self.absolute = absolute
class MetricTimeEvent(MetricEvent):
    """Event recording an elapsed duration (seconds) for a named timer."""

    def __init__(self, timer, elapsed):
        self.timer = timer
        self.elapsed = elapsed
# Alarm severity levels and their display names (index-aligned).
ALARM_OK, ALARM_WARN, ALARM_CRIT = list(range(3))
ALARM_TEXT = ["OK", "WARN", "CRIT"]
class MetricAlarmEvent(MetricEvent):
    """Event setting the level (and optional message) of a named alarm."""

    def __init__(self, alarm, msg=None, level=ALARM_OK):
        self.alarm = alarm
        self.level = level
        self.msg = msg
def countMethod(counter):
    """Decorator factory: log a MetricCountEvent for *counter* on each call.

    The wrapped function's return value is passed through unchanged.
    """

    def decorator(func):
        # functools.wraps preserves the wrapped function's name/docstring,
        # which the plain inner wrapper previously discarded
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            MetricCountEvent.log(counter=counter)
            return func(*args, **kwargs)

        return wrapper

    return decorator
class Timer:
    """Named timer that logs a MetricTimeEvent with the elapsed time on stop()."""

    # For testing: allows injecting a fake reactor as the time source
    _reactor: ReactorBase | None = None

    def __init__(self, name):
        self.name = name
        self.started = None

    def startTimer(self, func):
        """Decorator: start this timer just before calling *func*."""

        # functools.wraps preserves func's metadata (previously lost)
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            self.start()
            return func(*args, **kwargs)

        return wrapper

    def stopTimer(self, func):
        """Decorator: stop this timer after *func* returns or raises."""

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            finally:
                self.stop()

        return wrapper

    def start(self):
        """Record the current time as the start of the measured interval."""
        self.started = util.now(self._reactor)

    def stop(self):
        """Log the elapsed time since start(); no-op if never started."""
        if self.started is not None:
            elapsed = util.now(self._reactor) - self.started
            MetricTimeEvent.log(timer=self.name, elapsed=elapsed)
            self.started = None
def timeMethod(name, _reactor=None):
    """Decorator factory: time each call of the wrapped function.

    Every invocation logs a MetricTimeEvent under *name*; tests may supply
    an alternate *_reactor* as the time source.
    """

    def decorator(func):
        t = Timer(name)
        t._reactor = _reactor

        # functools.wraps preserves func's name/docstring (previously lost)
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            t.start()
            try:
                return func(*args, **kwargs)
            finally:
                t.stop()

        return wrapper

    return decorator
class FiniteList(deque):
    """A deque that keeps at most its configured number of most-recent items."""

    def __init__(self, maxlen=10):
        self._maxlen = maxlen
        super().__init__()

    def append(self, o):
        """Append *o*, discarding the oldest entry once the list is full."""
        super().append(o)
        while len(self) > self._maxlen:
            self.popleft()
class AveragingFiniteList(FiniteList):
    """A FiniteList that maintains the arithmetic mean of its items in .average."""

    def __init__(self, maxlen=10):
        super().__init__(maxlen)
        self.average = 0

    def append(self, o):
        super().append(o)
        self._calc()

    def _calc(self):
        """Recompute and return the average (0 for an empty list)."""
        self.average = sum(self) / len(self) if self else 0
        return self.average
class MetricHandler:
    """Base class for metric aggregators.

    A subclass stores events of one MetricEvent type and renders them via
    report()/asDict(); registered watchers run after every handled event
    (see MetricLogObserver.emit).
    """

    def __init__(self, metrics):
        self.metrics = metrics
        self.watchers = []
        self.reset()

    def addWatcher(self, watcher):
        # watcher.run() is invoked after each event routed to this handler
        self.watchers.append(watcher)

    def removeWatcher(self, watcher):
        self.watchers.remove(watcher)

    # For subclasses to define
    def reset(self):
        raise NotImplementedError

    def handle(self, eventDict, metric):
        raise NotImplementedError

    def get(self, metric):
        raise NotImplementedError

    def keys(self):
        raise NotImplementedError

    def report(self):
        raise NotImplementedError

    def asDict(self):
        raise NotImplementedError
class MetricCountHandler(MetricHandler):
    """Aggregates MetricCountEvents into named integer counters."""

    _counters: defaultdict[str, int] | None = None

    def reset(self):
        self._counters = defaultdict(int)

    def handle(self, eventDict, metric):
        # absolute events overwrite the counter; others accumulate
        if metric.absolute:
            self._counters[metric.counter] = metric.count
        else:
            self._counters[metric.counter] += metric.count

    def keys(self):
        return list(self._counters)

    def get(self, counter):
        return self._counters[counter]

    def report(self):
        lines = [f"Counter {name}: {self.get(name)}" for name in sorted(self.keys())]
        return "\n".join(lines)

    def asDict(self):
        return {"counters": {name: self.get(name) for name in sorted(self.keys())}}
class MetricTimeHandler(MetricHandler):
    """Aggregates MetricTimeEvents into a rolling average per timer name."""

    _timers: defaultdict[str, AveragingFiniteList] | None = None

    def reset(self):
        self._timers = defaultdict(AveragingFiniteList)

    def handle(self, eventDict, metric):
        self._timers[metric.timer].append(metric.elapsed)

    def keys(self):
        return list(self._timers)

    def get(self, timer):
        return self._timers[timer].average

    def report(self):
        lines = [f"Timer {name}: {self.get(name):.3g}" for name in sorted(self.keys())]
        return "\n".join(lines)

    def asDict(self):
        return {"timers": {name: self.get(name) for name in sorted(self.keys())}}
class MetricAlarmHandler(MetricHandler):
    """Tracks the most recent (level, message) pair for each named alarm."""

    _alarms: defaultdict[str, tuple[int, str]] | None

    def reset(self):
        # BUG FIX: a defaultdict factory takes no arguments; the previous
        # 'lambda x: ALARM_OK' raised TypeError on any missing-key lookup and
        # would have produced a bare int where handle() stores (level, msg)
        # tuples.  Default to an OK alarm with no message instead.
        self._alarms = defaultdict(lambda: (ALARM_OK, None))

    def handle(self, eventDict, metric):
        self._alarms[metric.alarm] = (metric.level, metric.msg)

    def report(self):
        retval = []
        for alarm, (level, msg) in sorted(self._alarms.items()):
            if msg:
                retval.append(f"{ALARM_TEXT[level]} {alarm}: {msg}")
            else:
                retval.append(f"{ALARM_TEXT[level]} {alarm}")
        return "\n".join(retval)

    def asDict(self):
        retval = {}
        for alarm, (level, msg) in sorted(self._alarms.items()):
            retval[alarm] = (ALARM_TEXT[level], msg)
        return {"alarms": retval}
class AttachedWorkersWatcher:
    """Watcher that cross-checks the two attached-worker counters and raises
    an alarm when they diverge by more than one."""

    def __init__(self, metrics):
        self.metrics = metrics

    def run(self):
        # Check if 'BotMaster.attached_workers' equals
        # 'AbstractWorker.attached_workers'
        h = self.metrics.getHandler(MetricCountEvent)
        if not h:
            log.msg("Couldn't get MetricCountEvent handler")
            MetricAlarmEvent.log(
                'AttachedWorkersWatcher',
                # fixed typo in the emitted message ("Coudln't")
                msg="Couldn't get MetricCountEvent handler",
                level=ALARM_WARN,
            )
            return
        botmaster_count = h.get('BotMaster.attached_workers')
        worker_count = h.get('AbstractWorker.attached_workers')

        # We let these be off by one since they're counted at slightly
        # different times
        if abs(botmaster_count - worker_count) > 1:
            level = ALARM_WARN
        else:
            level = ALARM_OK

        MetricAlarmEvent.log(
            'attached_workers', msg=f'{botmaster_count} {worker_count}', level=level
        )
def _get_rss():
if sys.platform == 'linux':
try:
with open(f"/proc/{os.getpid()}/statm", encoding='utf-8') as f:
return int(f.read().split()[1])
except Exception:
return 0
return 0
def periodicCheck(_reactor=reactor):
    """Collect periodic process metrics and measure reactor latency.

    Logs the gc garbage count (with an alarm when non-zero), the process's
    resource-usage counters where the resource module is available, and
    schedules a short callback to estimate how far behind the reactor runs.
    Any error is logged rather than propagated.
    """
    try:
        # Measure how much garbage we have
        garbage_count = len(gc.garbage)
        MetricCountEvent.log('gc.garbage', garbage_count, absolute=True)
        if garbage_count == 0:
            level = ALARM_OK
        else:
            level = ALARM_WARN
        MetricAlarmEvent.log('gc.garbage', level=level)

        if resource:
            r = resource.getrusage(resource.RUSAGE_SELF)
            # NOTE: attrs is indexed positionally against the rusage result
            # below (r[i]), so this list must stay in struct-field order.
            attrs = [
                'ru_utime',
                'ru_stime',
                'ru_maxrss',
                'ru_ixrss',
                'ru_idrss',
                'ru_isrss',
                'ru_minflt',
                'ru_majflt',
                'ru_nswap',
                'ru_inblock',
                'ru_oublock',
                'ru_msgsnd',
                'ru_msgrcv',
                'ru_nsignals',
                'ru_nvcsw',
                'ru_nivcsw',
            ]
            for i, a in enumerate(attrs):
                # Linux versions prior to 2.6.32 didn't report this value, but we
                # can calculate it from /proc/<pid>/statm
                v = r[i]
                if a == 'ru_maxrss' and v == 0:
                    v = _get_rss() * resource.getpagesize() / 1024
                MetricCountEvent.log(f'resource.{a}', v, absolute=True)
            MetricCountEvent.log('resource.pagesize', resource.getpagesize(), absolute=True)
        # Measure the reactor delay: schedule a call for `dt` seconds from
        # now and log how late it actually fires.
        then = util.now(_reactor)
        dt = 0.1

        def cb():
            now = util.now(_reactor)
            delay = (now - then) - dt
            MetricTimeEvent.log("reactorDelay", delay)

        _reactor.callLater(dt, cb)
    except Exception:
        log.err(None, "while collecting VM metrics")
class MetricLogObserver(util_service.ReconfigurableServiceMixin, service.MultiService):
    """Service routing MetricEvents from the twisted log to their handlers.

    While enabled it observes the twisted log, dispatches metric events via
    emit(), periodically runs periodicCheck(), and periodically writes each
    handler's textual report to the log.  Intervals come from the 'metrics'
    section of the master configuration.
    """

    _reactor = reactor

    def __init__(self):
        super().__init__()
        self.setName('metrics')
        self.enabled = False
        self.periodic_task = None
        self.periodic_interval = None
        self.log_task = None
        self.log_interval = None

        # Mapping of metric type to handlers for that type
        self.handlers = {}

        # Register our default handlers
        self.registerHandler(MetricCountEvent, MetricCountHandler(self))
        self.registerHandler(MetricTimeEvent, MetricTimeHandler(self))
        self.registerHandler(MetricAlarmEvent, MetricAlarmHandler(self))
        self.getHandler(MetricCountEvent).addWatcher(AttachedWorkersWatcher(self))

    def reconfigServiceWithBuildbotConfig(self, new_config):
        """Enable/disable metrics collection and (re)start the periodic tasks."""
        # first, enable or disable
        if new_config.metrics is None:
            self.disable()
        else:
            self.enable()

            metrics_config = new_config.metrics

            # Start up periodic logging
            log_interval = metrics_config.get('log_interval', 60)
            if log_interval != self.log_interval:
                if self.log_task:
                    self.log_task.stop()
                    self.log_task = None
                if log_interval:
                    self.log_task = LoopingCall(self.report)
                    self.log_task.clock = self._reactor
                    self.log_task.start(log_interval)
                # BUG FIX: remember the interval; it was never updated, so an
                # unchanged reconfig needlessly stopped/restarted the task
                self.log_interval = log_interval

            # same for the periodic task
            periodic_interval = metrics_config.get('periodic_interval', 10)
            if periodic_interval != self.periodic_interval:
                if self.periodic_task:
                    self.periodic_task.stop()
                    self.periodic_task = None
                if periodic_interval:
                    self.periodic_task = LoopingCall(periodicCheck, self._reactor)
                    self.periodic_task.clock = self._reactor
                    self.periodic_task.start(periodic_interval)
                # BUG FIX: as above, track the configured interval
                self.periodic_interval = periodic_interval

        # upcall
        return super().reconfigServiceWithBuildbotConfig(new_config)

    def stopService(self):
        self.disable()
        super().stopService()

    def enable(self):
        if self.enabled:
            return
        log.addObserver(self.emit)
        self.enabled = True

    def disable(self):
        if not self.enabled:
            return

        if self.periodic_task:
            self.periodic_task.stop()
            self.periodic_task = None

        if self.log_task:
            self.log_task.stop()
            self.log_task = None

        log.removeObserver(self.emit)
        self.enabled = False

    def registerHandler(self, interface, handler):
        """Install *handler* for metric type *interface*; returns the old handler."""
        old = self.getHandler(interface)
        self.handlers[interface] = handler
        return old

    def getHandler(self, interface):
        return self.handlers.get(interface)

    def emit(self, eventDict):
        """Twisted log observer: route metric events to the matching handler."""
        # Ignore non-statistic events
        metric = eventDict.get('metric')
        if not metric or not isinstance(metric, MetricEvent):
            return

        if metric.__class__ not in self.handlers:
            return

        h = self.handlers[metric.__class__]
        h.handle(eventDict, metric)
        for w in h.watchers:
            w.run()

    def asDict(self):
        """Merge every handler's asDict() into one dictionary."""
        retval = {}
        for _, handler in self.handlers.items():
            retval.update(handler.asDict())
        return retval

    def report(self):
        """Write each handler's textual report to the twisted log."""
        try:
            for handler in self.handlers.values():
                report = handler.report()
                if not report:
                    continue
                for line in report.split("\n"):
                    log.msg(line)
        except Exception:
            log.err(None, "generating metric report")
| 14,227 | Python | .py | 397 | 26.566751 | 92 | 0.600219 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,812 | builder.py | buildbot_buildbot/master/buildbot/process/builder.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import annotations
import warnings
import weakref
from typing import TYPE_CHECKING
from typing import Any
from twisted.application import service
from twisted.internet import defer
from twisted.python import log
from buildbot import interfaces
from buildbot.data import resultspec
from buildbot.data.workers import Worker
from buildbot.interfaces import IRenderable
from buildbot.process import buildrequest
from buildbot.process import workerforbuilder
from buildbot.process.build import Build
from buildbot.process.locks import get_real_locks_from_accesses_raw
from buildbot.process.properties import Properties
from buildbot.process.results import RETRY
from buildbot.util import bytes2unicode
from buildbot.util import epoch2datetime
from buildbot.util import service as util_service
from buildbot.util.render_description import render_description
if TYPE_CHECKING:
from buildbot.config.builder import BuilderConfig
from buildbot.config.master import MasterConfig
from buildbot.master import BuildMaster
def enforceChosenWorker(bldr, workerforbuilder, breq):
    """canStartBuild helper honoring a 'workername' property on the request.

    Returns False only when the request carries a string 'workername'
    property naming a different worker than the one offered; otherwise True.
    """
    if 'workername' not in breq.properties:
        return True
    requested = breq.properties['workername']
    if not isinstance(requested, str):
        # non-string values are ignored rather than enforced
        return True
    return requested == workerforbuilder.worker.workername
class Builder(util_service.ReconfigurableServiceMixin, service.MultiService):
    """Master-side service representing one configured builder."""

    # reconfigure builders before workers
    reconfig_priority = 196

    # the active BuildMaster instance; set by the service hierarchy
    master: BuildMaster | None = None

    @property
    def expectations(self):
        """Deprecated accessor; always returns None (kept for compatibility)."""
        warnings.warn("'Builder.expectations' is deprecated.", stacklevel=2)
        return None
    def __init__(self, name: str) -> None:
        """Create the builder service; *name* is the configured builder name."""
        super().__init__()
        self.name: str | None = name  # type: ignore[assignment]

        # this is filled on demand by getBuilderId; don't access it directly
        self._builderid = None

        # build/wannabuild slots: Build objects move along this sequence
        self.building: list[Build] = []
        # old_building holds active builds that were stolen from a predecessor
        self.old_building: weakref.WeakKeyDictionary[Build, Any] = weakref.WeakKeyDictionary()

        # workers which have connected but which are not yet available.
        # These are always in the ATTACHING state.
        self.attaching_workers: list[Worker] = []

        # workers at our disposal. Each WorkerForBuilder instance has a
        # .state that is IDLE, PINGING, or BUILDING. "PINGING" is used when a
        # Build is about to start, to make sure that they're still alive.
        self.workers: list[Worker] = []

        # the current BuilderConfig; set in reconfigServiceWithBuildbotConfig
        self.config: BuilderConfig | None = None

        # Updated in reconfigServiceWithBuildbotConfig
        self.project_name = None
        self.project_id = None

        # Tracks config version for locks
        self.config_version = None
    def _find_builder_config_by_name(self, new_config: MasterConfig) -> BuilderConfig:
        """Return the BuilderConfig in *new_config* whose name matches ours.

        Raises AssertionError when no matching config exists, so this never
        actually returns None (annotation tightened accordingly).
        """
        for builder_config in new_config.builders:
            if builder_config.name == self.name:
                return builder_config

        raise AssertionError(f"no config found for builder '{self.name}'")
    @defer.inlineCallbacks
    def find_project_id(self, project):
        """Resolve a project name to its id via the data API.

        None passes through unchanged; a failed lookup is logged and yields
        None.
        """
        if project is None:
            return project
        projectid = yield self.master.data.updates.find_project_id(project)
        if projectid is None:
            log.msg(f"{self} could not find project ID for project name {project}")
        return projectid
    @defer.inlineCallbacks
    def reconfigServiceWithBuildbotConfig(self, new_config):
        """Adopt this builder's BuilderConfig from *new_config*.

        Publishes updated builder metadata to the data API when it changed,
        and drops attached workers that are no longer configured.
        """
        builder_config = self._find_builder_config_by_name(new_config)
        old_config = self.config
        self.config = builder_config
        self.config_version = self.master.config_version

        # allocate builderid now, so that the builder is visible in the web
        # UI; without this, the builder wouldn't appear until it performed a
        # build.
        builderid = yield self.getBuilderId()

        if self._has_updated_config_info(old_config, builder_config):
            projectid = yield self.find_project_id(builder_config.project)
            self.project_name = builder_config.project
            self.project_id = projectid

            yield self.master.data.updates.updateBuilderInfo(
                builderid,
                builder_config.description,
                builder_config.description_format,
                render_description(builder_config.description, builder_config.description_format),
                projectid,
                builder_config.tags,
            )

        # if we have any workers attached which are no longer configured,
        # drop them.
        new_workernames = set(builder_config.workernames)
        self.workers = [w for w in self.workers if w.worker.workername in new_workernames]
def _has_updated_config_info(self, old_config, new_config):
if old_config is None:
return True
if old_config.description != new_config.description:
return True
if old_config.description_format != new_config.description_format:
return True
if old_config.project != new_config.project:
return True
if old_config.tags != new_config.tags:
return True
return False
def __repr__(self):
return f"<Builder '{self.name!r}' at {id(self)}>"
    def getBuilderIdForName(self, name):
        """Look up (or create) the builder id for *name* via the data API."""
        # buildbot.config should ensure this is already unicode, but it doesn't
        # hurt to check again
        name = bytes2unicode(name)
        return self.master.data.updates.findBuilderId(name)
    @defer.inlineCallbacks
    def getBuilderId(self):
        """Return this builder's id, caching it after the first lookup."""
        # since findBuilderId is idempotent, there's no reason to add
        # additional locking around this function.
        if self._builderid:
            return self._builderid
        builderid = yield self.getBuilderIdForName(self.name)
        self._builderid = builderid
        return builderid
    @defer.inlineCallbacks
    def getOldestRequestTime(self):
        """Returns the submitted_at of the oldest unclaimed build request for
        this builder, or None if there are no build requests.

        @returns: datetime instance or None, via Deferred
        """
        bldrid = yield self.getBuilderId()
        # ask the data API for just the single oldest unclaimed request
        unclaimed = yield self.master.data.get(
            ('builders', bldrid, 'buildrequests'),
            [resultspec.Filter('claimed', 'eq', [False])],
            order=['submitted_at'],
            limit=1,
        )
        if unclaimed:
            return unclaimed[0]['submitted_at']
        return None
@defer.inlineCallbacks
def getNewestCompleteTime(self):
"""Returns the complete_at of the latest completed build request for
this builder, or None if there are no such build requests.
@returns: datetime instance or None, via Deferred
"""
bldrid = yield self.getBuilderId()
completed = yield self.master.data.get(
('builders', bldrid, 'buildrequests'),
[resultspec.Filter('complete', 'eq', [True])],
order=['-complete_at'],
limit=1,
)
if completed:
return completed[0]['complete_at']
else:
return None
    @defer.inlineCallbacks
    def get_highest_priority(self):
        """Returns the priority of the highest priority unclaimed build request
        for this builder, or None if there are no build requests.

        @returns: priority or None, via Deferred
        """
        bldrid = yield self.getBuilderId()
        # fetch only the single highest-priority unclaimed request
        unclaimed = yield self.master.data.get(
            ('builders', bldrid, 'buildrequests'),
            [resultspec.Filter('claimed', 'eq', [False])],
            order=['-priority'],
            limit=1,
        )
        if unclaimed:
            return unclaimed[0]['priority']
        return None
def getBuild(self, number):
for b in self.building:
if b.number == number:
return b
for b in self.old_building:
if b.number == number:
return b
return None
    def addLatentWorker(self, worker):
        """Register a latent worker and poke the build request distributor.

        No-op when the worker is already registered.
        """
        assert interfaces.ILatentWorker.providedBy(worker)
        for w in self.workers:
            # NOTE(review): relies on WorkerForBuilder comparing equal to the
            # underlying worker — confirm against WorkerForBuilder.__eq__
            if w == worker:
                break
        else:
            wfb = workerforbuilder.LatentWorkerForBuilder(worker, self)
            self.workers.append(wfb)
            self.botmaster.maybeStartBuildsForBuilder(self.name)
    @defer.inlineCallbacks
    def attached(self, worker, commands):
        """This is invoked by the Worker when the self.workername bot
        registers their builder.

        @type  worker: L{buildbot.worker.Worker}
        @param worker: the Worker that represents the worker as a whole
        @type  commands: dict: string -> string, or None
        @param commands: provides the worker's version of each RemoteCommand

        @rtype:  L{twisted.internet.defer.Deferred}
        @return: a Deferred that fires (with 'self') when the worker-side
                 builder is fully attached and ready to accept commands.
        """
        for w in self.attaching_workers + self.workers:
            if w.worker == worker:
                # already attached to them. This is fairly common, since
                # attached() gets called each time we receive the builder
                # list from the worker, and we ask for it each time we add or
                # remove a builder. So if the worker is hosting builders
                # A,B,C, and the config file changes A, we'll remove A and
                # re-add it, triggering two builder-list requests, getting
                # two redundant calls to attached() for B, and another two
                # for C.
                #
                # Therefore, when we see that we're already attached, we can
                # just ignore it.
                return self

        # not yet attached: track the new WorkerForBuilder while it attaches
        wfb = workerforbuilder.WorkerForBuilder(self)
        self.attaching_workers.append(wfb)
        try:
            yield wfb.attached(worker, commands)
            # attach complete: move from attaching_workers to workers
            self.attaching_workers.remove(wfb)
            self.workers.append(wfb)
            return self

        except Exception as e:  # pragma: no cover
            # already log.err'ed by WorkerForBuilder._attachFailure
            # TODO: remove from self.workers (except that detached() should get
            # run first, right?)
            log.err(e, 'worker failed to attach')
            return None
def _find_wfb_by_worker(self, worker):
for wfb in self.attaching_workers + self.workers:
if wfb.worker == worker:
return wfb
return None
    def detached(self, worker):
        """This is called when the connection to the bot is lost."""
        wfb = self._find_wfb_by_worker(worker)
        if wfb is None:
            # should not happen; log loudly and carry on
            log.msg(
                f"WEIRD: Builder.detached({worker}) ({worker.workername})"
                f" not in attaching_workers({self.attaching_workers})"
                f" or workers({self.workers})"
            )
            return

        # forget the WorkerForBuilder in whichever list it lives
        if wfb in self.attaching_workers:
            self.attaching_workers.remove(wfb)
        if wfb in self.workers:
            self.workers.remove(wfb)

        # inform the WorkerForBuilder that their worker went away
        wfb.detached()
    def getAvailableWorkers(self):
        """Return the WorkerForBuilder instances currently free to start a build."""
        return [wfb for wfb in self.workers if wfb.isAvailable()]
    @defer.inlineCallbacks
    def _setup_props_if_needed(self, props, workerforbuilder, buildrequest):
        """Lazily build the pre-build Properties object.

        Returns *props* unchanged when already computed; otherwise creates a
        Properties instance populated with everything known before the build
        starts.
        """
        # don't unnecessarily setup properties for build
        if props is not None:
            return props
        props = Properties()
        yield Build.setup_properties_known_before_build_starts(
            props, [buildrequest], self, workerforbuilder
        )
        return props
    @defer.inlineCallbacks
    def canStartBuild(self, workerforbuilder, buildrequest):
        """Decide whether *buildrequest* may start on *workerforbuilder* now.

        Checks, in order: latent-worker build compatibility, availability of
        the locks the build would acquire, and the user-supplied
        canStartBuild callable from the builder config.
        """
        can_start = True

        # check whether the locks that the build will acquire can actually be
        # acquired
        locks = self.config.locks
        worker = workerforbuilder.worker
        props = None

        if worker.builds_may_be_incompatible:
            # Check if the latent worker is actually compatible with the build.
            # The instance type of the worker may depend on the properties of
            # the build that substantiated it.
            props = yield self._setup_props_if_needed(props, workerforbuilder, buildrequest)
            can_start = yield worker.isCompatibleWithBuild(props)
            if not can_start:
                return False

        if IRenderable.providedBy(locks):
            # collect properties that would be set for a build if we
            # started it now and render locks using it
            props = yield self._setup_props_if_needed(props, workerforbuilder, buildrequest)
        else:
            props = None

        locks_to_acquire = yield get_real_locks_from_accesses_raw(
            locks, props, self, workerforbuilder, self.config_version
        )

        if locks_to_acquire:
            can_start = self._can_acquire_locks(locks_to_acquire)
            if not can_start:
                return False

        # finally defer to the user-configured hook, if any
        if callable(self.config.canStartBuild):
            can_start = yield self.config.canStartBuild(self, workerforbuilder, buildrequest)
        return can_start
def _can_acquire_locks(self, lock_list):
for lock, access in lock_list:
if not lock.isAvailable(None, access):
return False
return True
    @defer.inlineCallbacks
    def _startBuildFor(self, workerforbuilder, buildrequests):
        """Create and launch a Build for *buildrequests* on the given worker.

        Returns True once the build has been started; build completion is
        handled asynchronously via buildFinished().
        """
        build = self.config.factory.newBuild(buildrequests, self)

        props = build.getProperties()

        # give the properties a reference back to this build
        props.build = build

        yield Build.setup_properties_known_before_build_starts(
            props, build.requests, build.builder, workerforbuilder
        )

        log.msg(f"starting build {build} using worker {workerforbuilder}")

        build.setLocks(self.config.locks)

        if self.config.env:
            build.setWorkerEnvironment(self.config.env)

        # append the build to self.building
        self.building.append(build)

        # The worker is ready to go. workerforbuilder.buildStarted() sets its
        # state to BUILDING (so we won't try to use it for any other builds).
        # This gets set back to IDLE by the Build itself when it finishes.
        # Note: This can't be done in `Build.startBuild`, since it needs to be done
        # synchronously, before the BuildRequestDistributor looks at
        # another build request.
        workerforbuilder.buildStarted()

        # We put the result of startBuild into a fresh Deferred since _startBuildFor should not
        # wait until the build is finished.  This uses `maybeDeferred` to ensure that any exceptions
        # raised by startBuild are treated as deferred errbacks (see
        # http://trac.buildbot.net/ticket/2428).
        d = defer.maybeDeferred(build.startBuild, workerforbuilder)
        # this shouldn't happen. if it does, the worker will be wedged
        d.addErrback(
            log.err,
            'from a running build; this is a '
            'serious error - please file a bug at http://buildbot.net',
        )
        return True
    @defer.inlineCallbacks
    def setup_properties(self, props):
        """Set builder-level properties on *props*.

        Adds buildername/builderid, the project name/id when known, all
        configured properties, and defaultProperties for keys not already
        present.
        """
        builderid = yield self.getBuilderId()

        props.setProperty("buildername", self.name, "Builder")
        props.setProperty("builderid", builderid, "Builder")

        if self.project_name is not None:
            props.setProperty('projectname', self.project_name, 'Builder')
        if self.project_id is not None:
            props.setProperty('projectid', self.project_id, 'Builder')

        if self.config.properties:
            for propertyname in self.config.properties:
                props.setProperty(propertyname, self.config.properties[propertyname], "Builder")

        if self.config.defaultProperties:
            # defaults never override properties that are already set
            for propertyname in self.config.defaultProperties:
                if propertyname not in props:
                    props.setProperty(
                        propertyname, self.config.defaultProperties[propertyname], "Builder"
                    )
    def buildFinished(self, build, wfb):
        """This is called when the Build has finished (either success or
        failure). Any exceptions during the build are reported with
        results=FAILURE, not with an errback."""

        # by the time we get here, the Build has already released the worker,
        # which will trigger a check for any now-possible build requests
        # (maybeStartBuilds)

        results = build.results

        self.building.remove(build)
        if results == RETRY:
            # put the build requests back in the queue for another try
            d = self._resubmit_buildreqs(build)
            d.addErrback(log.err, 'while resubmitting a build request')
        else:
            complete_at_epoch = self.master.reactor.seconds()
            complete_at = epoch2datetime(complete_at_epoch)
            brids = [br.id for br in build.requests]

            d = self.master.data.updates.completeBuildRequests(
                brids, results, complete_at=complete_at
            )
            # nothing in particular to do with this deferred, so just log it if
            # it fails..
            d.addErrback(log.err, 'while marking build requests as completed')

        if wfb.worker:
            wfb.worker.releaseLocks()
    def _resubmit_buildreqs(self, build):
        """Unclaim the build's requests so they can be picked up again (RETRY)."""
        brids = [br.id for br in build.requests]
        d = self.master.data.updates.unclaimBuildRequests(brids)

        @d.addCallback
        def notify(_):
            pass  # XXX method does not exist
            # self._msg_buildrequests_unclaimed(build.requests)

        return d
    # Build Creation

    def maybeStartBuild(self, workerforbuilder, breqs):
        """Start *breqs* on *workerforbuilder* if this service is running.

        Returns a Deferred firing with False when not running, otherwise the
        result of _startBuildFor.
        """
        # This method is called by the botmaster whenever this builder should
        # start a set of buildrequests on a worker. Do not call this method
        # directly - use master.botmaster.maybeStartBuildsForBuilder, or one of
        # the other similar methods if more appropriate

        # first, if we're not running, then don't start builds; stopService
        # uses this to ensure that any ongoing maybeStartBuild invocations
        # are complete before it stops.
        if not self.running:
            return defer.succeed(False)

        # If the build fails from here on out (e.g., because a worker has failed),
        # it will be handled outside of this function. TODO: test that!
        return self._startBuildFor(workerforbuilder, breqs)
# a few utility functions to make the maybeStartBuild a bit shorter and
# easier to read
def getCollapseRequestsFn(self):
"""Helper function to determine which collapseRequests function to use
from L{_collapseRequests}, or None for no merging"""
# first, seek through builder, global, and the default
collapseRequests_fn = self.config.collapseRequests
if collapseRequests_fn is None:
collapseRequests_fn = self.master.config.collapseRequests
if collapseRequests_fn is None:
collapseRequests_fn = True
# then translate False and True properly
if collapseRequests_fn is False:
collapseRequests_fn = None
elif collapseRequests_fn is True:
collapseRequests_fn = self._defaultCollapseRequestFn
return collapseRequests_fn
    @staticmethod
    def _defaultCollapseRequestFn(master, builder, brdict1, brdict2):
        # default collapse policy: delegate to BuildRequest.canBeCollapsed
        return buildrequest.BuildRequest.canBeCollapsed(master, brdict1, brdict2)
| 20,555 | Python | .py | 436 | 37.529817 | 100 | 0.658909 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,813 | remotecommand.py | buildbot_buildbot/master/buildbot/process/remotecommand.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import annotations
from twisted.internet import defer
from twisted.internet import error
from twisted.python import log
from twisted.python.failure import Failure
from twisted.spread import pb
from buildbot import util
from buildbot.pbutil import decode
from buildbot.process import metrics
from buildbot.process.results import CANCELLED
from buildbot.process.results import FAILURE
from buildbot.process.results import SUCCESS
from buildbot.util import lineboundaries
from buildbot.util.eventual import eventually
from buildbot.worker.protocols import base
class RemoteException(Exception):
    """Raised on the master to represent a failure reported by the remote worker."""
class RemoteCommand(base.RemoteCommandImpl):
    """Master-side handle for a single command executed on a worker.

    A RemoteCommand is started with run(), after which the worker streams
    updates back (stdout/stderr/header text, log chunks, the exit code,
    elapsed time, ...). Text updates are routed into the logs registered via
    useLog()/useLogDelayed(); other updates accumulate in self.updates. The
    Deferred returned by run() fires with self once the remote side reports
    completion or the connection is lost.
    """

    # class-level unique identifier generator for command ids
    _commandCounter = 0
    active = False
    rc: int | None = None
    debug = False
    def __init__(
        self,
        remote_command,
        args,
        ignore_updates=False,
        collectStdout=False,
        collectStderr=False,
        decodeRC=None,
        stdioLogName='stdio',
    ):
        """
        @param remote_command: name of the command registered on the worker side
        @param args: argument dictionary handed to the worker command
        @param ignore_updates: if true, discard all updates from the worker
        @param collectStdout: accumulate stdout text into self.stdout
        @param collectStderr: accumulate stderr text into self.stderr
        @param decodeRC: map of exit code -> build result (default {0: SUCCESS})
        @param stdioLogName: name of the log receiving stdio output (None disables)
        """
        if decodeRC is None:
            decodeRC = {0: SUCCESS}
        self.logs = {}
        self.delayedLogs = {}
        self._closeWhenFinished = {}
        self.collectStdout = collectStdout
        self.collectStderr = collectStderr
        self.stdout = ''
        self.stderr = ''
        self.updates = {}
        self.stdioLogName = stdioLogName
        self._startTime = None
        self._remoteElapsed = None
        self.remote_failure_reason = None
        self.remote_command = remote_command
        self.args = args
        self.ignore_updates = ignore_updates
        self.decodeRC = decodeRC
        self.conn = None
        self._is_conn_test_fake = False
        self.worker = None
        self.step = None
        self.builder_name = None
        self.commandID = None
        self.deferred = None
        self.interrupted = False
        # a lock to make sure that only one log-handling method runs at a time.
        # This is really only a problem with old-style steps, which do not
        # wait for the Deferred from one method before invoking the next.
        self.loglock = defer.DeferredLock()
        self._line_boundary_finders = {}
    def __repr__(self):
        return f"<RemoteCommand '{self.remote_command}' at {id(self)}>"
    @classmethod
    def generate_new_command_id(cls):
        # Monotonically increasing, process-wide unique command id.
        cmd_id = cls._commandCounter
        cls._commandCounter += 1
        return f"{cmd_id}"
    @classmethod
    def get_last_generated_command_id(cls):
        cmd_id = cls._commandCounter - 1
        return f"{cmd_id}"
    def run(self, step, conn, builder_name):
        """Start the command on the worker; returns a Deferred firing with self."""
        self.active = True
        self.step = step
        self.conn = conn
        self.builder_name = builder_name
        # This probably could be solved in a cleaner way.
        self._is_conn_test_fake = hasattr(self.conn, 'is_fake_test_connection')
        self.commandID = RemoteCommand.generate_new_command_id()
        log.msg(f"{self}: RemoteCommand.run [{self.commandID}]")
        self.deferred = defer.Deferred()
        d = defer.maybeDeferred(self._start)
        # _finished is called with an error for unknown commands, errors
        # that occur while the command is starting (including OSErrors in
        # exec()), StaleBroker (when the connection was lost before we
        # started), and pb.PBConnectionLost (when the worker isn't responding
        # over this connection, perhaps it had a power failure, or NAT
        # weirdness). If this happens, self.deferred is fired right away.
        d.addErrback(self._finished)
        # Connections which are lost while the command is running are caught
        # when our parent Step calls our .lostRemote() method.
        return self.deferred
    def useLog(self, log_, closeWhenFinished=False, logfileName=None):
        """Route worker output named logfileName into the given log object."""
        # NOTE: log may be a SyngLogFileWrapper or a Log instance, depending on
        # the step
        if not logfileName:
            logfileName = log_.getName()
        assert logfileName not in self.logs
        assert logfileName not in self.delayedLogs
        self.logs[logfileName] = log_
        self._closeWhenFinished[logfileName] = closeWhenFinished
    def useLogDelayed(self, logfileName, activateCallBack, closeWhenFinished=False):
        # Like useLog, but the log object is created lazily (via
        # activateCallBack) when the first chunk of data arrives in addToLog.
        assert logfileName not in self.logs
        assert logfileName not in self.delayedLogs
        self.delayedLogs[logfileName] = (activateCallBack, closeWhenFinished)
    def _start(self):
        self._startTime = util.now()
        # This method only initiates the remote command.
        # We will receive remote_update messages as the command runs.
        # We will get a single remote_complete when it finishes.
        # We should fire self.deferred when the command is done.
        d = self.conn.remoteStartCommand(
            self, self.builder_name, self.commandID, self.remote_command, self.args
        )
        return d
    @defer.inlineCallbacks
    def _finished(self, failure=None):
        # Finished may be called concurrently by a message from worker and interruption due to
        # lost connection.
        if not self.active:
            return
        self.active = False
        # the rc is send asynchronously and there is a chance it is still in the callback queue
        # when finished is received, we have to workaround in the master because worker might be
        # older
        if not self._is_conn_test_fake:
            timeout = 10
            while self.rc is None and timeout > 0:
                yield util.asyncSleep(0.1)
                timeout -= 1
        try:
            yield self.remoteComplete(failure)
            # this fires the original deferred we returned from .run(),
            self.deferred.callback(self)
        except Exception as e:
            self.deferred.errback(e)
    @defer.inlineCallbacks
    def interrupt(self, why):
        """Ask the worker to halt the command; handles lost-connection specially."""
        log.msg("RemoteCommand.interrupt", self, why)
        if self.conn and isinstance(why, Failure) and why.check(error.ConnectionLost):
            # Note that we may be in the process of interruption and waiting for the worker to
            # return the final results when the connection is disconnected.
            log.msg("RemoteCommand.interrupt: lost worker")
            self.conn = None
            self._finished(why)
            return
        if not self.active or self.interrupted:
            log.msg(" but this RemoteCommand is already inactive")
            return
        if not self.conn:
            log.msg(" but our .conn went away")
            return
        self.interrupted = True
        # tell the remote command to halt. Returns a Deferred that will fire
        # when the interrupt command has been delivered.
        try:
            yield self.conn.remoteInterruptCommand(self.builder_name, self.commandID, str(why))
            # the worker may not have remote_interruptCommand
        except Exception as e:
            log.msg("RemoteCommand.interrupt failed", self, e)
    def remote_update_msgpack(self, updates):
        # Update entry point for the msgpack-based protocol; unlike
        # remote_update, data arrives already split into lines, so no
        # line-boundary buffering is needed here.
        self.worker.messageReceivedFromWorker()
        try:
            for key, value in updates:
                if self.active and not self.ignore_updates:
                    if key in ['stdout', 'stderr', 'header']:
                        self.remoteUpdate(key, value[0], False)
                    elif key == "log":
                        logname, data = value
                        self.remoteUpdate(key, (logname, data[0]), False)
                    else:
                        self.remoteUpdate(key, value, False)
        except Exception:
            # log failure, terminate build, let worker retire the update
            self._finished(Failure())
    def split_line(self, stream, text):
        # Buffer partial lines per stream; returns only whole lines (or None).
        try:
            return self._line_boundary_finders[stream].append(text)
        except KeyError:
            lbf = self._line_boundary_finders[stream] = lineboundaries.LineBoundaryFinder()
            return lbf.append(text)
    def remote_update(self, updates):
        """
        I am called by the worker's
        L{buildbot_worker.base.WorkerForBuilderBase.sendUpdate} so
        I can receive updates from the running remote command.

        @type  updates: list of [object, int]
        @param updates: list of updates from the remote command
        """
        updates = decode(updates)
        self.worker.messageReceivedFromWorker()
        max_updatenum = 0
        for update, num in updates:
            # log.msg("update[%d]:" % num)
            try:
                if self.active and not self.ignore_updates:
                    for key, value in update.items():
                        if key in ['stdout', 'stderr', 'header']:
                            whole_line = self.split_line(key, value)
                            if whole_line is not None:
                                self.remoteUpdate(key, whole_line, False)
                        elif key == "log":
                            logname, data = value
                            whole_line = self.split_line(logname, data)
                            value = (logname, whole_line)
                            if whole_line is not None:
                                self.remoteUpdate(key, value, False)
                        else:
                            self.remoteUpdate(key, value, False)
            except Exception:
                # log failure, terminate build, let worker retire the update
                self._finished(Failure())
                # TODO: what if multiple updates arrive? should
                # skip the rest but ack them all
            max_updatenum = max(max_updatenum, num)
        return max_updatenum
    def remote_complete(self, failure=None):
        """
        Called by the worker's
        L{buildbot_worker.base.WorkerForBuilderBase.commandComplete} to
        notify me the remote command has finished.

        @type  failure: L{twisted.python.failure.Failure} or None

        @rtype: None
        """
        self.worker.messageReceivedFromWorker()
        # call the real remoteComplete a moment later, but first return an
        # acknowledgement so the worker can retire the completion message.
        if self.active:
            eventually(self._finished, failure)
        return None
    @util.deferredLocked('loglock')
    def addStdout(self, data):
        if self.collectStdout:
            self.stdout += data
        if self.stdioLogName is not None and self.stdioLogName in self.logs:
            self.logs[self.stdioLogName].addStdout(data)
        return defer.succeed(None)
    @util.deferredLocked('loglock')
    def add_stdout_lines(self, data, is_flushed):
        if self.collectStdout:
            if is_flushed:
                # drop the trailing newline added by the line-boundary flush
                data = data[:-1]
            self.stdout += data
        if self.stdioLogName is not None and self.stdioLogName in self.logs:
            self.logs[self.stdioLogName].add_stdout_lines(data)
        return defer.succeed(None)
    @util.deferredLocked('loglock')
    def addStderr(self, data):
        if self.collectStderr:
            self.stderr += data
        if self.stdioLogName is not None and self.stdioLogName in self.logs:
            self.logs[self.stdioLogName].addStderr(data)
        return defer.succeed(None)
    @util.deferredLocked('loglock')
    def add_stderr_lines(self, data, is_flushed):
        if self.collectStderr:
            if is_flushed:
                # drop the trailing newline added by the line-boundary flush
                data = data[:-1]
            self.stderr += data
        if self.stdioLogName is not None and self.stdioLogName in self.logs:
            self.logs[self.stdioLogName].add_stderr_lines(data)
        return defer.succeed(None)
    @util.deferredLocked('loglock')
    def addHeader(self, data):
        if self.stdioLogName is not None and self.stdioLogName in self.logs:
            self.logs[self.stdioLogName].addHeader(data)
        return defer.succeed(None)
    @util.deferredLocked('loglock')
    def add_header_lines(self, data):
        if self.stdioLogName is not None and self.stdioLogName in self.logs:
            self.logs[self.stdioLogName].add_header_lines(data)
        return defer.succeed(None)
    @util.deferredLocked('loglock')
    @defer.inlineCallbacks
    def addToLog(self, logname, data):
        """Append data to a named (possibly delayed) log."""
        # Activate delayed logs on first data.
        if logname in self.delayedLogs:
            (activateCallBack, closeWhenFinished) = self.delayedLogs[logname]
            del self.delayedLogs[logname]
            loog = yield activateCallBack(self)
            self.logs[logname] = loog
            self._closeWhenFinished[logname] = closeWhenFinished
        if logname in self.logs:
            yield self.logs[logname].add_stdout_lines(data)
        else:
            log.msg(f"{self}.addToLog: no such log {logname}")
    @metrics.countMethod('RemoteCommand.remoteUpdate()')
    @defer.inlineCallbacks
    def remoteUpdate(self, key, value, is_flushed):
        """Dispatch a single (key, value) update to logs / attributes."""
        def cleanup(data):
            # Scrub secret values from text before it hits the logs.
            if self.step is None:
                return data
            return self.step.build.properties.cleanupTextFromSecrets(data)

        if self.debug:
            log.msg(f"Update[{key}]: {value}")
        if key == "stdout":
            yield self.add_stdout_lines(cleanup(value), is_flushed)
        if key == "stderr":
            yield self.add_stderr_lines(cleanup(value), is_flushed)
        if key == "header":
            yield self.add_header_lines(cleanup(value))
        if key == "log":
            logname, data = value
            yield self.addToLog(logname, cleanup(data))
        if key == "rc":
            rc = self.rc = value
            log.msg(f"{self} rc={rc}")
            yield self.add_header_lines(f"program finished with exit code {rc}\n")
        if key == "elapsed":
            self._remoteElapsed = value
        if key == "failure_reason":
            self.remote_failure_reason = value
        # TODO: these should be handled at the RemoteCommand level
        if key not in ('stdout', 'stderr', 'header', 'rc', "failure_reason"):
            if key not in self.updates:
                self.updates[key] = []
            self.updates[key].append(value)
    @defer.inlineCallbacks
    def remoteComplete(self, maybeFailure):
        """Flush buffered output, close logs, and re-raise any remote failure."""
        if self._startTime and self._remoteElapsed:
            delta = (util.now() - self._startTime) - self._remoteElapsed
            metrics.MetricTimeEvent.log("RemoteCommand.overhead", delta)
        # flush any partial lines still buffered per stream / named log
        for key, lbf in self._line_boundary_finders.items():
            if key in ['stdout', 'stderr', 'header']:
                whole_line = lbf.flush()
                if whole_line is not None:
                    yield self.remoteUpdate(key, whole_line, True)
            else:
                logname = key
                whole_line = lbf.flush()
                value = (logname, whole_line)
                if whole_line is not None:
                    yield self.remoteUpdate("log", value, True)
        try:
            yield self.loglock.acquire()
            for name, loog in self.logs.items():
                if self._closeWhenFinished[name]:
                    if maybeFailure:
                        yield loog.addHeader(f"\nremoteFailed: {maybeFailure}")
                    else:
                        log.msg(f"closing log {loog}")
                    yield loog.finish()
        finally:
            yield self.loglock.release()
        if maybeFailure:
            # Message Pack protocol can not send an exception object back to the master, so
            # exception information is sent as a string
            if isinstance(maybeFailure, str):
                raise RemoteException(maybeFailure)

            # workaround http://twistedmatrix.com/trac/ticket/5507
            # CopiedFailure cannot be raised back, this make debug difficult
            if isinstance(maybeFailure, pb.CopiedFailure):
                maybeFailure.value = RemoteException(
                    f"{maybeFailure.type}: {maybeFailure.value}\n{maybeFailure.traceback}"
                )
                maybeFailure.type = RemoteException
            maybeFailure.raiseException()
    def results(self):
        # Map the command's exit status to a build result; unknown exit
        # codes are treated as FAILURE.
        if self.interrupted:
            return CANCELLED
        if self.rc in self.decodeRC:
            return self.decodeRC[self.rc]
        return FAILURE
    def didFail(self):
        # Convenience predicate over results().
        return self.results() == FAILURE
# LoggedRemoteCommand is simply another name for RemoteCommand, kept so code
# referring to the old name keeps working.
LoggedRemoteCommand = RemoteCommand
class RemoteShellCommand(RemoteCommand):
    """A RemoteCommand that runs the worker-side "shell" command.

    Builds the argument dictionary the worker expects and, at start time,
    adapts it to the negotiated worker command version (usePTY default,
    legacy 'dir' key, sigtermTime support).
    """

    def __init__(
        self,
        workdir,
        command,
        env=None,
        want_stdout=1,
        want_stderr=1,
        timeout=20 * 60,
        maxTime=None,
        max_lines=None,
        sigtermTime=None,
        logfiles=None,
        usePTY=None,
        logEnviron=True,
        collectStdout=False,
        collectStderr=False,
        interruptSignal=None,
        initialStdin=None,
        decodeRC=None,
        stdioLogName='stdio',
    ):
        """
        @param workdir: directory (relative to the builder dir) to run in
        @param command: a single string/bytes, or a list of arguments; list
            elements may be ('obfuscated', real, fake) tuples whose fake part
            is shown in logs instead of the real value
        @param usePTY: None means "decide at _start based on worker version"
        """
        if logfiles is None:
            logfiles = {}
        if decodeRC is None:
            decodeRC = {0: SUCCESS}
        self.command = command  # stash .command, set it later
        if isinstance(self.command, (str, bytes)):
            # Single string command doesn't support obfuscation.
            self.fake_command = command
        else:
            # Try to obfuscate command.
            def obfuscate(arg):
                if isinstance(arg, tuple) and len(arg) == 3 and arg[0] == 'obfuscated':
                    return arg[2]
                return arg

            self.fake_command = [obfuscate(c) for c in self.command]
        if env is not None:
            # avoid mutating the original master.cfg dictionary. Each
            # ShellCommand gets its own copy, any start() methods won't be
            # able to modify the original.
            env = env.copy()
        args = {
            'workdir': workdir,
            'env': env,
            'want_stdout': want_stdout,
            'want_stderr': want_stderr,
            'logfiles': logfiles,
            'timeout': timeout,
            'maxTime': maxTime,
            'max_lines': max_lines,
            'sigtermTime': sigtermTime,
            'usePTY': usePTY,
            'logEnviron': logEnviron,
            'initial_stdin': initialStdin,
        }
        if interruptSignal is not None:
            args['interruptSignal'] = interruptSignal
        super().__init__(
            "shell",
            args,
            collectStdout=collectStdout,
            collectStderr=collectStderr,
            decodeRC=decodeRC,
            stdioLogName=stdioLogName,
        )
    def _start(self):
        # Resolve version-dependent argument defaults just before starting.
        if self.args['usePTY'] is None:
            if self.step.workerVersionIsOlderThan("shell", "3.0"):
                # Old worker default of usePTY is to use worker-configuration.
                self.args['usePTY'] = "slave-config"
            else:
                # buildbot-worker doesn't support worker-configured usePTY,
                # and usePTY defaults to False.
                self.args['usePTY'] = False
        self.args['command'] = self.command
        if self.remote_command == "shell":
            # non-ShellCommand worker commands are responsible for doing this
            # fixup themselves
            if self.step.workerVersion("shell", "old") == "old":
                self.args['dir'] = self.args['workdir']
            if self.step.workerVersionIsOlderThan("shell", "2.16"):
                self.args.pop('sigtermTime', None)
        what = f"command '{self.fake_command}' in dir '{self.args['workdir']}'"
        log.msg(what)
        return super()._start()
    def __repr__(self):
        # Uses fake_command so obfuscated arguments are not revealed.
        return f"<RemoteShellCommand '{self.fake_command!r}'>"
| 20,392 | Python | .py | 472 | 32.516949 | 96 | 0.612107 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,814 | buildstep.py | buildbot_buildbot/master/buildbot/process/buildstep.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import annotations
import inspect
import sys
from typing import TYPE_CHECKING
from typing import Callable
from typing import ClassVar
from typing import Sequence
from twisted.internet import defer
from twisted.internet import error
from twisted.python import deprecate
from twisted.python import log
from twisted.python import versions
from twisted.python.failure import Failure
from twisted.python.reflect import accumulateClassList
from twisted.web.util import formatFailure
from zope.interface import implementer
from buildbot import config
from buildbot import interfaces
from buildbot import util
from buildbot.config.checks import check_param_bool
from buildbot.config.checks import check_param_length
from buildbot.config.checks import check_param_number_none
from buildbot.config.checks import check_param_str
from buildbot.config.checks import check_param_str_none
from buildbot.db.model import Model
from buildbot.interfaces import IRenderable
from buildbot.interfaces import WorkerSetupError
from buildbot.locks import BaseLock
from buildbot.process import log as plog
from buildbot.process import properties
from buildbot.process import remotecommand
from buildbot.process import results
from buildbot.process.locks import get_real_locks_from_accesses
# (WithProperties used to be available in this module)
from buildbot.process.properties import WithProperties
from buildbot.process.results import ALL_RESULTS
from buildbot.process.results import CANCELLED
from buildbot.process.results import EXCEPTION
from buildbot.process.results import FAILURE
from buildbot.process.results import RETRY
from buildbot.process.results import SKIPPED
from buildbot.process.results import SUCCESS
from buildbot.process.results import WARNINGS
from buildbot.process.results import statusToString
from buildbot.util import bytes2unicode
from buildbot.util import debounce
from buildbot.util import deferwaiter
from buildbot.util import flatten
from buildbot.util.test_result_submitter import TestResultSubmitter
if TYPE_CHECKING:
from buildbot.process.build import Build
from buildbot.worker.base import AbstractWorker
class BuildStepFailed(Exception):
    """Raised inside a step to mark it as failed; startStep maps it to FAILURE."""
class BuildStepCancelled(Exception):
    """Used internally for signalling that a step was cancelled."""
class CallableAttributeError(Exception):
    """Wraps an AttributeError raised by a callable run inside a property."""
@implementer(interfaces.IBuildStepFactory)
class _BuildStepFactory(util.ComparableMixin):
    """
    Records the arguments a BuildStep subclass was configured with, so a
    fresh step can be instantiated for each build. An instance of this
    class (rather than a closure) is used mostly to make it easier to test
    that the right factories are getting created.
    """

    compare_attrs: ClassVar[Sequence[str]] = ('factory', 'args', 'kwargs')

    def __init__(self, step_class, *args, **kwargs):
        self.step_class = step_class
        self.args = args
        self.kwargs = kwargs

    def buildStep(self):
        try:
            # Allocate without running __init__ so the factory reference is
            # already in place while __init__ executes.
            new_step = object.__new__(self.step_class)
            new_step._factory = self
            new_step.__init__(*self.args, **self.kwargs)
            return new_step
        except Exception:
            log.msg(
                f"error while creating step, step_class={self.step_class}, args={self.args}, "
                f"kwargs={self.kwargs}"
            )
            raise
class BuildStepStatus:
    """Empty placeholder used only for old-style steps; carries no behavior."""
def get_factory_from_step_or_factory(step_or_factory):
    """Return the step factory for either a step instance or a bare factory."""
    getter = getattr(step_or_factory, 'get_step_factory', None)
    candidate = getter() if getter is not None else step_or_factory
    # make sure the returned value actually implements IBuildStepFactory
    return interfaces.IBuildStepFactory(candidate)
def create_step_from_step_or_factory(step_or_factory):
    """Instantiate a fresh BuildStep from either a step template or a factory."""
    return get_factory_from_step_or_factory(step_or_factory).buildStep()
class BuildStepWrapperMixin:
    """Mixin that reports a config error for attribute writes after __init__.

    Mixed into the dynamic subclass created by _create_buildstep_wrapper_class;
    it flips a flag once construction finishes and flags later assignments.
    """

    # class-level default; the instance attribute is set once __init__ is done
    __init_completed: bool = False

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.__init_completed = True

    def __setattr__(self, name, value):
        if not self.__init_completed:
            super().__setattr__(name, value)
            return
        config.error(
            "Changes to attributes of a BuildStep instance are ignored, this is a bug. "
            "Use set_step_arg(name, value) for that."
        )
        super().__setattr__(name, value)
# Cache of wrapper classes, keyed by id() of the wrapped class. Reusing the
# same wrapper is also needed for comparisons to work because ComparableMixin
# requires type(x) and x.__class__ to be equal in order to perform comparison
# at all.
_buildstep_wrapper_cache: dict[int, type[BuildStep]] = {}


def _create_buildstep_wrapper_class(klass):
    """Return (and memoize) a subclass of klass that freezes attributes after __init__."""
    cache_key = id(klass)
    try:
        return _buildstep_wrapper_cache[cache_key]
    except KeyError:
        pass
    wrapped = type(klass.__qualname__, (BuildStepWrapperMixin, klass), {})
    _buildstep_wrapper_cache[cache_key] = wrapped
    return wrapped
@implementer(interfaces.IBuildStep)
class BuildStep(
    results.ResultComputingConfigMixin, properties.PropertiesMixin, util.ComparableMixin
):
    """One step of a build: configuration template and per-build execution object."""

    # Note that the BuildStep is at the same time a template from which per-build steps are
    # constructed. This works by creating a new IBuildStepFactory in __new__, retrieving it via
    # get_step_factory() and then calling buildStep() on that factory.
alwaysRun: bool = False
doStepIf: bool | Callable[[BuildStep], bool] = True
hideStepIf: bool | Callable[[int, BuildStep], bool] = False
compare_attrs: ClassVar[Sequence[str]] = ("_factory",)
# properties set on a build step are, by nature, always runtime properties
set_runtime_properties: bool = True
renderables: Sequence[str] = [
*results.ResultComputingConfigMixin.resultConfig,
'alwaysRun',
'description',
'descriptionDone',
'descriptionSuffix',
'doStepIf',
'hideStepIf',
'workdir',
]
# '_params_names' holds a list of all the parameters we care about, to allow
# users to instantiate a subclass of BuildStep with a mixture of
# arguments, some of which are for us, some of which are for the subclass
# (or a delegate of the subclass, like how ShellCommand delivers many
# arguments to the RemoteShellCommand that it creates). Such delegating
# subclasses will use this list to figure out which arguments are meant
# for us and which should be given to someone else.
_params_config: list[tuple[str, Callable | None]] = [
('alwaysRun', check_param_bool),
('description', None),
('descriptionDone', None),
('descriptionSuffix', None),
('doStepIf', None),
('flunkOnFailure', check_param_bool),
('flunkOnWarnings', check_param_bool),
('haltOnFailure', check_param_bool),
('updateBuildSummaryPolicy', None),
('hideStepIf', None),
('locks', None),
('logEncoding', None),
('name', check_param_str),
('progressMetrics', None),
('useProgress', None),
('warnOnFailure', check_param_bool),
('warnOnWarnings', check_param_bool),
('workdir', check_param_str_none),
]
_params_names: list[str] = [arg for arg, _ in _params_config]
name: str = "generic"
description: str | list[str] | None = None # set this to a list of short strings to override
descriptionDone: str | list[str] | None = (
None # alternate description when the step is complete
)
descriptionSuffix: str | list[str] | None = None # extra information to append to suffix
updateBuildSummaryPolicy: list[int] | None | bool = None
locks: list[str] | None = None
_locks_to_acquire: list[BaseLock] = []
progressMetrics: tuple[str, ...] = () # 'time' is implicit
useProgress: bool = True # set to False if step is really unpredictable
build: Build | None = None
step_status: None = None
progress: None = None
logEncoding: str | None = None
cmd: remotecommand.RemoteCommand | None = None
rendered: bool = False # true if attributes are rendered
_workdir: str | None = None
_waitingForLocks: bool = False
    def __init__(self, **kwargs):
        """Consume the parameters listed in _params_config; reject anything else.

        Also normalizes description fields to lists and computes the default
        updateBuildSummaryPolicy from the flunk/halt/warn flags.
        """
        self.worker = None

        for p, check in self.__class__._params_config:
            if p in kwargs:
                value = kwargs.pop(p)
                # renderables are validated later, once rendered
                if check is not None and not IRenderable.providedBy(value):
                    check(value, self.__class__, p)
                setattr(self, p, value)

        if kwargs:
            config.error(
                f"{self.__class__}.__init__ got unexpected keyword argument(s) {list(kwargs)}"
            )
        self._pendingLogObservers = []

        check_param_length(
            self.name, f'Step {self.__class__.__name__} name', Model.steps.c.name.type.length
        )

        if isinstance(self.description, str):
            self.description = [self.description]
        if isinstance(self.descriptionDone, str):
            self.descriptionDone = [self.descriptionDone]
        if isinstance(self.descriptionSuffix, str):
            self.descriptionSuffix = [self.descriptionSuffix]

        if self.updateBuildSummaryPolicy is None:
            # compute default value for updateBuildSummaryPolicy
            self.updateBuildSummaryPolicy = [EXCEPTION, RETRY, CANCELLED]
            if self.flunkOnFailure or self.haltOnFailure or self.warnOnFailure:
                self.updateBuildSummaryPolicy.append(FAILURE)
            if self.warnOnWarnings or self.flunkOnWarnings:
                self.updateBuildSummaryPolicy.append(WARNINGS)
        if self.updateBuildSummaryPolicy is False:
            self.updateBuildSummaryPolicy = []
        if self.updateBuildSummaryPolicy is True:
            self.updateBuildSummaryPolicy = ALL_RESULTS
        if not isinstance(self.updateBuildSummaryPolicy, list):
            config.error(
                "BuildStep updateBuildSummaryPolicy must be "
                "a list of result ids or boolean but it is "
                f"{self.updateBuildSummaryPolicy!r}"
            )
        self._acquiringLocks = []
        self.stopped = False
        self.timed_out = False
        self.max_lines_reached = False
        self.master = None
        self.statistics = {}
        self.logs = {}
        self._running = False
        self.stepid = None
        self.results = None
        self._start_unhandled_deferreds = None
        self._interrupt_deferwaiter = deferwaiter.DeferWaiter()

        # debounce summary updates so rapid log activity produces one DB write
        self._update_summary_debouncer = debounce.Debouncer(
            1.0, self._update_summary_impl, lambda: self.master.reactor
        )
        self._test_result_submitters = {}
    def __new__(klass, *args, **kwargs):
        """Create the step wrapped in an attribute-freezing subclass and record its factory."""
        # The following code prevents changing BuildStep attributes after an instance
        # is created during config time. Such attribute changes don't affect the factory,
        # so they will be lost when actual build step is created.
        #
        # This is implemented by dynamically creating a subclass that disallows attribute
        # writes after __init__ completes.
        self = object.__new__(_create_buildstep_wrapper_class(klass))
        self._factory = _BuildStepFactory(klass, *args, **kwargs)
        return self
def is_exact_step_class(self, klass):
# Due to wrapping BuildStep in __new__, it's not possible to compare self.__class__ to
# check if self is an instance of some class (but not subclass).
if self.__class__ is klass:
return True
mro = self.__class__.mro()
if len(mro) >= 3 and mro[1] is BuildStepWrapperMixin and mro[2] is klass:
return True
return False
def __str__(self):
args = [repr(x) for x in self._factory.args]
args.extend([str(k) + "=" + repr(v) for k, v in self._factory.kwargs.items()])
return f'{self.__class__.__name__}({", ".join(args)})'
__repr__ = __str__
    def setBuild(self, build: Build) -> None:
        # Associate this step with its build and cache the build's master.
        self.build = build
        self.master = self.build.master
    def setWorker(self, worker: AbstractWorker):
        # Record the worker this step will run on.
        self.worker = worker
    @deprecate.deprecated(versions.Version("buildbot", 0, 9, 0))
    def setDefaultWorkdir(self, workdir):
        # Deprecated since buildbot 0.9.0; only applies when no explicit
        # workdir was configured on the step.
        if self._workdir is None:
            self._workdir = workdir
    @property
    def workdir(self):
        """The effective working directory for this step.

        Falls back to the build-level workdir (which may be a callable of the
        build's sources) when no explicit workdir was set on the step.
        """
        # default the workdir appropriately
        if self._workdir is not None or self.build is None:
            return self._workdir
        else:
            # see :ref:`Factory-Workdir-Functions` for details on how to
            # customize this
            if callable(self.build.workdir):
                try:
                    return self.build.workdir(self.build.sources)
                except AttributeError as e:
                    # if the callable raises an AttributeError
                    # python thinks it is actually workdir that is not existing.
                    # python will then swallow the attribute error and call
                    # __getattr__ from worker_transition
                    _, _, traceback = sys.exc_info()
                    raise CallableAttributeError(e).with_traceback(traceback) from e
                    # we re-raise the original exception by changing its type,
                    # but keeping its stacktrace
            else:
                return self.build.workdir
    @workdir.setter
    def workdir(self, workdir):
        # An explicitly configured workdir always wins over the build default.
        self._workdir = workdir
    def getProperties(self):
        # Properties live on the build; steps simply delegate.
        return self.build.getProperties()
    def get_step_factory(self):
        # The factory recorded by __new__, used to clone this step per build.
        return self._factory
def set_step_arg(self, name, value):
self._factory.kwargs[name] = value
# check if buildstep can still be constructed with the new arguments
try:
self._factory.buildStep()
except Exception:
log.msg(f"Cannot set step factory attribute {name} to {value}: step creation fails")
raise
    # NOTE(review): both progress hooks are deliberate no-op stubs; callers
    # may still invoke them.
    def setupProgress(self):
        # this function temporarily does nothing
        pass
    def setProgress(self, metric, value):
        # this function temporarily does nothing
        pass
def getCurrentSummary(self):
if self.description is not None:
stepsumm = util.join_list(self.description)
if self.descriptionSuffix:
stepsumm += ' ' + util.join_list(self.descriptionSuffix)
else:
stepsumm = 'running'
return {'step': stepsumm}
def getResultSummary(self):
if self.descriptionDone is not None or self.description is not None:
stepsumm = util.join_list(self.descriptionDone or self.description)
if self.descriptionSuffix:
stepsumm += ' ' + util.join_list(self.descriptionSuffix)
else:
stepsumm = 'finished'
if self.results != SUCCESS:
stepsumm += f' ({statusToString(self.results)})'
if self.timed_out:
stepsumm += " (timed out)"
elif self.max_lines_reached:
stepsumm += " (max lines reached)"
if self.build is not None:
stepsumm = self.build.properties.cleanupTextFromSecrets(stepsumm)
return {'step': stepsumm}
@defer.inlineCallbacks
def getBuildResultSummary(self):
summary = yield self.getResultSummary()
if (
self.results in self.updateBuildSummaryPolicy
and 'build' not in summary
and 'step' in summary
):
summary['build'] = summary['step']
return summary
    def updateSummary(self):
        # Debounced: bursts of calls collapse into a single _update_summary_impl run.
        self._update_summary_debouncer()
    @defer.inlineCallbacks
    def _update_summary_impl(self):
        """Publish the current (or final) summary string through the data API.

        Validates that getCurrentSummary/getResultSummary returned a dict of
        strings before publishing; raises TypeError pointing at the offending
        method otherwise.
        """
        def methodInfo(m):
            # Render the offending method's source location for the error message.
            lines = inspect.getsourcelines(m)
            return "\nat {}:{}:\n {}".format(
                inspect.getsourcefile(m), lines[1], "\n".join(lines[0])
            )

        if not self._running:
            summary = yield self.getResultSummary()
            if not isinstance(summary, dict):
                raise TypeError(
                    'getResultSummary must return a dictionary: '
                    + methodInfo(self.getResultSummary)
                )
        else:
            summary = yield self.getCurrentSummary()
            if not isinstance(summary, dict):
                raise TypeError(
                    'getCurrentSummary must return a dictionary: '
                    + methodInfo(self.getCurrentSummary)
                )
        stepResult = summary.get('step', 'finished')
        if not isinstance(stepResult, str):
            raise TypeError(f"step result string must be unicode (got {stepResult!r})")
        if self.stepid is not None:
            stepResult = self.build.properties.cleanupTextFromSecrets(stepResult)
            yield self.master.data.updates.setStepStateString(self.stepid, stepResult)

        if not self._running:
            buildResult = summary.get('build', None)
            if buildResult and not isinstance(buildResult, str):
                raise TypeError("build result string must be unicode")
    @defer.inlineCallbacks
    def addStep(self):
        """Register this step with the data API, obtaining stepid, number and name."""
        # create and start the step, noting that the name may be altered to
        # ensure uniqueness
        self.name = yield self.build.render(self.name)
        self.build.setUniqueStepName(self)
        self.stepid, self.number, self.name = yield self.master.data.updates.addStep(
            buildid=self.build.buildid, name=util.bytes2unicode(self.name)
        )
@defer.inlineCallbacks
def startStep(self, remote):
    """Run the full step lifecycle and return the step's results code.

    Sequence: register the step, render renderables, evaluate doStepIf,
    acquire locks, run(), then perform cleanup/bookkeeping. Exceptions are
    translated into result codes rather than propagated.
    """
    self.remote = remote

    yield self.addStep()
    started_at = int(self.master.reactor.seconds())
    yield self.master.data.updates.startStep(self.stepid, started_at=started_at)

    try:
        yield self._render_renderables()
        # we describe ourselves only when renderables are interpolated
        self.updateSummary()

        # check doStepIf (after rendering)
        if isinstance(self.doStepIf, bool):
            doStep = self.doStepIf
        else:
            doStep = yield self.doStepIf(self)

        if doStep:
            yield self._setup_locks()

            # set up locks
            if self._locks_to_acquire:
                yield self.acquireLocks()

                if self.stopped:
                    raise BuildStepCancelled

                locks_acquired_at = int(self.master.reactor.seconds())
                # record lock wait time both on the step and the build
                yield defer.DeferredList([
                    self.master.data.updates.set_step_locks_acquired_at(
                        self.stepid, locks_acquired_at=locks_acquired_at
                    ),
                    self.master.data.updates.add_build_locks_duration(
                        self.build.buildid, duration_s=locks_acquired_at - started_at
                    ),
                ])
            else:
                yield self.master.data.updates.set_step_locks_acquired_at(
                    self.stepid, locks_acquired_at=started_at
                )

            if self.stopped:
                raise BuildStepCancelled

            yield self.addTestResultSets()
            try:
                self._running = True
                self.results = yield self.run()
            finally:
                self._running = False
        else:
            self.results = SKIPPED

    # NOTE: all of these `except` blocks must set self.results immediately!
    except BuildStepCancelled:
        self.results = CANCELLED
    except BuildStepFailed:
        self.results = FAILURE
    except error.ConnectionLost:
        self.results = RETRY
    except Exception:
        self.results = EXCEPTION
        why = Failure()
        log.err(why, "BuildStep.failed; traceback follows")
        yield self.addLogWithFailure(why)

    if self.stopped and self.results != RETRY:
        # We handle this specially because we don't care about
        # the return code of an interrupted command; we know
        # that this should just be exception due to interrupt
        # At the same time we must respect RETRY status because it's used
        # to retry interrupted build due to some other issues for example
        # due to worker lost
        if self.results != CANCELLED:
            self.results = EXCEPTION

    # determine whether we should hide this step
    hidden = self.hideStepIf
    if callable(hidden):
        try:
            hidden = hidden(self.results, self)
        except Exception:
            why = Failure()
            log.err(why, "hidden callback failed; traceback follows")
            yield self.addLogWithFailure(why)
            self.results = EXCEPTION
            hidden = False

    # perform final clean ups
    success = yield self._cleanup_logs()
    if not success:
        self.results = EXCEPTION

    # update the summary one last time, make sure that completes,
    # and then don't update it any more.
    self.updateSummary()
    yield self._update_summary_debouncer.stop()

    for sub in self._test_result_submitters.values():
        yield sub.finish()

    self.releaseLocks()

    yield self.master.data.updates.finishStep(self.stepid, self.results, hidden)

    return self.results
@defer.inlineCallbacks
def _setup_locks(self):
    """Resolve this step's lock accesses into real locks.

    Raises RuntimeError if any lock is also claimed by the parent Build,
    which would deadlock (the build already holds it for the step's whole
    duration).
    """
    self._locks_to_acquire = yield get_real_locks_from_accesses(self.locks, self.build)

    if self.build._locks_to_acquire:
        build_locks = [l for l, _ in self.build._locks_to_acquire]
        for l, _ in self._locks_to_acquire:
            if l in build_locks:
                log.err(
                    f"{self}: lock {l} is claimed by both a Step ({self}) and the"
                    f" parent Build ({self.build})"
                )
                raise RuntimeError(f"lock claimed by both Step and Build ({l})")
@defer.inlineCallbacks
def _render_renderables(self):
    """Render every attribute listed in the class's 'renderables' lists
    (accumulated across the MRO), storing each rendered value back onto
    self, then mark the step as rendered."""
    attr_names = []
    accumulateClassList(self.__class__, 'renderables', attr_names)

    def _store(rendered_value, attr_name):
        setattr(self, attr_name, rendered_value)

    pending = []
    for attr_name in attr_names:
        d = self.build.render(getattr(self, attr_name))
        d.addCallback(_store, attr_name)
        pending.append(d)
    yield defer.gatherResults(pending)
    self.rendered = True
def setBuildData(self, name, value, source):
    # Store a named blob of build data against the owning build.
    # returns a Deferred that yields nothing
    return self.master.data.updates.setBuildData(self.build.buildid, name, value, source)
@defer.inlineCallbacks
def _cleanup_logs(self):
    """Finish all unfinished logs; return False if any log failed or had errors."""
    # Wait until any in-progress interrupt() to finish (that function may add new logs)
    yield self._interrupt_deferwaiter.wait()

    all_success = True
    not_finished_logs = [v for (k, v) in self.logs.items() if not v.finished]
    finish_logs = yield defer.DeferredList(
        [v.finish() for v in not_finished_logs], consumeErrors=True
    )
    for success, res in finish_logs:
        if not success:
            log.err(res, "when trying to finish a log")
            all_success = False

    for log_ in self.logs.values():
        if log_.had_errors():
            all_success = False

    return all_success
def addTestResultSets(self):
    # Hook for subclasses to create their test result sets before run();
    # the default creates none.
    return defer.succeed(None)
@defer.inlineCallbacks
def addTestResultSet(self, description, category, value_unit):
    """Create a test result set for this step and return its id."""
    sub = TestResultSubmitter()
    yield sub.setup(self, description, category, value_unit)
    setid = sub.get_test_result_set_id()
    # submitters are finished in startStep() after run() completes
    self._test_result_submitters[setid] = sub
    return setid
def addTestResult(
    self, setid, value, test_name=None, test_code_path=None, line=None, duration_ns=None
):
    """Record a single test result against the result set *setid*."""
    submitter = self._test_result_submitters[setid]
    submitter.add_test_result(
        value,
        test_name=test_name,
        test_code_path=test_code_path,
        line=line,
        duration_ns=duration_ns,
    )
def acquireLocks(self, res=None):
    """Try to claim all step locks; if one is unavailable, wait on it and
    re-enter this method (via Deferred callback) when it may be free."""
    if not self._locks_to_acquire:
        return defer.succeed(None)
    if self.stopped:
        return defer.succeed(None)
    log.msg(f"acquireLocks(step {self}, locks {self._locks_to_acquire})")
    for lock, access in self._locks_to_acquire:
        # NOTE(review): this inner loop's `continue` only advances the inner
        # iteration, so the loop has no observable effect; it looks intended
        # to skip locks we are already waiting on — confirm against upstream.
        for waited_lock, _, _ in self._acquiringLocks:
            if lock is waited_lock:
                continue

        if not lock.isAvailable(self, access):
            self._waitingForLocks = True
            log.msg(f"step {self} waiting for lock {lock}")
            d = lock.waitUntilMaybeAvailable(self, access)
            self._acquiringLocks.append((lock, access, d))
            d.addCallback(self.acquireLocks)
            return d

    # all locks are available, claim them all
    for lock, access in self._locks_to_acquire:
        lock.claim(self, access)
    self._acquiringLocks = []
    self._waitingForLocks = False
    return defer.succeed(None)
def run(self):
    """Perform this step's work; subclasses must override.

    Raises:
        NotImplementedError: always, in this base implementation.
    """
    raise NotImplementedError("A custom build step must implement run()")
@defer.inlineCallbacks
def _maybe_interrupt_cmd(self, reason):
    """Interrupt the currently-running remote command, if any; log and
    swallow any error so interruption never raises."""
    if not self.cmd:
        return

    try:
        yield self.cmd.interrupt(reason)
    except Exception as e:
        log.err(e, 'while cancelling command')
def interrupt(self, reason):
    """Request cancellation of this step; returns a Deferred."""
    # Note that this method may be run outside usual step lifecycle (e.g. after run() has
    # already completed), so extra care needs to be taken to prevent race conditions.
    # The deferwaiter lets _cleanup_logs wait for any in-flight interrupt.
    return self._interrupt_deferwaiter.add(self._interrupt_impl(reason))
@defer.inlineCallbacks
def _interrupt_impl(self, reason):
    """Actually perform the interruption: stop lock waits, log the reason,
    and interrupt the running command."""
    if self.stopped:
        # If we are in the process of interruption and connection is lost then we must tell
        # the command not to wait for the interruption to complete.
        if isinstance(reason, Failure) and reason.check(error.ConnectionLost):
            yield self._maybe_interrupt_cmd(reason)
        return

    self.stopped = True
    if self._acquiringLocks:
        for lock, access, d in self._acquiringLocks:
            lock.stopWaitingUntilAvailable(self, access, d)
        self._acquiringLocks = []

    log_name = "cancelled while waiting for locks" if self._waitingForLocks else "cancelled"
    yield self.addCompleteLog(log_name, str(reason))
    yield self._maybe_interrupt_cmd(reason)
def releaseLocks(self):
    """Release every lock this step owns; an unowned lock is tolerated only
    after an interrupt (the step may never have claimed it)."""
    log.msg(f"releaseLocks({self}): {self._locks_to_acquire}")
    for lock, access in self._locks_to_acquire:
        if not lock.isOwner(self, access):
            # This should only happen if we've been interrupted
            assert self.stopped
            continue
        lock.release(self, access)
# utility methods that BuildSteps may find useful

def workerVersion(self, command, oldversion=None):
    """Return the worker's version string for *command* (delegates to the build)."""
    return self.build.getWorkerCommandVersion(command, oldversion)
def workerVersionIsOlderThan(self, command, minversion):
    """Return True when the worker's version of *command* is unknown or is
    numerically older than *minversion* (dotted-integer comparison)."""
    reported = self.build.getWorkerCommandVersion(command, None)
    if reported is None:
        # unknown version: treat as older
        return True

    def _parts(version):
        return [int(piece) for piece in version.split(".")]

    return _parts(reported) < _parts(minversion)
def checkWorkerHasCommand(self, command):
    """Raise WorkerSetupError unless the worker knows about *command*."""
    if self.workerVersion(command):
        return
    message = f"worker is too old, does not know about {command}"
    raise WorkerSetupError(message)
def getWorkerName(self):
    """Return the name of the worker running this step's build."""
    return self.build.getWorkerName()
def addLog(self, name, type='s', logEncoding=None):
    """Create a new log of the given type; returns a Deferred firing with the Log."""
    if self.stepid is None:
        raise BuildStepCancelled
    d = self.master.data.updates.addLog(self.stepid, util.bytes2unicode(name), str(type))

    # decorator form registers newLog as the Deferred's callback
    @d.addCallback
    def newLog(logid):
        return self._newLog(name, type, logid, logEncoding)

    return d
def getLog(self, name):
    """Return the already-created Log named *name*; raises KeyError if absent."""
    return self.logs[name]
@defer.inlineCallbacks
def addCompleteLog(self, name, text):
    """Create a text ('t') log, write *text* to it, and finish it immediately."""
    if self.stepid is None:
        raise BuildStepCancelled
    logid = yield self.master.data.updates.addLog(self.stepid, util.bytes2unicode(name), 't')
    _log = self._newLog(name, 't', logid)
    yield _log.addContent(text)
    yield _log.finish()
@defer.inlineCallbacks
def addHTMLLog(self, name, html):
    """Create an HTML ('h') log, write *html* to it, and finish it immediately."""
    if self.stepid is None:
        raise BuildStepCancelled
    logid = yield self.master.data.updates.addLog(self.stepid, util.bytes2unicode(name), 'h')
    _log = self._newLog(name, 'h', logid)
    html = bytes2unicode(html)
    yield _log.addContent(html)
    yield _log.finish()
@defer.inlineCallbacks
def addLogWithFailure(self, why, logprefix=""):
    """Attach a Failure to the step as both text and HTML logs."""
    # helper for showing exceptions to the users
    try:
        yield self.addCompleteLog(logprefix + "err.text", why.getTraceback())
        yield self.addHTMLLog(logprefix + "err.html", formatFailure(why))
    except Exception:
        # formatting must never crash the step itself
        log.err(Failure(), "error while formatting exceptions")
def addLogWithException(self, why, logprefix=""):
    """Like addLogWithFailure, but accepts a plain exception instance."""
    return self.addLogWithFailure(Failure(why), logprefix)
def addLogObserver(self, logname, observer):
    """Attach *observer* to the log *logname*, now or when the log appears."""
    assert interfaces.ILogObserver.providedBy(observer)
    observer.setStep(self)
    self._pendingLogObservers.append((logname, observer))
    self._connectPendingLogObservers()
def _newLog(self, name, type, logid, logEncoding=None):
    """Instantiate a Log of the given type, register it in self.logs, and
    hook up any observers waiting for it.

    Encoding precedence: explicit argument, then the step's logEncoding,
    then the master's configured default.
    """
    encoding = logEncoding or self.logEncoding or self.master.config.logEncoding
    # renamed local so it does not shadow the module-level twisted 'log'
    new_log = plog.Log.new(self.master, name, type, logid, encoding)
    self.logs[name] = new_log
    self._connectPendingLogObservers()
    return new_log
def _connectPendingLogObservers(self):
for logname, observer in self._pendingLogObservers[:]:
if logname in self.logs:
observer.setLog(self.logs[logname])
self._pendingLogObservers.remove((logname, observer))
@defer.inlineCallbacks
def addURL(self, name, url):
    """Attach a named URL to this step via the data API."""
    yield self.master.data.updates.addStepURL(self.stepid, str(name), str(url))
    return None
@defer.inlineCallbacks
def runCommand(self, command):
    """Run a RemoteCommand on the step's worker, tracking timeout/max-lines
    failures; returns the command's result (or CANCELLED if already stopped)."""
    if self.stopped:
        return CANCELLED
    # keep a reference so interrupt() can reach the running command
    self.cmd = command
    command.worker = self.worker
    try:
        res = yield command.run(self, self.remote, self.build.builder.name)
        if command.remote_failure_reason in ("timeout", "timeout_without_output"):
            self.timed_out = True
        elif command.remote_failure_reason in ("max_lines_failure",):
            self.max_lines_reached = True
    finally:
        self.cmd = None
    return res
def hasStatistic(self, name):
    """Return True if statistic *name* has been recorded for this step."""
    return name in self.statistics

def getStatistic(self, name, default=None):
    """Return the value of statistic *name*, or *default* if unset."""
    return self.statistics.get(name, default)

def getStatistics(self):
    """Return a shallow copy of all recorded statistics."""
    return self.statistics.copy()

def setStatistic(self, name, value):
    """Record (or overwrite) the statistic *name*."""
    self.statistics[name] = value
class CommandMixin:
    """Convenience wrappers for common remote filesystem commands
    (rmdir, stat, mkdir, glob), all sharing the step's 'stdio' log."""

    @defer.inlineCallbacks
    def _runRemoteCommand(self, cmd, abandonOnFailure, args, makeResult=None):
        remote_cmd = remotecommand.RemoteCommand(cmd, args)
        # reuse the existing stdio log when present, otherwise create it
        try:
            stdio_log = self.getLog('stdio')
        except Exception:
            stdio_log = yield self.addLog('stdio')
        remote_cmd.useLog(stdio_log, False)
        yield self.runCommand(remote_cmd)
        if abandonOnFailure and remote_cmd.didFail():
            raise BuildStepFailed()
        if makeResult:
            return makeResult(remote_cmd)
        return not remote_cmd.didFail()

    def runRmdir(self, dir, log=None, abandonOnFailure=True):
        """Remove a directory on the worker."""
        return self._runRemoteCommand('rmdir', abandonOnFailure, {'dir': dir, 'logEnviron': False})

    def pathExists(self, path, log=None):
        """Return whether *path* exists on the worker (never fails the step)."""
        return self._runRemoteCommand('stat', False, {'file': path, 'logEnviron': False})

    def runMkdir(self, dir, log=None, abandonOnFailure=True):
        """Create a directory on the worker."""
        return self._runRemoteCommand('mkdir', abandonOnFailure, {'dir': dir, 'logEnviron': False})

    def runGlob(self, path):
        """Expand a glob pattern on the worker; returns the matched files."""
        return self._runRemoteCommand(
            'glob',
            True,
            {'path': path, 'logEnviron': False},
            makeResult=lambda cmd: cmd.updates['files'][0],
        )
class ShellMixin:
    """Mixin for steps that run a remote shell command.

    Provides the standard shell parameters (command, env, timeouts, log
    files, ...), validation of constructor kwargs via setupShellMixin(),
    and construction of a RemoteShellCommand via makeRemoteShellCommand().
    """

    # Class-level defaults; per-step values are set by setupShellMixin().
    # NOTE(review): env/logfiles/decodeRC are mutable class attributes shared
    # across instances; nothing in this class mutates them in place (new
    # dicts are built in makeRemoteShellCommand) — confirm subclasses do the same.
    command: list[str] | None = None
    env: dict[str, str] = {}
    want_stdout = True
    want_stderr = True
    usePTY: bool | None = None
    logfiles: dict[str, str] = {}
    lazylogfiles: bool = False
    timeout = 1200
    maxTime: float | None = None
    max_lines: int | None = None
    logEnviron = True
    interruptSignal = 'KILL'
    sigtermTime: int | None = None
    initialStdin: str | None = None
    decodeRC = {0: SUCCESS}

    # (argument name, validator) pairs; a None validator accepts anything
    _shell_mixin_arg_config = [
        ('command', None),
        ('workdir', check_param_str),
        ('env', None),
        ('want_stdout', check_param_bool),
        ('want_stderr', check_param_bool),
        ('usePTY', check_param_bool),
        ('logfiles', None),
        ('lazylogfiles', check_param_bool),
        ('timeout', check_param_number_none),
        ('maxTime', check_param_number_none),
        ('max_lines', check_param_number_none),
        ('logEnviron', check_param_bool),
        ('interruptSignal', check_param_str_none),
        ('sigtermTime', check_param_number_none),
        ('initialStdin', check_param_str_none),
        ('decodeRC', None),
    ]
    # every shell parameter may be a renderable
    renderables: Sequence[str] = [arg for arg, _ in _shell_mixin_arg_config]

    def setupShellMixin(self, constructorArgs, prohibitArgs=None):
        """Consume shell-related kwargs from *constructorArgs*, validating and
        storing them on self; returns the remaining kwargs for BuildStep."""
        constructorArgs = constructorArgs.copy()

        if prohibitArgs is None:
            prohibitArgs = []

        def bad(arg):
            config.error(f"invalid {self.__class__.__name__} argument {arg}")

        for arg, check in self._shell_mixin_arg_config:
            if arg not in constructorArgs:
                continue
            if arg in prohibitArgs:
                bad(arg)
            else:
                value = constructorArgs[arg]
                # renderables cannot be validated until render time
                if check is not None and not IRenderable.providedBy(value):
                    check(value, self.__class__, arg)

                setattr(self, arg, constructorArgs[arg])
            del constructorArgs[arg]

        # anything left over must be a plain BuildStep parameter
        for arg in list(constructorArgs):
            if arg not in BuildStep._params_names:
                bad(arg)
                del constructorArgs[arg]
        return constructorArgs

    @defer.inlineCallbacks
    def makeRemoteShellCommand(
        self, collectStdout=False, collectStderr=False, stdioLogName='stdio', **overrides
    ):
        """Build a RemoteShellCommand from this mixin's parameters (plus
        *overrides*), wiring up stdio and any configured log files."""
        kwargs = {arg: getattr(self, arg) for arg, _ in self._shell_mixin_arg_config}
        kwargs.update(overrides)
        stdio = None
        if stdioLogName is not None:
            # Reuse an existing log if possible; otherwise, create one.
            try:
                stdio = yield self.getLog(stdioLogName)
            except KeyError:
                stdio = yield self.addLog(stdioLogName)

        kwargs['command'] = flatten(kwargs['command'], (list, tuple))

        # store command away for display
        self.command = kwargs['command']

        # check for the usePTY flag
        if kwargs['usePTY'] is not None:
            if self.workerVersionIsOlderThan("shell", "2.7"):
                if stdio is not None:
                    yield stdio.addHeader("NOTE: worker does not allow master to override usePTY\n")
                del kwargs['usePTY']

        # check for the interruptSignal flag
        if kwargs["interruptSignal"] and self.workerVersionIsOlderThan("shell", "2.15"):
            if stdio is not None:
                yield stdio.addHeader(
                    "NOTE: worker does not allow master to specify interruptSignal\n"
                )
            del kwargs['interruptSignal']

        # lazylogfiles are handled below
        del kwargs['lazylogfiles']

        # merge the builder's environment with that supplied here
        builderEnv = self.build.builder.config.env
        kwargs['env'] = {
            **(yield self.build.render(builderEnv)),
            **kwargs['env'],
        }
        kwargs['stdioLogName'] = stdioLogName

        # default the workdir appropriately
        if not kwargs.get('workdir') and not self.workdir:
            if callable(self.build.workdir):
                kwargs['workdir'] = self.build.workdir(self.build.sources)
            else:
                kwargs['workdir'] = self.build.workdir

        # the rest of the args go to RemoteShellCommand
        cmd = remotecommand.RemoteShellCommand(
            collectStdout=collectStdout, collectStderr=collectStderr, **kwargs
        )

        # set up logging
        if stdio is not None:
            cmd.useLog(stdio, False)
        for logname in self.logfiles:
            if self.lazylogfiles:
                # it's OK if this does, or does not, return a Deferred
                def callback(cmd_arg, local_logname=logname):
                    return self.addLog(local_logname)

                cmd.useLogDelayed(logname, callback, True)
            else:
                # add a LogFile
                newlog = yield self.addLog(logname)
                # and tell the RemoteCommand to feed it
                cmd.useLog(newlog, False)

        return cmd

    def getResultSummary(self):
        """Summarize the step from its command line (with result/timeout
        annotations), unless an explicit descriptionDone was configured."""
        if self.descriptionDone is not None:
            return super().getResultSummary()
        summary = util.command_to_string(self.command)
        if summary:
            if self.results != SUCCESS:
                summary += f' ({statusToString(self.results)})'
                if self.timed_out:
                    summary += " (timed out)"
                elif self.max_lines_reached:
                    summary += " (max lines)"
            if self.build is not None:
                summary = self.build.properties.cleanupTextFromSecrets(summary)
            return {'step': summary}
        return super().getResultSummary()
# Reference WithProperties once so pyflakes does not flag its import as
# unused (the name is kept for backward compatibility).
_hush_pyflakes = [WithProperties]
del _hush_pyflakes
| 39,392 | Python | .py | 891 | 34.308642 | 100 | 0.631545 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,815 | log.py | buildbot_buildbot/master/buildbot/process/log.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import annotations
import re
from twisted.internet import defer
from twisted.python import log
from buildbot import util
from buildbot.util import lineboundaries
class Log:
    """A log attached to a build step, backed by the master's data API.

    Subclasses register themselves in _byType under a one-character type
    code ('t' text, 'h' html, 's' stream) and are instantiated via new().
    """

    # registry: one-character log type -> concrete Log subclass
    _byType: dict[str, type[Log]] = {}

    def __init__(self, master, name, type, logid, decoder):
        self.type = type
        self.logid = logid
        self.master = master
        self.name = name

        self.subPoint = util.subscription.SubscriptionPoint(f"{name!r} log")
        self.subscriptions = {}
        self._finishing = False
        self.finished = False
        self.finishWaiters = []
        self._had_errors = False
        # serializes appendLog/finishLog calls against the data API
        self.lock = defer.DeferredLock()
        self.decoder = decoder

    @staticmethod
    def _decoderFromString(cfg):
        """
        Return a decoder function.
        If cfg is a string such as 'latin-1' or u'latin-1',
        then we return a new lambda, s.decode().
        If cfg is already a lambda or function, then we return that.
        """
        if isinstance(cfg, (bytes, str)):
            return lambda s: s.decode(cfg, 'replace')
        return cfg

    @classmethod
    def new(cls, master, name, type, logid, logEncoding):
        """Factory: instantiate the Log subclass registered for *type*.

        Raises:
            RuntimeError: if *type* is not a registered log type.
        """
        type = str(type)
        try:
            subcls = cls._byType[type]
        except KeyError as e:
            raise RuntimeError(f"Invalid log type {type!r}") from e
        decoder = Log._decoderFromString(logEncoding)
        return subcls(master, name, type, logid, decoder)

    def getName(self):
        return self.name

    # subscriptions

    def subscribe(self, callback):
        """Subscribe *callback* to receive (stream, lines) deliveries."""
        return self.subPoint.subscribe(callback)

    # adding lines

    @defer.inlineCallbacks
    def addRawLines(self, lines):
        # used by subclasses to add lines that are already appropriately
        # formatted for the log type, and newline-terminated
        assert lines[-1] == '\n'
        assert not self.finished
        yield self.lock.run(lambda: self.master.data.updates.appendLog(self.logid, lines))

    # completion

    def isFinished(self):
        return self.finished

    def waitUntilFinished(self):
        """Return a Deferred that fires when this log is finished."""
        d = defer.Deferred()
        if self.finished:
            # Fix: Deferred has no 'succeed' method (that is the module-level
            # defer.succeed); fire the already-finished case with callback().
            d.callback(None)
        else:
            self.finishWaiters.append(d)
        return d

    def had_errors(self):
        """Return True if any subscriber raised during delivery."""
        return self._had_errors

    @defer.inlineCallbacks
    def finish(self):
        """Mark the log finished, notify subscribers/waiters, and kick off
        background compression of the stored log."""
        assert not self._finishing, "Did you maybe forget to yield the method?"
        assert not self.finished
        self._finishing = True

        def fToRun():
            self.finished = True
            return self.master.data.updates.finishLog(self.logid)

        yield self.lock.run(fToRun)
        # notify subscribers *after* finishing the log
        self.subPoint.deliver(None, None)

        yield self.subPoint.waitForDeliveriesToFinish()

        # notify those waiting for finish
        for d in self.finishWaiters:
            d.callback(None)

        self._had_errors = len(self.subPoint.pop_exceptions()) > 0

        # start a compressLog call but don't make our caller wait for
        # it to complete
        d = self.master.data.updates.compressLog(self.logid)
        d.addErrback(log.err, f"while compressing log {self.logid} (ignored)")
        self.master.db.run_db_task(d)
        self._finishing = False
class PlainLog(Log):
    """Log holding a single undifferentiated stream of text."""

    def __init__(self, master, name, type, logid, decoder):
        super().__init__(master, name, type, logid, decoder)
        # buffers partial lines so only whole lines are emitted
        self.lbf = lineboundaries.LineBoundaryFinder()

    def addContent(self, text):
        """Append text; bytes are decoded first. Only whole lines are stored
        and delivered to subscribers."""
        if not isinstance(text, str):
            text = self.decoder(text)
        whole_lines = self.lbf.append(text)
        if whole_lines is None:
            return defer.succeed(None)
        self.subPoint.deliver(None, whole_lines)
        return self.addRawLines(whole_lines)

    @defer.inlineCallbacks
    def finish(self):
        remainder = self.lbf.flush()
        if remainder is not None:
            self.subPoint.deliver(None, remainder)
            yield self.addRawLines(remainder)
        yield super().finish()
class TextLog(PlainLog):
    """Plain text log (type code 't')."""

    pass


Log._byType['t'] = TextLog
class HtmlLog(PlainLog):
    """HTML log (type code 'h')."""

    pass


Log._byType['h'] = HtmlLog
class StreamLog(Log):
    """Log with interleaved streams: stdout ('o'), stderr ('e'), header ('h').

    Each stored line is prefixed with its one-character stream code.
    """

    # matches the start of every line (MULTILINE); used to insert the
    # stream-code prefix on each line
    pat = re.compile('^', re.M)

    def __init__(self, step, name, type, logid, decoder):
        super().__init__(step, name, type, logid, decoder)
        # one line-boundary finder per stream so partial lines don't interleave
        self.lbfs = {}

    def _getLbf(self, stream):
        # get-or-create the LineBoundaryFinder for this stream
        try:
            return self.lbfs[stream]
        except KeyError:
            lbf = self.lbfs[stream] = lineboundaries.LineBoundaryFinder()
            return lbf

    def _on_whole_lines(self, stream, lines):
        # deliver the un-annotated version to subscribers
        self.subPoint.deliver(stream, lines)
        # strip the last character, as the regexp will add a
        # prefix character after the trailing newline
        return self.addRawLines(self.pat.sub(stream, lines)[:-1])

    def split_lines(self, stream, text):
        # buffer text until at least one complete line is available
        lbf = self._getLbf(stream)
        lines = lbf.append(text)
        if lines is None:
            return defer.succeed(None)
        return self._on_whole_lines(stream, lines)

    def addStdout(self, text):
        """Append (possibly partial-line) stdout text; bytes are decoded."""
        if not isinstance(text, str):
            text = self.decoder(text)
        return self.split_lines('o', text)

    def addStderr(self, text):
        """Append (possibly partial-line) stderr text; bytes are decoded."""
        if not isinstance(text, str):
            text = self.decoder(text)
        return self.split_lines('e', text)

    def addHeader(self, text):
        """Append (possibly partial-line) header text; bytes are decoded."""
        if not isinstance(text, str):
            text = self.decoder(text)
        return self.split_lines('h', text)

    def add_stdout_lines(self, text):
        """Append already newline-terminated whole lines of stdout."""
        if not isinstance(text, str):
            text = self.decoder(text)
        return self._on_whole_lines('o', text)

    def add_stderr_lines(self, text):
        """Append already newline-terminated whole lines of stderr."""
        if not isinstance(text, str):
            text = self.decoder(text)
        return self._on_whole_lines('e', text)

    def add_header_lines(self, text):
        """Append already newline-terminated whole lines of header text."""
        if not isinstance(text, str):
            text = self.decoder(text)
        return self._on_whole_lines('h', text)

    @defer.inlineCallbacks
    def finish(self):
        # flush any partial final line in each stream before finishing
        for stream, lbf in self.lbfs.items():
            lines = lbf.flush()
            if lines is not None:
                # NOTE(review): the Deferred returned by _on_whole_lines is
                # not awaited here — confirm this is intentional.
                self._on_whole_lines(stream, lines)
        yield super().finish()


Log._byType['s'] = StreamLog
| 7,104 | Python | .py | 183 | 30.989071 | 90 | 0.642347 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,816 | base.py | buildbot_buildbot/master/buildbot/process/base.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from buildbot.process.build import Build
# Reference Build once so pyflakes does not flag the re-exported import as unused.
_hush_pyflakes = [Build]
| 774 | Python | .py | 16 | 47.1875 | 79 | 0.790728 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,817 | debug.py | buildbot_buildbot/master/buildbot/process/debug.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import annotations
from twisted.internet import defer
from buildbot.util import service
class DebugServices(service.ReconfigurableServiceMixin, service.AsyncMultiService):
    """Service container for debug facilities (currently only the manhole),
    reconfigured from the master's configuration."""

    name: str | None = 'debug_services'  # type: ignore[assignment]

    def __init__(self):
        super().__init__()
        # NOTE(review): debug_port/debug_password/debug_registration are not
        # referenced elsewhere in this class — possibly legacy; confirm before
        # relying on them.
        self.debug_port = None
        self.debug_password = None
        self.debug_registration = None
        self.manhole = None

    @defer.inlineCallbacks
    def reconfigServiceWithBuildbotConfig(self, new_config):
        # swap the manhole service if the configured instance changed
        if new_config.manhole != self.manhole:
            if self.manhole:
                yield self.manhole.disownServiceParent()
                self.manhole = None

            if new_config.manhole:
                self.manhole = new_config.manhole
                yield self.manhole.setServiceParent(self)

        # chain up
        yield super().reconfigServiceWithBuildbotConfig(new_config)

    @defer.inlineCallbacks
    def stopService(self):
        # manhole will get stopped as a sub-service
        yield super().stopService()

        # clean up
        if self.manhole:
            self.manhole = None
| 1,862 | Python | .py | 43 | 36.906977 | 83 | 0.706696 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,818 | botmaster.py | buildbot_buildbot/master/buildbot/process/botmaster.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import annotations
from typing import TYPE_CHECKING
from twisted.internet import defer
from twisted.python import log
from buildbot import locks
from buildbot import util
from buildbot.db.buildrequests import AlreadyClaimedError
from buildbot.process import metrics
from buildbot.process.builder import Builder
from buildbot.process.buildrequestdistributor import BuildRequestDistributor
from buildbot.process.results import CANCELLED
from buildbot.process.results import RETRY
from buildbot.process.workerforbuilder import States
from buildbot.util import service
from buildbot.util.render_description import render_description
from buildbot.worker.latent import AbstractLatentWorker
if TYPE_CHECKING:
from buildbot.worker import AbstractWorker
class LockRetrieverMixin:
    """Mixin resolving lock identifiers and lock accesses into real locks."""

    @defer.inlineCallbacks
    def getLockByID(self, lockid, config_version):
        """Convert a Lock identifier into an actual Lock instance.
        @lockid: a locks.MasterLock or locks.WorkerLock instance
        @config_version: The version of the config from which the list of locks has been
            acquired by the downstream user.
        @return: a locks.RealMasterLock or locks.RealWorkerLock instance

        The real locks are tracked using lock ID and config_version. The latter is used as a
        way to track most recent properties of real locks.

        This approach is needed because there's no central registry of lock access instances
        that are used within a Buildbot master.cfg (like there is for e.g c['builders']). All
        lock accesses bring all lock information with themselves as the lockid member.
        Therefore, the reconfig process is relatively complicated, because we don't know
        whether a specific access instance encodes lock information before reconfig or after.
        Taking into account config_version allows us to know when properties of a lock should
        be updated.

        Note that the user may create multiple lock ids with different maxCount values. It's
        unspecified which maxCount value the real lock will have.
        """
        assert isinstance(config_version, int)
        lock = yield lockid.lockClass.getService(self, lockid.name)
        if config_version > lock.config_version:
            # refresh lock properties only when coming from a newer config
            lock.updateFromLockId(lockid, config_version)

        return lock

    def getLockFromLockAccess(self, access, config_version):
        """Convert a lock-access object into an actual Lock instance."""
        if not isinstance(access, locks.LockAccess):
            # Buildbot 0.7.7 compatibility: user did not specify access
            access = access.defaultAccess()
        return self.getLockByID(access.lockid, config_version)

    @defer.inlineCallbacks
    def getLockFromLockAccesses(self, accesses, config_version):
        """Resolve several lock accesses at once; returns an iterable of
        (real_lock, access) pairs."""
        # Renamed local (was 'locks') so it no longer shadows the
        # module-level 'locks' import used by getLockFromLockAccess.
        real_locks = yield defer.gatherResults([
            self.getLockFromLockAccess(access, config_version) for access in accesses
        ])
        return zip(real_locks, accesses)
class BotMaster(service.ReconfigurableServiceMixin, service.AsyncMultiService, LockRetrieverMixin):
"""This is the master-side service which manages remote buildbot workers.
It provides them with Workers, and distributes build requests to
them."""
debug = 0
name: str | None = "botmaster" # type: ignore[assignment]
def __init__(self) -> None:
    super().__init__()

    self.builders: dict[str, Builder] = {}
    self.builderNames: list[str] = []
    # builders maps Builder names to instances of bb.p.builder.Builder,
    # which is the master-side object that defines and controls a build.

    # Unused?
    self.watchers: dict[object, object] = {}

    # True while a clean shutdown is in progress (see cleanShutdown)
    self.shuttingDown = False

    # subscription to new build requests
    self.buildrequest_consumer_new = None
    self.buildrequest_consumer_unclaimed = None
    self.buildrequest_consumer_cancel = None

    # a distributor for incoming build requests; see below
    self.brd = BuildRequestDistributor(self)
    self.brd.setServiceParent(self)

    # Dictionary of build request ID to False or cancellation reason string in case cancellation
    # has been requested.
    self._starting_brid_to_cancel: dict[int, bool | str] = {}
@defer.inlineCallbacks
def cleanShutdown(self, quickMode=False, stopReactor=True):
    """Shut down the entire process, once all currently-running builds are
    complete.
    quickMode will mark all builds as retry (except the ones that were triggered)
    """
    if self.shuttingDown:
        return
    log.msg("Initiating clean shutdown")
    self.shuttingDown = True
    # first, stop the distributor; this will finish any ongoing scheduling
    # operations before firing
    if quickMode:
        # if quick mode, builds will be cancelled, so stop scheduling altogether
        yield self.brd.disownServiceParent()
    else:
        # if not quick, still schedule waited child builds
        # other parent will never finish
        self.brd.distribute_only_waited_childs = True

    # Double check that we're still supposed to be shutting down
    # The shutdown may have been cancelled!
    while self.shuttingDown:
        if quickMode:
            for builder in self.builders.values():
                # As we stop the builds, builder.building might change during loop
                # so we need to copy the list
                for build in list(builder.building):
                    # if build is waited for then this is a sub-build, so
                    # no need to retry it
                    if sum(br.waitedFor for br in build.requests):
                        results = CANCELLED
                    else:
                        results = RETRY
                    is_building = (
                        build.workerforbuilder is not None
                        and build.workerforbuilder.state == States.BUILDING
                    )

                    # Master should not wait build.stopBuild for ages to complete if worker
                    # does not send any message about shutting the builds down quick enough.
                    # Just kill the connection with the worker
                    def lose_connection(b):
                        if b.workerforbuilder.worker.conn is not None:
                            b.workerforbuilder.worker.conn.loseConnection()

                    sheduled_call = self.master.reactor.callLater(5, lose_connection, build)

                    def cancel_lose_connection(_, call):
                        if call.active():
                            call.cancel()

                    d = build.stopBuild("Master Shutdown", results)
                    d.addBoth(cancel_lose_connection, sheduled_call)
                    if not is_building:
                        # if it is not building, then it must be a latent worker
                        # which is substantiating. Cancel it.
                        if build.workerforbuilder is not None and isinstance(
                            build.workerforbuilder.worker,
                            AbstractLatentWorker,
                        ):
                            build.workerforbuilder.worker.insubstantiate()
        # then wait for all builds to finish
        dl = []
        for builder in self.builders.values():
            for build in builder.building:
                # build may be waiting for ping to worker to succeed which
                # may never happen if the connection to worker was broken
                # without TCP connection being severed
                build.workerforbuilder.abortPingIfAny()

                dl.append(build.waitUntilFinished())
        if not dl:
            log.msg("No running jobs, starting shutdown immediately")
        else:
            log.msg(f"Waiting for {len(dl)} build(s) to finish")
            yield defer.DeferredList(dl)

        # Check that there really aren't any running builds
        n = 0
        for builder in self.builders.values():
            if builder.building:
                num_builds = len(builder.building)
                log.msg(f"Builder {builder} has {num_builds} builds running")
                n += num_builds
        if n > 0:
            # builds appeared (or failed to stop) — loop and try again
            log.msg(f"Not shutting down, there are {n} builds running")
            log.msg("Trying shutdown sequence again")
            yield util.asyncSleep(1)
        else:
            break

    # shutdown was cancelled
    if not self.shuttingDown:
        if quickMode:
            # restore the request distributor stopped above
            yield self.brd.setServiceParent(self)
        else:
            self.brd.distribute_only_waited_childs = False

        return

    if stopReactor:
        log.msg("Stopping reactor")
        self.master.reactor.stop()
def cancelCleanShutdown(self):
"""Cancel a clean shutdown that is already in progress, if any"""
if not self.shuttingDown:
return
log.msg("Cancelling clean shutdown")
self.shuttingDown = False
@metrics.countMethod('BotMaster.workerLost()')
def workerLost(self, bot: AbstractWorker):
metrics.MetricCountEvent.log("BotMaster.attached_workers", -1)
for b in self.builders.values():
if b.config is not None and bot.workername in b.config.workernames:
b.detached(bot)
@metrics.countMethod('BotMaster.getBuildersForWorker()')
def getBuildersForWorker(self, workername: str):
return [
b
for b in self.builders.values()
if b.config is not None and workername in b.config.workernames
]
    def getBuildernames(self):
        """Return the list of configured builder names.

        NOTE: returns the internal list object itself, not a copy.
        """
        return self.builderNames
def getBuilders(self):
return list(self.builders.values())
@defer.inlineCallbacks
def getBuilderById(self, builderid):
for builder in self.getBuilders():
if builderid == (yield builder.getBuilderId()):
return builder
return None
    @defer.inlineCallbacks
    def startService(self):
        """Start the service and subscribe to build-request MQ events."""

        @defer.inlineCallbacks
        def buildRequestAdded(key, msg):
            # kick the relevant builder whenever one of its requests becomes
            # available for claiming
            builderid = msg['builderid']
            builder = yield self.getBuilderById(builderid)
            if builder is not None:
                self.maybeStartBuildsForBuilder(builder.name)

        # consume both 'new' and 'unclaimed' build requests
        startConsuming = self.master.mq.startConsuming
        self.buildrequest_consumer_new = yield startConsuming(
            buildRequestAdded, ('buildrequests', None, "new")
        )
        self.buildrequest_consumer_unclaimed = yield startConsuming(
            buildRequestAdded, ('buildrequests', None, 'unclaimed')
        )
        self.buildrequest_consumer_cancel = yield startConsuming(
            self._buildrequest_canceled, ('control', 'buildrequests', None, 'cancel')
        )
        yield super().startService()
    @defer.inlineCallbacks
    def _buildrequest_canceled(self, key, msg):
        """Handle a ('control', 'buildrequests', brid, 'cancel') message.

        If the request is still unclaimed, claim it here and complete it as
        CANCELLED.  If it is already claimed, instead ask any builds created
        from it to stop.
        """
        brid = int(key[2])
        reason = msg.get('reason', 'no reason')

        # first, try to claim the request; if this fails, then it's too late to
        # cancel the build anyway
        try:
            # NOTE(review): the claim result is unused; 'b' is re-bound to the
            # builds below
            b = yield self.master.db.buildrequests.claimBuildRequests(brids=[brid])
        except AlreadyClaimedError:
            self.maybe_cancel_in_progress_buildrequest(brid, reason)

            # In case the build request has been claimed on this master, the call to
            # maybe_cancel_in_progress_buildrequest above will ensure that they are either visible
            # to the data API call below, or canceled.
            builds = yield self.master.data.get(("buildrequests", brid, "builds"))

            # Any other master will observe the buildrequest cancel messages and will try to
            # cancel the buildrequest or builds internally.
            #
            # TODO: do not try to cancel builds that run on another master. Note that duplicate
            # cancels do not have any downside.
            for b in builds:
                self.master.mq.produce(
                    ("control", "builds", str(b['buildid']), "stop"), {'reason': reason}
                )
            return

        # then complete it with 'CANCELLED'; this is the closest we can get to
        # cancelling a request without running into trouble with dangling
        # references.
        yield self.master.data.updates.completeBuildRequests([brid], CANCELLED)
        brdict = yield self.master.db.buildrequests.getBuildRequest(brid)
        self.master.mq.produce(('buildrequests', str(brid), 'cancel'), brdict)
    @defer.inlineCallbacks
    def reconfigServiceWithBuildbotConfig(self, new_config):
        """Apply a new master configuration: sync projects and builders,
        then let child services reconfigure, then poke every builder."""
        timer = metrics.Timer("BotMaster.reconfigServiceWithBuildbotConfig")
        timer.start()

        yield self.reconfigProjects(new_config)
        yield self.reconfigServiceBuilders(new_config)

        # call up
        yield super().reconfigServiceWithBuildbotConfig(new_config)

        # try to start a build for every builder; this is necessary at master
        # startup, and a good idea in any other case
        self.maybeStartBuildsForAllBuilders()

        timer.stop()
    @defer.inlineCallbacks
    def reconfigProjects(self, new_config):
        """Create or update project records to match ``new_config.projects``."""
        for project_config in new_config.projects:
            # find_project_id creates the row if it does not yet exist
            projectid = yield self.master.data.updates.find_project_id(project_config.name)
            yield self.master.data.updates.update_project_info(
                projectid,
                project_config.slug,
                project_config.description,
                project_config.description_format,
                render_description(project_config.description, project_config.description_format),
            )
    @defer.inlineCallbacks
    def reconfigServiceBuilders(self, new_config):
        """Add/remove Builder services so they match ``new_config.builders``.

        Existing builders with unchanged names are left in place; their own
        reconfiguration happens via the normal service reconfig pass.
        """
        timer = metrics.Timer("BotMaster.reconfigServiceBuilders")
        timer.start()

        # arrange builders by name
        old_by_name = {b.name: b for b in list(self) if isinstance(b, Builder)}
        old_set = set(old_by_name)
        new_by_name = {bc.name: bc for bc in new_config.builders}
        new_set = set(new_by_name)

        # calculate new builders, by name, and removed builders
        removed_names, added_names = util.diffSets(old_set, new_set)

        if removed_names or added_names:
            log.msg(f"adding {len(added_names)} new builders, removing {len(removed_names)}")

            for n in removed_names:
                builder = old_by_name[n]

                del self.builders[n]
                builder.master = None
                builder.botmaster = None

                yield builder.disownServiceParent()

            for n in added_names:
                builder = Builder(n)
                self.builders[n] = builder

                builder.botmaster = self
                builder.master = self.master
                yield builder.setServiceParent(self)

        self.builderNames = list(self.builders)

        yield self.master.data.updates.updateBuilderList(
            self.master.masterid, [util.bytes2unicode(n) for n in self.builderNames]
        )

        metrics.MetricCountEvent.log("num_builders", len(self.builders), absolute=True)

        timer.stop()
def stopService(self):
if self.buildrequest_consumer_new:
self.buildrequest_consumer_new.stopConsuming()
self.buildrequest_consumer_new = None
if self.buildrequest_consumer_unclaimed:
self.buildrequest_consumer_unclaimed.stopConsuming()
self.buildrequest_consumer_unclaimed = None
if self.buildrequest_consumer_cancel:
self.buildrequest_consumer_cancel.stopConsuming()
self.buildrequest_consumer_cancel = None
return super().stopService()
# Used to track buildrequests that are in progress of being started on this master.
def add_in_progress_buildrequest(self, brid):
self._starting_brid_to_cancel[brid] = False
def remove_in_progress_buildrequest(self, brid):
return self._starting_brid_to_cancel.pop(brid, None)
def maybe_cancel_in_progress_buildrequest(self, brid, reason):
"""
Ensures that after this call any builds resulting from build request will be visible or
cancelled.
"""
if brid in self._starting_brid_to_cancel:
self._starting_brid_to_cancel[brid] = reason
def maybeStartBuildsForBuilder(self, buildername):
"""
Call this when something suggests that a particular builder may now
be available to start a build.
@param buildername: the name of the builder
"""
self.brd.maybeStartBuildsOn([buildername])
def maybeStartBuildsForWorker(self, worker_name):
"""
Call this when something suggests that a particular worker may now be
available to start a build.
@param worker_name: the name of the worker
"""
builders = self.getBuildersForWorker(worker_name)
self.brd.maybeStartBuildsOn([b.name for b in builders])
def maybeStartBuildsForAllBuilders(self):
"""
Call this when something suggests that this would be a good time to
start some builds, but nothing more specific.
"""
self.brd.maybeStartBuildsOn(self.builderNames)
| 18,433 | Python | .py | 366 | 38.980874 | 100 | 0.6446 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,819 | buildrequestdistributor.py | buildbot_buildbot/master/buildbot/process/buildrequestdistributor.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import annotations
import copy
import math
import random
from datetime import datetime
from typing import TYPE_CHECKING
from twisted.internet import defer
from twisted.python import log
from twisted.python.failure import Failure
from buildbot.data import resultspec
from buildbot.db.buildrequests import BuildRequestModel
from buildbot.process import metrics
from buildbot.process.buildrequest import BuildRequest
from buildbot.util import deferwaiter
from buildbot.util import epoch2datetime
from buildbot.util import service
from buildbot.util.async_sort import async_sort
from buildbot.util.twisted import async_to_deferred
if TYPE_CHECKING:
from buildbot.process.builder import Builder
class BuildChooserBase:
    #
    # WARNING: This API is experimental and in active development.
    #
    # This internal object selects a new build+worker pair. It acts as a
    # generator, initializing its state on creation and offering up new
    # pairs until exhaustion. The object can be destroyed at any time
    # (eg, before the list exhausts), and can be "restarted" by abandoning
    # an old instance and creating a new one.
    #
    # The entry point is:
    # * bc.chooseNextBuild() - get the next (worker, [breqs]) or
    # (None, None)
    #
    # The default implementation of this class implements a default
    # chooseNextBuild() that delegates out to two other functions:
    # * bc.popNextBuild() - get the next (worker, breq) pair

    def __init__(self, bldr, master):
        self.bldr = bldr
        self.master = master
        # maps buildrequestid -> BuildRequest; filled lazily by
        # _getBuildRequestForBrdict() and pruned by _removeBuildRequest()
        self.breqCache = {}
        # cached list of unclaimed build request dicts; None means
        # "not fetched yet" (see _fetchUnclaimedBrdicts)
        self.unclaimedBrdicts = None

    @defer.inlineCallbacks
    def chooseNextBuild(self):
        """Return the next build as a ``(worker, [breqs])`` pair, or
        ``(None, None)`` when nothing can be started."""
        # Return the next build, as a (worker, [breqs]) pair
        worker, breq = yield self.popNextBuild()
        if not worker or not breq:
            return (None, None)

        return (worker, [breq])

    # Must be implemented by subclass
    def popNextBuild(self):
        """Pick and return the next startable ``(worker, breq)`` pair."""
        # Pick the next (worker, breq) pair; note this is pre-merge, so
        # it's just one breq
        raise NotImplementedError("Subclasses must implement this!")

    # - Helper functions that are generally useful to all subclasses -
    @defer.inlineCallbacks
    def _fetchUnclaimedBrdicts(self):
        # Sets up a cache of all the unclaimed brdicts. The cache is
        # saved at self.unclaimedBrdicts cache. If the cache already
        # exists, this function does nothing. If a refetch is desired, set
        # the self.unclaimedBrdicts to None before calling."""
        if self.unclaimedBrdicts is None:
            # TODO: use order of the DATA API
            brdicts = yield self.master.data.get(
                ('builders', (yield self.bldr.getBuilderId()), 'buildrequests'),
                [resultspec.Filter('claimed', 'eq', [False])],
            )
            # sort by buildrequestid, so the first is the oldest
            brdicts.sort(key=lambda brd: brd['buildrequestid'])
            self.unclaimedBrdicts = brdicts
        return self.unclaimedBrdicts

    @defer.inlineCallbacks
    def _getBuildRequestForBrdict(self, brdict: dict):
        """Return the cached BuildRequest for *brdict*, building (and caching)
        it on first use; returns None if the builder no longer exists."""
        # Turn a brdict into a BuildRequest into a brdict. This is useful
        # for API like 'nextBuild', which operate on BuildRequest objects.
        breq = self.breqCache.get(brdict['buildrequestid'])
        if not breq:
            builder = yield self.master.data.get(
                ('builders', brdict['builderid']), [resultspec.ResultSpec(fields=['name'])]
            )
            if not builder:
                return None

            model = BuildRequestModel(
                buildrequestid=brdict['buildrequestid'],
                buildsetid=brdict['buildsetid'],
                builderid=brdict['builderid'],
                buildername=builder['name'],
                submitted_at=brdict['submitted_at'],
            )
            # optional fields are copied only when present in the brdict
            if 'complete_at' in brdict:
                model.complete_at = brdict['complete_at']
            if 'complete' in brdict:
                model.complete = brdict['complete']
            if 'results' in brdict:
                model.results = brdict['results']
            if 'waited_for' in brdict:
                model.waited_for = brdict['waited_for']
            if 'priority' in brdict:
                model.priority = brdict['priority']
            if 'claimed_at' in brdict:
                model.claimed_at = brdict['claimed_at']
            if 'claimed_by_masterid' in brdict:
                model.claimed_by_masterid = brdict['claimed_by_masterid']

            breq = yield BuildRequest.fromBrdict(self.master, model)
            if breq:
                self.breqCache[model.buildrequestid] = breq
        return breq

    def _getBrdictForBuildRequest(self, breq):
        # Turn a BuildRequest back into a brdict. This operates from the
        # cache, which must be set up once via _fetchUnclaimedBrdicts

        if breq is None:
            return None

        brid = breq.id
        for brdict in self.unclaimedBrdicts:
            if brid == brdict['buildrequestid']:
                return brdict
        return None

    def _removeBuildRequest(self, breq):
        # Remove a BuildrRequest object (and its brdict)
        # from the caches

        if breq is None:
            return

        brdict = self._getBrdictForBuildRequest(breq)
        if brdict is not None:
            self.unclaimedBrdicts.remove(brdict)

        if breq.id in self.breqCache:
            del self.breqCache[breq.id]

    def _getUnclaimedBuildRequests(self):
        # Retrieve the list of BuildRequest objects for all unclaimed builds
        return defer.gatherResults([
            self._getBuildRequestForBrdict(brdict) for brdict in self.unclaimedBrdicts
        ])
class BasicBuildChooser(BuildChooserBase):
    # BasicBuildChooser generates build pairs via the configuration points:
    # * config.nextWorker (or random.choice if not set)
    # * config.nextBuild (or "pop top" if not set)
    #
    # For N workers, this will call nextWorker at most N times. If nextWorker
    # returns a worker that cannot satisfy the build chosen by nextBuild,
    # it will search for a worker that can satisfy the build. If one is found,
    # the workers that cannot be used are "recycled" back into a list
    # to be tried, in order, for the next chosen build.
    #
    # We check whether Builder.canStartBuild returns True for a particular
    # worker. It evaluates any Build properties that are known before build
    # and checks whether the worker may satisfy them. For example, the worker
    # must have the locks available.

    def __init__(self, bldr, master):
        super().__init__(bldr, master)

        # resolve the nextWorker hook: builder config, then global config,
        # then a random choice among the available workers
        self.nextWorker = self.bldr.config.nextWorker
        if not self.nextWorker:
            self.nextWorker = self.master.config.select_next_worker
        if not self.nextWorker:
            self.nextWorker = lambda _, workers, __: random.choice(workers) if workers else None

        self.workerpool = self.bldr.getAvailableWorkers()

        # Pick workers one at a time from the pool, and if the Builder says
        # they're usable (eg, locks can be satisfied), then prefer those
        # workers.
        self.preferredWorkers = []

        self.nextBuild = self.bldr.config.nextBuild

    @defer.inlineCallbacks
    def popNextBuild(self):
        """Return the next startable ``(worker, breq)`` pair, or
        ``(None, None)`` when no request can be started right now."""
        nextBuild = (None, None)

        while True:
            # 1. pick a build
            breq = yield self._getNextUnclaimedBuildRequest()
            if not breq:
                break

            if not self.workerpool and not self.preferredWorkers:
                self.workerpool = self.bldr.getAvailableWorkers()

            # 2. pick a worker
            worker = yield self._popNextWorker(breq)
            if not worker:
                break

            # either satisfy this build or we leave it for another day
            self._removeBuildRequest(breq)

            # 3. make sure worker+ is usable for the breq
            recycledWorkers = []
            while worker:
                canStart = yield self.canStartBuild(worker, breq)
                if canStart:
                    break

                # try a different worker
                recycledWorkers.append(worker)
                worker = yield self._popNextWorker(breq)

            # recycle the workers that we didn't use to the head of the queue
            # this helps ensure we run 'nextWorker' only once per worker choice
            if recycledWorkers:
                self._unpopWorkers(recycledWorkers)

            # 4. done? otherwise we will try another build
            if worker:
                nextBuild = (worker, breq)
                break

        return nextBuild

    @defer.inlineCallbacks
    def _getNextUnclaimedBuildRequest(self):
        """Choose the next unclaimed request via the ``nextBuild`` hook, or
        by highest priority when no hook is configured."""
        # ensure the cache is there
        yield self._fetchUnclaimedBrdicts()
        if not self.unclaimedBrdicts:
            return None

        if self.nextBuild:
            # nextBuild expects BuildRequest objects
            breqs = yield self._getUnclaimedBuildRequests()
            try:
                nextBreq = yield self.nextBuild(self.bldr, breqs)
                if nextBreq not in breqs:
                    # hook returned something we did not offer; ignore it
                    nextBreq = None
            except Exception:
                log.err(Failure(), f"from _getNextUnclaimedBuildRequest for builder '{self.bldr}'")
                nextBreq = None
        else:
            # otherwise just return the build with highest priority
            # NOTE(review): relies on data.get() returning a list-like result
            # exposing a .data attribute -- confirm against the data API
            brdict = sorted(self.unclaimedBrdicts.data, key=lambda b: b['priority'], reverse=True)[
                0
            ]
            nextBreq = yield self._getBuildRequestForBrdict(brdict)

        return nextBreq

    @defer.inlineCallbacks
    def _popNextWorker(self, buildrequest):
        """Pop the next candidate worker for *buildrequest*, preferring
        recycled workers; return None when the pool is exhausted."""
        # use 'preferred' workers first, if we have some ready
        if self.preferredWorkers:
            worker = self.preferredWorkers.pop(0)
            return worker

        while self.workerpool:
            try:
                worker = yield self.nextWorker(self.bldr, self.workerpool, buildrequest)
            except Exception:
                log.err(Failure(), f"from nextWorker for builder '{self.bldr}'")
                worker = None

            if not worker or worker not in self.workerpool:
                # bad worker or no worker returned
                break

            self.workerpool.remove(worker)
            return worker

        return None

    def _unpopWorkers(self, workers):
        # push the workers back to the front
        self.preferredWorkers[:0] = workers

    def canStartBuild(self, worker, breq):
        """Delegate to the builder's canStartBuild() check."""
        return self.bldr.canStartBuild(worker, breq)
class BuildRequestDistributor(service.AsyncMultiService):
    """
    Special-purpose class to handle distributing build requests to builders by
    calling their C{maybeStartBuild} method.

    This takes account of the C{prioritizeBuilders} configuration, and is
    highly re-entrant; that is, if a new build request arrives while builders
    are still working on the previous build request, then this class will
    correctly re-prioritize invocations of builders' C{maybeStartBuild}
    methods.
    """

    BuildChooser = BasicBuildChooser

    def __init__(self, botmaster):
        super().__init__()
        self.botmaster = botmaster

        # lock to ensure builders are only sorted once at any time
        self.pending_builders_lock = defer.DeferredLock()

        # sorted list of names of builders that need their maybeStartBuild
        # method invoked.
        self._pending_builders = []
        self.activity_lock = defer.DeferredLock()
        self.active = False

        self._deferwaiter = deferwaiter.DeferWaiter()
        self._activity_loop_deferred = None

        # Use in Master clean shutdown
        # this flag will allow the distributor to still
        # start new builds if it has a parent waiting on it
        self.distribute_only_waited_childs = False

    @property
    def can_distribute(self):
        """True while this distributor may claim and start new builds."""
        return bool(self.running) or self.distribute_only_waited_childs

    @defer.inlineCallbacks
    def stopService(self):
        # Lots of stuff happens asynchronously here, so we need to let it all
        # quiesce. First, let the parent stopService succeed between
        # activities; then the loop will stop calling itself, since
        # self.running is false.
        yield self.activity_lock.run(service.AsyncService.stopService, self)

        # now let any outstanding calls to maybeStartBuildsOn to finish, so
        # they don't get interrupted in mid-stride. This tends to be
        # particularly painful because it can occur when a generator is gc'd.
        # TEST-TODO: this behavior is not asserted in any way.
        yield self._deferwaiter.wait()

    @async_to_deferred
    async def maybeStartBuildsOn(self, new_builders: list[str]) -> None:
        """
        Try to start any builds that can be started right now. This function
        returns immediately, and promises to trigger those builders
        eventually.

        @param new_builders: names of new builders that should be given the
        opportunity to check for new requests.
        """
        if not self.can_distribute:
            return

        try:
            await self._deferwaiter.add(self._maybeStartBuildsOn(new_builders))
        except Exception as e:  # pragma: no cover
            log.err(e, f"while starting builds on {new_builders}")

    @async_to_deferred
    async def _maybeStartBuildsOn(self, new_builders: list[str]) -> None:
        """Merge *new_builders* into the pending list, re-sort it, and make
        sure the activity loop is running."""
        new_builder_set = set(new_builders)
        existing_pending = set(self._pending_builders)

        # if we won't add any builders, there's nothing to do
        if new_builder_set < existing_pending:
            return

        # reset the list of pending builders
        try:
            async with self.pending_builders_lock:
                # re-fetch existing_pending, in case it has changed
                # while acquiring the lock
                existing_pending = set(self._pending_builders)

                # then sort the new, expanded set of builders
                self._pending_builders = await self._sortBuilders(
                    list(existing_pending | new_builder_set)
                )

                # start the activity loop, if we aren't already
                # working on that.
                if not self.active:
                    self._activity_loop_deferred = defer.ensureDeferred(self._activityLoop())
        except Exception:  # pragma: no cover
            log.err(Failure(), f"while attempting to start builds on {self.name}")

    @defer.inlineCallbacks
    def _defaultSorter(self, master, builders):
        """Default prioritization: highest pending-request priority first,
        ties broken by oldest request time, then by builder name."""
        timer = metrics.Timer("BuildRequestDistributor._defaultSorter()")
        timer.start()

        @defer.inlineCallbacks
        def key(bldr):
            # Sort primarily highest priority of build requests
            priority = yield bldr.get_highest_priority()
            if priority is None:
                # builders with no pending buildrequest sort last
                priority = -math.inf

            # Break ties using the time of oldest build request
            time = yield bldr.getOldestRequestTime()
            if time is None:
                # builders with no pending buildrequest sort last
                time = math.inf
            else:
                if isinstance(time, datetime):
                    time = time.timestamp()

            return (-priority, time, bldr.name)

        yield async_sort(builders, key)

        timer.stop()
        return builders

    @defer.inlineCallbacks
    def _sortBuilders(self, buildernames):
        """Sort builder *names* using ``prioritizeBuilders`` (or the default
        sorter) and return the sorted list of names."""
        timer = metrics.Timer("BuildRequestDistributor._sortBuilders()")
        timer.start()
        # note that this takes and returns a list of builder names

        # convert builder names to builders; unknown names are dropped
        builders_dict = self.botmaster.builders
        builders = [builders_dict.get(n) for n in buildernames if n in builders_dict]

        # find a sorting function
        sorter = self.master.config.prioritizeBuilders
        if not sorter:
            sorter = self._defaultSorter

        # run it
        try:
            builders = yield sorter(self.master, builders)
        except Exception:
            log.err(Failure(), "prioritizing builders; order unspecified")

        # and return the names
        rv = [b.name for b in builders]
        timer.stop()
        return rv

    @metrics.timeMethod('BuildRequestDistributor._activityLoop()')
    async def _activityLoop(self) -> None:
        """Process pending builders one at a time until none remain or the
        distributor is stopped."""
        self.active = True

        pending_builders: list[Builder] = []
        while True:
            async with self.activity_lock:
                if not self.can_distribute:
                    break

                if not pending_builders:
                    # lock pending_builders, pop an element from it, and release
                    async with self.pending_builders_lock:
                        # bail out if we shouldn't keep looping
                        if not self._pending_builders:
                            break

                        # take that builder list, and run it until the end
                        # we make a copy of it, as it could be modified meanwhile
                        pending_builders = copy.copy(self._pending_builders)
                        self._pending_builders = []

                bldr_name = pending_builders.pop(0)

                # get the actual builder object; it may have been removed by a
                # reconfig in the meantime
                bldr = self.botmaster.builders.get(bldr_name)
                try:
                    if bldr:
                        await self._maybeStartBuildsOnBuilder(bldr)
                except Exception:
                    log.err(Failure(), f"from maybeStartBuild for builder '{bldr_name}'")

        self.active = False

    async def _maybeStartBuildsOnBuilder(self, bldr: Builder) -> None:
        """Claim build requests for *bldr* and start builds until the chooser
        runs out of (worker, requests) pairs."""
        # create a chooser to give us our next builds
        # this object is temporary and will go away when we're done
        bc = self.createBuildChooser(bldr, self.master)

        while True:
            worker, breqs = await bc.chooseNextBuild()
            if not worker or not breqs:
                break

            if self.distribute_only_waited_childs:
                # parenting is a field of Buildset
                # get the buildsets only for requests
                # that are waited for
                buildset_ids = set(br.bsid for br in breqs if br.waitedFor)
                if not buildset_ids:
                    continue

                # get buildsets if they have a parent
                buildsets_data: list[dict] = await self.master.data.get(
                    ('buildsets',),
                    filters=[
                        resultspec.Filter('bsid', 'in', buildset_ids),
                        resultspec.Filter('parent_buildid', 'ne', [None]),
                    ],
                    fields=['bsid', 'parent_buildid'],
                )
                parented_buildset_ids = set(bs['bsid'] for bs in buildsets_data)
                breqs = [br for br in breqs if br.bsid in parented_buildset_ids]
                if not breqs:
                    continue

            # claim brid's
            brids = [br.id for br in breqs]
            claimed_at_epoch = self.master.reactor.seconds()
            claimed_at = epoch2datetime(claimed_at_epoch)
            self._add_in_progress_brids(brids)
            if not (
                await self.master.data.updates.claimBuildRequests(brids, claimed_at=claimed_at)
            ):
                # some brids were already claimed, so start over
                bc = self.createBuildChooser(bldr, self.master)
                continue

            buildStarted = await bldr.maybeStartBuild(worker, breqs)
            if not buildStarted:
                await self.master.data.updates.unclaimBuildRequests(brids)
                self._remove_in_progress_brids(brids)

                # try starting builds again. If we still have a working worker,
                # then this may re-claim the same buildrequests
                # FIX: this previously passed self.name (the distributor
                # service's own name, not a builder name), which never matched
                # a builder in _sortBuilders, making the retry a no-op.
                self.botmaster.maybeStartBuildsForBuilder(bldr.name)

    def _add_in_progress_brids(self, brids):
        """Register *brids* with the botmaster as being started here."""
        for brid in brids:
            self.master.botmaster.add_in_progress_buildrequest(brid)

    def _remove_in_progress_brids(self, brids):
        """Unregister *brids* from the botmaster's in-progress tracking."""
        for brid in brids:
            self.master.botmaster.remove_in_progress_buildrequest(brid)

    def createBuildChooser(self, bldr, master):
        # just instantiate the build chooser requested
        return self.BuildChooser(bldr, master)

    @async_to_deferred
    async def _waitForFinish(self):
        """Wait for the activity loop (if any) to complete; used by tests."""
        if self._activity_loop_deferred is not None:
            await self._activity_loop_deferred
| 21,642 | Python | .py | 464 | 35.795259 | 99 | 0.629334 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,820 | workerforbuilder.py | buildbot_buildbot/master/buildbot/process/workerforbuilder.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import annotations
from typing import TYPE_CHECKING
from twisted.internet import defer
from twisted.python import log
from twisted.python.constants import NamedConstant
from twisted.python.constants import Names
if TYPE_CHECKING:
from buildbot.process.builder import Builder
from buildbot.worker.base import AbstractWorker
from buildbot.worker.latent import AbstractLatentWorker
class States(Names):
    """States of a (worker, builder) pairing; see AbstractWorkerForBuilder."""

    # The worker isn't attached, or is in the process of attaching.
    DETACHED = NamedConstant()

    # The worker is available to build: either attached, or a latent worker.
    AVAILABLE = NamedConstant()

    # The worker is building.
    BUILDING = NamedConstant()
class AbstractWorkerForBuilder:
    """Tracks the pairing of one worker with one builder.

    Subclasses implement the attach/detach lifecycle for regular workers
    (WorkerForBuilder) and latent workers (LatentWorkerForBuilder).
    """

    def __init__(self, builder: Builder):
        self.ping_watchers: list[defer.Deferred] = []
        self.state = None  # set in subclass
        self.worker: AbstractWorker | None = None
        self.builder = builder
        self.builder_name = builder.name
        self.locks = None
        # maps command name -> version string, as reported by the worker.
        # Populated in attached() and cleared in detached().  Initialized here
        # so getWorkerCommandVersion() cannot raise AttributeError when called
        # before the first attach.
        self.remoteCommands = None

    def __repr__(self):
        r = ["<", self.__class__.__name__]
        if self.builder_name:
            r.extend([" builder=", repr(self.builder_name)])
        if self.worker:
            r.extend([" worker=", repr(self.worker.workername)])
        r.extend([" state=", self.state.name, ">"])
        return ''.join(r)

    def getWorkerCommandVersion(self, command, oldversion=None):
        """Return the worker's version of *command*, or *oldversion* for
        very old workers that did not report command versions."""
        if self.remoteCommands is None:
            # the worker is 0.5.0 or earlier
            return oldversion
        return self.remoteCommands.get(command)

    def isAvailable(self):
        """Return True if this pairing can accept a new build."""
        # if this WorkerForBuilder is busy, then it's definitely not available
        if self.isBusy():
            return False

        # otherwise, check in with the Worker
        if self.worker:
            return self.worker.canStartBuild()

        # no worker? not very available.
        return False

    def isBusy(self):
        return self.state != States.AVAILABLE

    def buildStarted(self):
        # NOTE(review): unlike buildFinished(), this assumes self.worker is
        # set; callers only invoke it after a successful attach
        self.state = States.BUILDING
        self.worker.buildStarted(self)

    def buildFinished(self):
        self.state = States.AVAILABLE
        if self.worker:
            self.worker.buildFinished(self)

    @defer.inlineCallbacks
    def attached(self, worker, commands):
        """
        @type  worker: L{buildbot.worker.Worker}
        @param worker: the Worker that represents the worker as a whole
        @type  commands: dict: string -> string, or None
        @param commands: provides the worker's version of each RemoteCommand
        """
        self.remoteCommands = commands  # maps command name to version
        if self.worker is None:
            self.worker = worker
            self.worker.addWorkerForBuilder(self)
        else:
            assert self.worker == worker
        log.msg(f"Worker {worker.workername} attached to {self.builder_name}")

        yield self.worker.conn.remotePrint(message="attached")

    def substantiate_if_needed(self, build):
        """No-op for regular workers; overridden for latent workers."""
        return defer.succeed(True)

    def insubstantiate_if_needed(self):
        """No-op for regular workers; overridden for latent workers."""
        pass

    def ping(self):
        """Ping the worker to make sure it is still there. Returns a Deferred
        that fires with True if it is.
        """
        newping = not self.ping_watchers
        d = defer.Deferred()
        self.ping_watchers.append(d)
        if newping:
            # only one Ping is in flight at a time; later callers just wait
            # for the same result
            Ping().ping(self.worker.conn).addBoth(self._pong)
        return d

    def abortPingIfAny(self):
        """Fail all outstanding ping waiters with a PingException."""
        watchers = self.ping_watchers
        self.ping_watchers = []
        for d in watchers:
            d.errback(PingException('aborted ping'))

    def _pong(self, res):
        # fan the single ping result out to all waiters
        watchers = self.ping_watchers
        self.ping_watchers = []
        for d in watchers:
            d.callback(res)

    def detached(self):
        log.msg(f"Worker {self.worker.workername} detached from {self.builder_name}")
        if self.worker:
            self.worker.removeWorkerForBuilder(self)
        self.worker = None
        self.remoteCommands = None
class PingException(Exception):
    """Raised when a worker ping fails or is aborted."""

    pass
class Ping:
    """Single-use helper that pings a worker connection once.

    A failed ping disconnects the worker to avoid a requeue/ping loop.
    """

    # guards against accidental reuse of the same instance
    running = False

    def ping(self, conn):
        """Ping *conn*; returns a Deferred firing True on success or failing
        with the ping error (PingException if there is no connection)."""
        assert not self.running
        if not conn:
            # clearly the ping must fail
            return defer.fail(PingException("Worker not connected?"))
        self.running = True
        log.msg("sending ping")
        self.d = defer.Deferred()
        # TODO: add a distinct 'ping' command on the worker.. using 'print'
        # for this purpose is kind of silly.
        conn.remotePrint(message="ping").addCallbacks(
            self._pong, self._ping_failed, errbackArgs=(conn,)
        )
        return self.d

    def _pong(self, res):
        log.msg("ping finished: success")
        self.d.callback(True)

    def _ping_failed(self, res, conn):
        log.msg("ping finished: failure")
        # the worker has some sort of internal error, disconnect them. If we
        # don't, we'll requeue a build and ping them again right away,
        # creating a nasty loop.
        conn.loseConnection()
        self.d.errback(res)
class WorkerForBuilder(AbstractWorkerForBuilder):
    """Worker/builder pairing for ordinary (non-latent) workers."""

    def __init__(self, builder: Builder):
        super().__init__(builder)
        self.state = States.DETACHED

    @defer.inlineCallbacks
    def attached(self, worker, commands):
        yield super().attached(worker, commands)

        # Only set available on non-latent workers, since latent workers
        # only attach while a build is in progress.
        self.state = States.AVAILABLE

    def detached(self):
        # The base class logs the detach, unregisters this object from the
        # worker, and clears self.worker/self.remoteCommands.  (The previous
        # code re-checked self.worker here, but super().detached() has already
        # set it to None, so that branch was dead code.)
        super().detached()
        self.state = States.DETACHED
class LatentWorkerForBuilder(AbstractWorkerForBuilder):
    """Worker/builder pairing for latent (on-demand) workers.

    The worker is known at construction time and is substantiated on demand
    before each build.
    """

    def __init__(self, worker: AbstractLatentWorker, builder: Builder):
        super().__init__(builder)
        self.worker = worker
        self.state = States.AVAILABLE
        self.worker.addWorkerForBuilder(self)
        log.msg(f"Latent worker {worker.workername} attached to {self.builder_name}")

    def substantiate_if_needed(self, build):
        """Start substantiating the latent worker for *build*."""
        self.state = States.DETACHED
        d = self.substantiate(build)
        return d

    def insubstantiate_if_needed(self):
        """Shut the latent worker back down."""
        self.worker.insubstantiate()

    def attached(self, worker, commands):
        # When a latent worker is attached, it is actually because it prepared for a build
        # thus building and not available like for normal worker
        if self.state == States.DETACHED:
            self.state = States.BUILDING
        return super().attached(worker, commands)

    def substantiate(self, build):
        return self.worker.substantiate(self, build)

    def ping(self):
        """Ping, but fail immediately if the worker is not substantiated."""
        if not self.worker.substantiated:
            return defer.fail(PingException("worker is not substantiated"))
        return super().ping()
| 7,605 | Python | .py | 185 | 33.448649 | 90 | 0.666079 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,821 | build.py | buildbot_buildbot/master/buildbot/process/build.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import annotations
from functools import reduce
from typing import TYPE_CHECKING
from twisted.internet import defer
from twisted.internet import error
from twisted.python import failure
from twisted.python import log
from twisted.python.failure import Failure
from buildbot import interfaces
from buildbot.process import buildstep
from buildbot.process import metrics
from buildbot.process import properties
from buildbot.process.locks import get_real_locks_from_accesses
from buildbot.process.results import CANCELLED
from buildbot.process.results import EXCEPTION
from buildbot.process.results import FAILURE
from buildbot.process.results import RETRY
from buildbot.process.results import SUCCESS
from buildbot.process.results import WARNINGS
from buildbot.process.results import computeResultAndTermination
from buildbot.process.results import statusToString
from buildbot.process.results import worst_status
from buildbot.reporters.utils import getURLForBuild
from buildbot.util import Notifier
from buildbot.util import bytes2unicode
from buildbot.util.eventual import eventually
if TYPE_CHECKING:
from buildbot.locks import BaseLockId
from buildbot.process.builder import Builder
from buildbot.process.workerforbuilder import AbstractWorkerForBuilder
from buildbot.util.subscription import Subscription
class Build(properties.PropertiesMixin):
    """I represent a single build by a single worker. Specialized Builders can
    use subclasses of Build to hold status information unique to those build
    processes.
    I control B{how} the build proceeds. The actual build is broken up into a
    series of steps, saved in the .buildSteps[] array as a list of
    L{buildbot.process.step.BuildStep} objects. Each step is a single remote
    command, possibly a shell command.
    After the build, I go away.
    I can be used by a factory by setting buildClass on
    L{buildbot.process.factory.BuildFactory}
    @ivar requests: the list of L{BuildRequest}s that triggered me
    """

    # property names through which a build can redirect itself to a
    # "virtual" builder (see getBuilderId)
    VIRTUAL_BUILDERNAME_PROP = "virtual_builder_name"
    VIRTUAL_BUILDERDESCRIPTION_PROP = "virtual_builder_description"
    VIRTUAL_BUILDER_PROJECT_PROP = "virtual_builder_project"
    VIRTUAL_BUILDERTAGS_PROP = "virtual_builder_tags"

    workdir = "build"
    reason = "changes"
    finished = False
    # overall build result; starts as SUCCESS and may only get worse
    results: int | None = None
    stopped = False
    set_runtime_properties = True
    # disconnect-notification subscription, set while connected to a worker
    subs: set[Subscription] | None = None

    class Sentinel:
        pass

    _sentinel = Sentinel()  # used as a sentinel to indicate unspecified initial_value
    def __init__(self, requests, builder: Builder) -> None:
        """Create a build for the given build *requests* on *builder*."""
        self.requests = requests
        self.builder = builder
        self.master = builder.master
        self.workerforbuilder: AbstractWorkerForBuilder | None = None
        self.locks: list[BaseLockId] = []  # list of lock accesses
        self._locks_to_acquire: list[
            tuple[BaseLockId, str]
        ] = []  # list of (real_lock, access) tuples
        # build a source stamp
        self.sources = requests[0].mergeSourceStampsWith(requests[1:])
        self.reason = requests[0].mergeReasons(requests[1:])
        # synthetic steps accounting for worker preparation / lock waiting
        self._preparation_step = None
        self._locks_acquire_step = None
        self.currentStep = None
        self.workerEnvironment: dict[str, str] = {}
        self.buildid = None
        self._buildid_notifier = Notifier()
        self.number = None
        self.executedSteps: list[buildstep.BuildStep] = []
        self.stepnames: dict[str, int] = {}
        self.terminate = False
        # (lock, access, deferred) while waiting for a busy lock, else None
        self._acquiringLock = None
        self._builderid = None
        # overall results, may downgrade after each step
        self.results = SUCCESS
        self.properties = properties.Properties()
        self.stopped_reason = None
        # tracks execution during the build finish phase
        self._locks_released = False
        self._build_finished = False
        # tracks execution during substantiation
        self._is_substantiating = False
        # tracks the config version for locks
        self.config_version = builder.config_version
    def getProperties(self):
        """Return this build's Properties collection."""
        return self.properties
    def setLocks(self, lockList):
        """Record the lock accesses this build must hold while running."""
        self.locks = lockList
    @defer.inlineCallbacks
    def _setup_locks(self):
        # resolve the configured lock accesses into real (lock, access) pairs
        self._locks_to_acquire = yield get_real_locks_from_accesses(self.locks, self)
    def setWorkerEnvironment(self, env):
        """Remember the environment dict to apply to commands on the worker."""
        # TODO: remove once we don't have anything depending on this method or attribute
        # e.g., old-style steps (ShellMixin pulls the environment out of the
        # builder directly)
        self.workerEnvironment = env
def getSourceStamp(self, codebase=''):
for source in self.sources:
if source.codebase == codebase:
return source
return None
    def getAllSourceStamps(self):
        """Return a fresh list of all source stamps for this build."""
        return list(self.sources)
    @staticmethod
    def allChangesFromSources(sources):
        """Yield every Change carried by each source stamp, in order."""
        for s in sources:
            yield from s.changes
    def allChanges(self):
        """Return an iterator over every Change feeding this build."""
        return Build.allChangesFromSources(self.sources)
def allFiles(self):
# return a list of all source files that were changed
files = []
for c in self.allChanges():
for f in c.files:
files.append(f)
return files
    def __repr__(self):
        """Debug representation with builder name, build number and result."""
        return (
            f"<Build {self.builder.name} number:{self.number!r} "
            f"results:{statusToString(self.results)}>"
        )
def blamelist(self):
# Note that this algorithm is also implemented in
# buildbot.reporters.utils.getResponsibleUsersForBuild, but using the data api.
# it is important for the UI to have the blamelist easily available.
# The best way is to make sure the owners property is set to full blamelist
blamelist = []
for c in self.allChanges():
if c.who not in blamelist:
blamelist.append(c.who)
for source in self.sources:
if source.patch: # Add patch author to blamelist
blamelist.append(source.patch_info[0])
blamelist.sort()
return blamelist
def changesText(self):
changetext = ""
for c in self.allChanges():
changetext += "-" * 60 + "\n\n" + c.asText() + "\n"
# consider sorting these by number
return changetext
    def setStepFactories(self, step_factories):
        """Set a list of 'step factories', which are tuples of (class,
        kwargs), where 'class' is generally a subclass of step.BuildStep .
        These are used to create the Steps themselves when the Build starts
        (as opposed to when it is first created). By creating the steps
        later, their __init__ method will have access to things like
        build.allFiles() ."""
        # copy so later mutation of the caller's sequence cannot affect us
        self.stepFactories = list(step_factories)
useProgress = True
    def getWorkerCommandVersion(self, command, oldversion=None):
        """Delegate to the workerforbuilder's command-version lookup."""
        return self.workerforbuilder.getWorkerCommandVersion(command, oldversion)
    def getWorkerName(self):
        """Return the (cached) name of the worker running this build."""
        return self.workername
    @staticmethod
    @defer.inlineCallbacks
    def setup_properties_known_before_build_starts(props, requests, builder, workerforbuilder=None):
        """Populate *props* with everything knowable before the worker attaches.

        Merged in order: global config properties, Change properties from the
        source stamps, build request properties, builder properties and
        (when *workerforbuilder* is given) worker properties.
        """
        # Note that this function does not setup the 'builddir' worker property
        # It's not possible to know it until before the actual worker has
        # attached.
        # start with global properties from the configuration
        props.updateFromProperties(builder.master.config.properties)
        # from the SourceStamps, which have properties via Change
        sources = requests[0].mergeSourceStampsWith(requests[1:])
        for change in Build.allChangesFromSources(sources):
            props.updateFromProperties(change.properties)
        # get any properties from requests (this is the path through which
        # schedulers will send us properties)
        for rq in requests:
            props.updateFromProperties(rq.properties)
        # get builder properties
        yield builder.setup_properties(props)
        # get worker properties
        # navigate our way back to the L{buildbot.worker.Worker}
        # object that came from the config, and get its properties
        if workerforbuilder is not None:
            workerforbuilder.worker.setupProperties(props)
@staticmethod
def setupBuildProperties(props, requests, sources=None, number=None):
# now set some properties of our own, corresponding to the
# build itself
props.setProperty("buildnumber", number, "Build")
if sources is None:
sources = requests[0].mergeSourceStampsWith(requests[1:])
if sources and len(sources) == 1:
# old interface for backwards compatibility
source = sources[0]
props.setProperty("branch", source.branch, "Build")
props.setProperty("revision", source.revision, "Build")
props.setProperty("repository", source.repository, "Build")
props.setProperty("codebase", source.codebase, "Build")
props.setProperty("project", source.project, "Build")
    def setupWorkerProperties(self, workerforbuilder):
        """Set the 'basedir' and 'builddir' properties from the worker.

        Requires the worker to be attached, since its path_module is needed
        to join the builddir correctly for its platform.
        """
        path_module = workerforbuilder.worker.path_module
        # navigate our way back to the L{buildbot.worker.Worker}
        # object that came from the config, and get its properties
        worker_basedir = workerforbuilder.worker.worker_basedir
        if worker_basedir:
            builddir = path_module.join(
                bytes2unicode(worker_basedir),
                bytes2unicode(self.builder.config.workerbuilddir),
            )
            self.setProperty("basedir", worker_basedir, "Worker")
            self.setProperty("builddir", builddir, "Worker")
    def setupWorkerForBuilder(self, workerforbuilder: AbstractWorkerForBuilder):
        """Cache worker details (path module, name, info) on the build."""
        assert workerforbuilder.worker is not None
        self.path_module = workerforbuilder.worker.path_module
        self.workername = workerforbuilder.worker.workername
        self.worker_info = workerforbuilder.worker.info
@defer.inlineCallbacks
def getBuilderId(self):
if self._builderid is None:
if self.hasProperty(self.VIRTUAL_BUILDERNAME_PROP):
self._builderid = yield self.builder.getBuilderIdForName(
self.getProperty(self.VIRTUAL_BUILDERNAME_PROP)
)
description = self.getProperty(
self.VIRTUAL_BUILDERDESCRIPTION_PROP, self.builder.config.description
)
project = self.getProperty(
self.VIRTUAL_BUILDER_PROJECT_PROP, self.builder.config.project
)
tags = self.getProperty(self.VIRTUAL_BUILDERTAGS_PROP, self.builder.config.tags)
if type(tags) == type([]) and '_virtual_' not in tags:
tags.append('_virtual_')
projectid = yield self.builder.find_project_id(project)
# Note: not waiting for updateBuilderInfo to complete
self.master.data.updates.updateBuilderInfo(
self._builderid, description, None, None, projectid, tags
)
else:
self._builderid = yield self.builder.getBuilderId()
return self._builderid
    @defer.inlineCallbacks
    def startBuild(self, workerforbuilder: AbstractWorkerForBuilder):
        """This method sets up the build, then starts it by invoking the
        first Step. It returns a Deferred which will fire when the build
        finishes. This Deferred is guaranteed to never errback."""
        self.workerforbuilder = workerforbuilder
        self.conn = None
        worker = workerforbuilder.worker
        assert worker is not None
        # Cache the worker information as variables instead of accessing via worker, as the worker
        # will disappear during disconnection and some of these properties may still be needed.
        self.workername = worker.workername
        self.worker_info = worker.info
        log.msg(f"{self}.startBuild")
        # TODO: this will go away when build collapsing is implemented; until
        # then we just assign the build to the first buildrequest
        brid = self.requests[0].id
        builderid = yield self.getBuilderId()
        assert self.master is not None
        assert self.master.data is not None
        self.buildid, self.number = yield self.master.data.updates.addBuild(
            builderid=builderid, buildrequestid=brid, workerid=worker.workerid
        )
        # unblock anyone waiting in get_buildid()
        self._buildid_notifier.notify(self.buildid)
        assert self.master.mq is not None
        self.stopBuildConsumer = yield self.master.mq.startConsuming(
            self.controlStopBuild, ("control", "builds", str(self.buildid), "stop")
        )
        # Check if buildrequest has been cancelled in the mean time. Must be done after subscription
        # to stop control endpoint is established to avoid race condition.
        for r in self.requests:
            reason = self.master.botmaster.remove_in_progress_buildrequest(r.id)
            if isinstance(reason, str):
                yield self.stopBuild(reason=reason)
                return
        # the preparation step counts the time needed for preparing the worker and getting the
        # locks.
        # we cannot use a real step as we don't have a worker yet.
        self._preparation_step = buildstep.create_step_from_step_or_factory(
            buildstep.BuildStep(name="worker_preparation")
        )
        assert self._preparation_step is not None
        self._preparation_step.setBuild(self)
        yield self._preparation_step.addStep()
        assert self.master.data.updates is not None
        yield self.master.data.updates.startStep(self._preparation_step.stepid, locks_acquired=True)
        Build.setupBuildProperties(self.getProperties(), self.requests, self.sources, self.number)
        yield self._setup_locks()
        metrics.MetricCountEvent.log('active_builds', 1)
        if self._locks_to_acquire:
            # Note that most of the time locks will already free because build distributor does
            # not start builds that cannot acquire locks immediately. However on a loaded master
            # it may happen that more builds are cleared to start than there are free locks. In
            # such case some of the builds will be blocked and wait for the locks.
            self._locks_acquire_step = buildstep.create_step_from_step_or_factory(
                buildstep.BuildStep(name="locks_acquire")
            )
            self._locks_acquire_step.setBuild(self)
            yield self._locks_acquire_step.addStep()
        # make sure properties are available to people listening on 'new'
        # events
        yield self.master.data.updates.setBuildProperties(self.buildid, self)
        yield self.master.data.updates.setBuildStateString(self.buildid, 'starting')
        yield self.master.data.updates.generateNewBuildEvent(self.buildid)
        try:
            self.setupBuild()  # create .steps
        except Exception:
            yield self.buildPreparationFailure(Failure(), "setupBuild")
            yield self.buildFinished(['Build.setupBuild', 'failed'], EXCEPTION)
            return
        # flush properties in the beginning of the build
        yield self.master.data.updates.setBuildProperties(self.buildid, self)
        yield self.master.data.updates.setBuildStateString(self.buildid, 'preparing worker')
        try:
            ready_or_failure = False
            if workerforbuilder.worker and workerforbuilder.worker.acquireLocks():
                self._is_substantiating = True
                ready_or_failure = yield workerforbuilder.substantiate_if_needed(self)
        except Exception:
            ready_or_failure = Failure()
        finally:
            self._is_substantiating = False
        # If prepare returns True then it is ready and we start a build
        # If it returns failure then we don't start a new build.
        if ready_or_failure is not True:
            yield self.buildPreparationFailure(ready_or_failure, "worker_prepare")
            if self.stopped:
                yield self.buildFinished(["worker", "cancelled"], self.results)
            elif isinstance(ready_or_failure, Failure) and ready_or_failure.check(
                interfaces.LatentWorkerCannotSubstantiate
            ):
                yield self.buildFinished(["worker", "cannot", "substantiate"], EXCEPTION)
            else:
                yield self.buildFinished(["worker", "not", "available"], RETRY)
            return
        # ping the worker to make sure they're still there. If they've
        # fallen off the map (due to a NAT timeout or something), this
        # will fail in a couple of minutes, depending upon the TCP
        # timeout.
        #
        # TODO: This can unnecessarily suspend the starting of a build, in
        # situations where the worker is live but is pushing lots of data to
        # us in a build.
        yield self.master.data.updates.setBuildStateString(self.buildid, 'pinging worker')
        log.msg(f"starting build {self}.. pinging the worker {workerforbuilder}")
        try:
            ping_success_or_failure = yield workerforbuilder.ping()
        except Exception:
            ping_success_or_failure = Failure()
        if ping_success_or_failure is not True:
            yield self.buildPreparationFailure(ping_success_or_failure, "worker_ping")
            yield self.buildFinished(["worker", "not", "pinged"], RETRY)
            return
        yield self.master.data.updates.setStepStateString(
            self._preparation_step.stepid, f"worker {self.getWorkerName()} ready"
        )
        yield self.master.data.updates.finishStep(self._preparation_step.stepid, SUCCESS, False)
        self.conn = workerforbuilder.worker.conn
        # To retrieve the worker properties, the worker must be attached as we depend on its
        # path_module for at least the builddir property. Latent workers become attached only after
        # preparing them, so we can't setup the builddir property earlier like the rest of
        # properties
        self.setupWorkerProperties(workerforbuilder)
        self.setupWorkerForBuilder(workerforbuilder)
        self.subs = self.conn.notifyOnDisconnect(self.lostRemote)
        # tell the remote that it's starting a build, too
        try:
            yield self.conn.remoteStartBuild(self.builder.name)
        except Exception:
            yield self.buildPreparationFailure(Failure(), "start_build")
            yield self.buildFinished(["worker", "not", "building"], RETRY)
            return
        if self._locks_to_acquire:
            yield self.master.data.updates.setBuildStateString(self.buildid, "acquiring locks")
            locks_acquire_start_at = int(self.master.reactor.seconds())
            yield self.master.data.updates.startStep(
                self._locks_acquire_step.stepid, started_at=locks_acquire_start_at
            )
            yield self.acquireLocks()
            locks_acquired_at = int(self.master.reactor.seconds())
            yield self.master.data.updates.set_step_locks_acquired_at(
                self._locks_acquire_step.stepid, locks_acquired_at=locks_acquired_at
            )
            yield self.master.data.updates.add_build_locks_duration(
                self.buildid, duration_s=locks_acquired_at - locks_acquire_start_at
            )
            yield self.master.data.updates.setStepStateString(
                self._locks_acquire_step.stepid, "locks acquired"
            )
            yield self.master.data.updates.finishStep(
                self._locks_acquire_step.stepid, SUCCESS, False
            )
        yield self.master.data.updates.setBuildStateString(self.buildid, 'building')
        # start the sequence of steps
        self.startNextStep()
    @defer.inlineCallbacks
    def buildPreparationFailure(self, why, state_string):
        """Record a failure that happened before the first real step ran.

        Finishes the synthetic 'worker_preparation' step as CANCELLED when
        the build was deliberately stopped, or as EXCEPTION (after putting
        the worker in quarantine) otherwise.
        """
        if self.stopped:
            # if self.stopped, then this failure is a LatentWorker's failure to substantiate
            # which we triggered on purpose in stopBuild()
            log.msg("worker stopped while " + state_string, why)
            yield self.master.data.updates.finishStep(
                self._preparation_step.stepid, CANCELLED, False
            )
        else:
            log.err(why, "while " + state_string)
            self.workerforbuilder.worker.putInQuarantine()
            if isinstance(why, failure.Failure):
                yield self._preparation_step.addLogWithFailure(why)
            elif isinstance(why, Exception):
                yield self._preparation_step.addLogWithException(why)
            yield self.master.data.updates.setStepStateString(
                self._preparation_step.stepid, "error while " + state_string
            )
            yield self.master.data.updates.finishStep(
                self._preparation_step.stepid, EXCEPTION, False
            )
    def acquireLocks(self, res=None):
        """Claim every configured lock; wait and retry when one is busy.

        Returns a Deferred that fires once all locks are claimed (or
        immediately when there is nothing to claim or the build stopped).
        """
        self._acquiringLock = None
        if not self._locks_to_acquire:
            return defer.succeed(None)
        if self.stopped:
            return defer.succeed(None)
        log.msg(f"acquireLocks(build {self}, locks {self._locks_to_acquire})")
        for lock, access in self._locks_to_acquire:
            if not lock.isAvailable(self, access):
                log.msg(f"Build {self} waiting for lock {lock}")
                d = lock.waitUntilMaybeAvailable(self, access)
                d.addCallback(self.acquireLocks)
                # remembered so stopBuild()/lostRemote() can abort the wait
                self._acquiringLock = (lock, access, d)
                return d
        # all locks are available, claim them all
        for lock, access in self._locks_to_acquire:
            lock.claim(self, access)
        return defer.succeed(None)
def setUniqueStepName(self, step):
# If there are any name collisions, we add a count to the loser
# until it is unique.
name = step.name
if name in self.stepnames:
count = self.stepnames[name]
count += 1
self.stepnames[name] = count
name = f"{step.name}_{count}"
else:
self.stepnames[name] = 0
step.name = name
    def setupBuildSteps(self, step_factories):
        """Instantiate BuildSteps from *step_factories*, bound to this build
        and its worker, and return them as a list."""
        steps = []
        for factory in step_factories:
            step = buildstep.create_step_from_step_or_factory(factory)
            step.setBuild(self)
            step.setWorker(self.workerforbuilder.worker)
            steps.append(step)
            if self.useProgress:
                step.setupProgress()
        return steps
    def setupBuild(self):
        """Create the concrete BuildSteps and seed build-wide bookkeeping
        (the 'owners' property and the result text list)."""
        # create the actual BuildSteps.
        self.steps = self.setupBuildSteps(self.stepFactories)
        owners = set(self.blamelist())
        # gather owners from build requests
        owners.update({r.properties['owner'] for r in self.requests if "owner" in r.properties})
        if owners:
            self.setProperty('owners', sorted(owners), 'Build')
        self.text = []  # list of text string lists (text2)
    def addStepsAfterCurrentStep(self, step_factories):
        """Insert freshly built steps right after the step that is running."""
        # Add the new steps after the step that is running.
        # The running step has already been popped from self.steps
        self.steps[0:0] = self.setupBuildSteps(step_factories)
    def addStepsAfterLastStep(self, step_factories):
        """Append freshly built steps at the end of the remaining queue."""
        # Add the new steps to the end.
        self.steps.extend(self.setupBuildSteps(step_factories))
def getNextStep(self):
"""This method is called to obtain the next BuildStep for this build.
When it returns None (or raises a StopIteration exception), the build
is complete."""
if not self.steps:
return None
if not self.conn:
return None
if self.terminate or self.stopped:
# Run any remaining alwaysRun steps, and skip over the others
while True:
s = self.steps.pop(0)
if s.alwaysRun:
return s
if not self.steps:
return None
else:
return self.steps.pop(0)
    def startNextStep(self):
        """Advance to the next step, or finish the build when none remain."""
        try:
            s = self.getNextStep()
        except StopIteration:
            s = None
        if not s:
            return self.allStepsDone()
        self.executedSteps.append(s)
        self.currentStep = s
        # the following function returns a deferred, but we don't wait for it
        self._start_next_step_impl(s)
        return defer.succeed(None)
    @defer.inlineCallbacks
    def _start_next_step_impl(self, step):
        """Run *step*, merge its results, then chain into startNextStep().

        Any exception ends the build with EXCEPTION; this coroutine is
        fire-and-forget from startNextStep's point of view.
        """
        try:
            results = yield step.startStep(self.conn)
            yield self.master.data.updates.setBuildProperties(self.buildid, self)
            self.currentStep = None
            if self.finished:
                return  # build was interrupted, don't keep building
            terminate = yield self.stepDone(results, step)  # interpret/merge results
            if terminate:
                self.terminate = True
            yield self.startNextStep()
        except Exception as e:
            log.msg(f"{self} build got exception when running step {step}")
            log.err(e)
            yield self.master.data.updates.setBuildProperties(self.buildid, self)
            # Note that buildFinished can't throw exception
            yield self.buildFinished(["build", "exception"], EXCEPTION)
    @defer.inlineCallbacks
    def stepDone(self, results, step):
        """This method is called when the BuildStep completes. It is passed a
        status object from the BuildStep and is responsible for merging the
        Step's results into those of the overall Build.

        Returns True when the build should terminate early.
        """
        terminate = False
        text = None
        if isinstance(results, tuple):
            results, text = results
        # result codes are plain ints (type(SUCCESS) is int)
        assert isinstance(results, type(SUCCESS)), f"got {results!r}"
        summary = yield step.getBuildResultSummary()
        if 'build' in summary:
            text = [summary['build']]
        log.msg(f" step '{step.name}' complete: {statusToString(results)} ({text})")
        if text:
            self.text.extend(text)
            self.master.data.updates.setBuildStateString(
                self.buildid, bytes2unicode(" ".join(self.text))
            )
        self.results, terminate = computeResultAndTermination(step, results, self.results)
        if not self.conn:
            # force the results to retry if the connection was lost
            self.results = RETRY
            terminate = True
        return terminate
    def lostRemote(self, conn=None):
        """Handle the worker connection going away mid-build."""
        # the worker went away. There are several possible reasons for this,
        # and they aren't necessarily fatal. For now, kill the build, but
        # TODO: see if we can resume the build when it reconnects.
        log.msg(f"{self}.lostRemote")
        self.conn = None
        self.text = ["lost", "connection"]
        self.results = RETRY
        if self.currentStep and self.currentStep.results is None:
            # this should cause the step to finish.
            log.msg(" stopping currentStep", self.currentStep)
            self.currentStep.interrupt(Failure(error.ConnectionLost()))
        else:
            self.text = ["lost", "connection"]
            self.stopped = True
            # abort any pending wait on a busy lock
            if self._acquiringLock:
                lock, access, d = self._acquiringLock
                lock.stopWaitingUntilAvailable(self, access, d)
    def controlStopBuild(self, key, params):
        """MQ control endpoint: forward a 'stop' message to stopBuild()."""
        return self.stopBuild(**params)
    @defer.inlineCallbacks
    def stopBuild(self, reason="<no reason given>", results=CANCELLED):
        """Abort the build, interrupting the running step if there is one."""
        # the idea here is to let the user cancel a build because, e.g.,
        # they realized they committed a bug and they don't want to waste
        # the time building something that they know will fail. Another
        # reason might be to abandon a stuck build. We want to mark the
        # build as failed quickly rather than waiting for the worker's
        # timeout to kill it on its own.
        log.msg(f" {self}: stopping build: {reason} {results}")
        if self.finished:
            return
        self.stopped_reason = reason
        self.stopped = True
        if self.currentStep and self.currentStep.results is None:
            yield self.currentStep.interrupt(reason)
        self.results = results
        if self._acquiringLock:
            lock, access, d = self._acquiringLock
            lock.stopWaitingUntilAvailable(self, access, d)
        elif self._is_substantiating:
            # We're having a latent worker that hasn't been substantiated yet. We need to abort
            # that to not have a latent worker without an associated build
            self.workerforbuilder.insubstantiate_if_needed()
def allStepsDone(self):
if self.results == FAILURE:
text = ["failed"]
elif self.results == WARNINGS:
text = ["warnings"]
elif self.results == EXCEPTION:
text = ["exception"]
elif self.results == RETRY:
text = ["retry"]
elif self.results == CANCELLED:
text = ["cancelled"]
else:
text = ["build", "successful"]
if self.stopped_reason is not None:
text.extend([f'({self.stopped_reason})'])
text.extend(self.text)
return self.buildFinished(text, self.results)
    @defer.inlineCallbacks
    def buildFinished(self, text, results):
        """This method must be called when the last Step has completed. It
        marks the Build as complete and returns the Builder to the 'idle'
        state.
        It takes two arguments which describe the overall build status:
        text, results. 'results' is one of the possible results (see buildbot.process.results).
        If 'results' is SUCCESS or WARNINGS, we will permit any dependent
        builds to start. If it is 'FAILURE', those builds will be
        abandoned.
        This method never throws."""
        try:
            self.stopBuildConsumer.stopConsuming()
            self.finished = True
            if self.conn:
                self.subs.unsubscribe()
                self.subs = None
                self.conn = None
            log.msg(f" {self}: build finished")
            self.results = worst_status(self.results, results)
            # lock release is deferred to the reactor so it happens outside
            # of this call stack
            eventually(self.releaseLocks)
            metrics.MetricCountEvent.log('active_builds', -1)
            yield self.master.data.updates.setBuildStateString(
                self.buildid, bytes2unicode(" ".join(text))
            )
            yield self.master.data.updates.finishBuild(self.buildid, self.results)
            if self.results == EXCEPTION:
                # When a build has an exception, put the worker in quarantine for a few seconds
                # to make sure we try next build with another worker
                self.workerforbuilder.worker.putInQuarantine()
            elif self.results != RETRY:
                # This worker looks sane if status is neither retry or exception
                # Avoid a race in case the build step reboot the worker
                if self.workerforbuilder.worker is not None:
                    self.workerforbuilder.worker.resetQuarantine()
            # mark the build as finished
            self.workerforbuilder.buildFinished()
            self.builder.buildFinished(self, self.workerforbuilder)
            self._tryScheduleBuildsAfterLockUnlock(build_finished=True)
        except Exception:
            log.err(
                None,
                'from finishing a build; this is a '
                'serious error - please file a bug at http://buildbot.net',
            )
    def releaseLocks(self):
        """Release every lock this build still owns, then poke the scheduler."""
        if self._locks_to_acquire:
            log.msg(f"releaseLocks({self}): {self._locks_to_acquire}")
            for lock, access in self._locks_to_acquire:
                if lock.isOwner(self, access):
                    lock.release(self, access)
        self._tryScheduleBuildsAfterLockUnlock(locks_released=True)
    def _tryScheduleBuildsAfterLockUnlock(self, locks_released=False, build_finished=False):
        """Kick the botmaster once both lock release and build teardown are done."""
        # we need to inform the botmaster to attempt to schedule any pending
        # build request if we released any locks. This is because buildrequest
        # may be started for a completely unrelated builder and yet depend on
        # a lock released by this build.
        #
        # TODO: the current approach is dumb as we just attempt to schedule
        # all buildrequests. A much better idea would be to record the reason
        # of why a buildrequest was not scheduled in the BuildRequestDistributor
        # and then attempt to schedule only these buildrequests which may have
        # had that reason resolved.
        # this function is complicated by the fact that the botmaster must be
        # informed only when all locks have been released and the actions in
        # buildFinished have concluded. Since releaseLocks is called using
        # eventually this may happen in any order.
        self._locks_released = self._locks_released or locks_released
        self._build_finished = self._build_finished or build_finished
        if not self._locks_to_acquire:
            return
        if self._locks_released and self._build_finished:
            self.builder.botmaster.maybeStartBuildsForAllBuilders()
def getSummaryStatistic(self, name, summary_fn, initial_value=_sentinel):
step_stats_list = [
st.getStatistic(name) for st in self.executedSteps if st.hasStatistic(name)
]
if initial_value is self._sentinel:
return reduce(summary_fn, step_stats_list)
return reduce(summary_fn, step_stats_list, initial_value)
    @defer.inlineCallbacks
    def getUrl(self):
        """Return the web UI URL for this build."""
        builder_id = yield self.getBuilderId()
        return getURLForBuild(self.master, builder_id, self.number)
    @defer.inlineCallbacks
    def get_buildid(self):
        """Return the build id, waiting until startBuild() has assigned one."""
        if self.buildid is not None:
            return self.buildid
        buildid = yield self._buildid_notifier.wait()
        return buildid
    @defer.inlineCallbacks
    def waitUntilFinished(self):
        """Wait (via the MQ) until this build's 'finished' event has fired."""
        buildid = yield self.get_buildid()
        yield self.master.mq.waitUntilEvent(
            ('builds', str(buildid), 'finished'), lambda: self.finished
        )
    def getWorkerInfo(self):
        """Return the worker info cached when the build started."""
        return self.worker_info
| 35,314 | Python | .py | 720 | 38.975 | 100 | 0.657779 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,822 | locks.py | buildbot_buildbot/master/buildbot/process/locks.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from twisted.internet import defer
@defer.inlineCallbacks
def get_real_locks_from_accesses_raw(locks, props, builder, workerforbuilder, config_version):
    """Resolve lock accesses into (worker-specific real lock, access) pairs.

    The accesses are first rendered against *props* when given, then looked
    up through the builder's botmaster for *config_version*.
    """
    workername = workerforbuilder.worker.workername
    if props is not None:
        locks = yield props.render(locks)
    if not locks:
        return []
    resolved = yield builder.botmaster.getLockFromLockAccesses(locks, config_version)
    return [(lock.getLockForWorker(workername), access) for lock, access in resolved]
def get_real_locks_from_accesses(locks, build):
    """Convenience wrapper resolving *locks* using the context of *build*."""
    return get_real_locks_from_accesses_raw(
        locks, build, build.builder, build.workerforbuilder, build.config_version
    )
| 1,353 | Python | .py | 28 | 45.178571 | 94 | 0.770691 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,823 | users.py | buildbot_buildbot/master/buildbot/process/users/users.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import annotations
import os
from binascii import hexlify
from hashlib import sha1
from typing import TYPE_CHECKING
from twisted.internet import defer
from twisted.python import log
from buildbot.util import bytes2unicode
from buildbot.util import unicode2bytes
if TYPE_CHECKING:
from buildbot.db.users import UserModel
# TODO: fossil comes from a plugin. We should have an API that plugins could use to
# register allowed user types.
srcs = ['git', 'svn', 'hg', 'cvs', 'darcs', 'bzr', 'fossil']
salt_len = 8
def createUserObject(master, author, src=None):
"""
Take a Change author and source and translate them into a User Object,
storing the user in master.db, or returning None if the src is not
specified.
@param master: link to Buildmaster for database operations
@type master: master.Buildmaster instance
@param authors: Change author if string or Authz instance
@type authors: string or www.authz instance
@param src: source from which the User Object will be created
@type src: string
"""
if not src:
log.msg("No vcs information found, unable to create User Object")
return defer.succeed(None)
if src in srcs:
usdict = {"identifier": author, "attr_type": src, "attr_data": author}
else:
log.msg(f"Unrecognized source argument: {src}")
return defer.succeed(None)
return master.db.users.findUserByAttr(
identifier=usdict['identifier'],
attr_type=usdict['attr_type'],
attr_data=usdict['attr_data'],
)
def _extractContact(user: UserModel | None, contact_types, uid):
if user is not None and user.attributes is not None:
for type in contact_types:
contact = user.attributes.get(type)
if contact:
break
else:
contact = None
if contact is None:
log.msg(
format="Unable to find any of %(contact_types)r for uid: %(uid)r",
contact_types=contact_types,
uid=uid,
)
return contact
def getUserContact(master, contact_types, uid):
"""
This is a simple getter function that returns a user attribute
that matches the contact_types argument, or returns None if no
uid/match is found.
@param master: BuildMaster used to query the database
@type master: BuildMaster instance
@param contact_types: list of contact attributes to look for in
in a given user, such as 'email' or 'nick'
@type contact_types: list of strings
@param uid: user that is searched for the contact_types match
@type uid: integer
@returns: string of contact information or None via deferred
"""
d = master.db.users.getUser(uid)
d.addCallback(_extractContact, contact_types, uid)
return d
def encrypt(passwd):
"""
Encrypts the incoming password after adding some salt to store
it in the database.
@param passwd: password portion of user credentials
@type passwd: string
@returns: encrypted/salted string
"""
m = sha1()
salt = hexlify(os.urandom(salt_len))
m.update(unicode2bytes(passwd) + salt)
crypted = bytes2unicode(salt) + m.hexdigest()
return crypted
def check_passwd(guess, passwd):
"""
Tests to see if the guess, after salting and hashing, matches the
passwd from the database.
@param guess: incoming password trying to be used for authentication
@param passwd: already encrypted password from the database
@returns: boolean
"""
m = sha1()
salt = passwd[: salt_len * 2] # salt_len * 2 due to encode('hex_codec')
m.update(unicode2bytes(guess) + unicode2bytes(salt))
crypted_guess = bytes2unicode(salt) + m.hexdigest()
return crypted_guess == bytes2unicode(passwd)
| 4,518 | Python | .py | 112 | 35.160714 | 83 | 0.710629 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,824 | manager.py | buildbot_buildbot/master/buildbot/process/users/manager.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from twisted.application import service
from twisted.internet import defer
from buildbot.util import service as util_service
class UserManagerManager(util_service.ReconfigurableServiceMixin, service.MultiService):
# this class manages a fleet of user managers; hence the name..
def __init__(self, master):
super().__init__()
self.setName('user_manager_manager')
self.master = master
@defer.inlineCallbacks
def reconfigServiceWithBuildbotConfig(self, new_config):
# this is easy - kick out all of the old managers, and add the
# new ones.
# pylint: disable=cell-var-from-loop
for mgr in list(self):
yield mgr.disownServiceParent()
for mgr in new_config.user_managers:
yield mgr.setServiceParent(self)
# reconfig any newly-added change sources, as well as existing
yield super().reconfigServiceWithBuildbotConfig(new_config)
| 1,658 | Python | .py | 34 | 43.941176 | 88 | 0.744114 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,825 | manual.py | buildbot_buildbot/master/buildbot/process/users/manual.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from twisted.internet import defer
from twisted.python import log
from buildbot import pbutil
from buildbot.util import service
# this class is known to contain cruft and will be looked at later, so
# no current implementation utilizes it aside from scripts.runner.
class CommandlineUserManagerPerspective(pbutil.NewCredPerspective):
"""
Perspective registered in buildbot.pbmanager and contains the real
workings of `buildbot user` by working with the database when
perspective_commandline is called.
"""
def __init__(self, master):
self.master = master
def formatResults(self, op, results):
"""
This formats the results of the database operations for printing
back to the caller
@param op: operation to perform (add, remove, update, get)
@type op: string
@param results: results from db queries in perspective_commandline
@type results: list
@returns: string containing formatted results
"""
formatted_results = ""
if op == 'add':
# list, alternating ident, uid
formatted_results += "user(s) added:\n"
for user in results:
if isinstance(user, str):
formatted_results += f"identifier: {user}\n"
else:
formatted_results += f"uid: {user}\n\n"
elif op == 'remove':
# list of dictionaries
formatted_results += "user(s) removed:\n"
for user in results:
if user:
formatted_results += f"identifier: {user}\n"
elif op == 'update':
# list, alternating ident, None
formatted_results += "user(s) updated:\n"
for user in results:
if user:
formatted_results += f"identifier: {user}\n"
elif op == 'get':
# list of dictionaries
formatted_results += "user(s) found:\n"
for user in results:
if user:
formatted_results += (
f"uid: {user.uid}\n"
f"identifier: {user.identifier}\n"
f"bb_username: {user.bb_username}\n"
)
if user.attributes:
formatted_results += "attributes:\n"
formatted_results += (
''.join(f"\t{key}: {value}\n" for key, value in user.attributes.items())
+ '\n'
)
else:
formatted_results += "no match found\n"
return formatted_results
@defer.inlineCallbacks
def perspective_commandline(self, op, bb_username, bb_password, ids, info):
"""
This performs the requested operations from the `buildbot user`
call by calling the proper buildbot.db.users methods based on
the operation. It yields a deferred instance with the results
from the database methods.
@param op: operation to perform (add, remove, update, get)
@type op: string
@param bb_username: username portion of auth credentials
@type bb_username: string
@param bb_password: hashed password portion of auth credentials
@type bb_password: hashed string
@param ids: user identifiers used to find existing users
@type ids: list of strings or None
@param info: type/value pairs for each user that will be added
or updated in the database
@type info: list of dictionaries or None
@returns: results from db.users methods via deferred
"""
log.msg("perspective_commandline called")
results = []
# pylint: disable=too-many-nested-blocks
if ids:
for user in ids:
# get identifier, guaranteed to be in user from checks
# done in C{scripts.runner}
uid = yield self.master.db.users.identifierToUid(identifier=user)
result = None
if op == 'remove':
if uid:
yield self.master.db.users.removeUser(uid)
result = user
else:
log.msg(f"Unable to find uid for identifier {user}")
elif op == 'get':
if uid:
result = yield self.master.db.users.getUser(uid)
else:
log.msg(f"Unable to find uid for identifier {user}")
results.append(result)
else:
for user in info:
# get identifier, guaranteed to be in user from checks
# done in C{scripts.runner}
ident = user.pop('identifier')
uid = yield self.master.db.users.identifierToUid(identifier=ident)
# if only an identifier was in user, we're updating only
# the bb_username and bb_password.
if not user:
if uid:
result = yield self.master.db.users.updateUser(
uid=uid,
identifier=ident,
bb_username=bb_username,
bb_password=bb_password,
)
results.append(ident)
else:
log.msg(f"Unable to find uid for identifier {user}")
else:
# when adding, we update the user after the first attr
once_through = False
for attr in user:
result = None
if op == 'update' or once_through:
if uid:
result = yield self.master.db.users.updateUser(
uid=uid,
identifier=ident,
bb_username=bb_username,
bb_password=bb_password,
attr_type=attr,
attr_data=user[attr],
)
else:
log.msg(f"Unable to find uid for identifier {user}")
elif op == 'add':
result = yield self.master.db.users.findUserByAttr(
identifier=ident, attr_type=attr, attr_data=user[attr]
)
once_through = True
results.append(ident)
# result is None from updateUser calls
if result:
results.append(result)
uid = result
results = self.formatResults(op, results)
return results
class CommandlineUserManager(service.AsyncMultiService):
"""
Service that runs to set up and register CommandlineUserManagerPerspective
so `buildbot user` calls get to perspective_commandline.
"""
def __init__(self, username=None, passwd=None, port=None):
super().__init__()
assert (
username and passwd
), "A username and password pair must be given to connect and use `buildbot user`"
self.username = username
self.passwd = passwd
assert port, "A port must be specified for a PB connection"
self.port = port
self.registration = None
@defer.inlineCallbacks
def startService(self):
# set up factory and register with buildbot.pbmanager
def factory(mind, username):
return CommandlineUserManagerPerspective(self.master)
self.registration = yield self.master.pbmanager.register(
self.port, self.username, self.passwd, factory
)
yield super().startService()
def stopService(self):
d = defer.maybeDeferred(service.AsyncMultiService.stopService, self)
@d.addCallback
def unreg(_):
if self.registration:
return self.registration.unregister()
return None
return d
| 9,063 | Python | .py | 199 | 30.929648 | 100 | 0.5488 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,826 | github.py | buildbot_buildbot/master/buildbot/reporters/github.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import annotations
import re
from typing import Generator
from twisted.internet import defer
from twisted.python import log
from buildbot.process.properties import Interpolate
from buildbot.process.properties import Properties
from buildbot.process.results import CANCELLED
from buildbot.process.results import EXCEPTION
from buildbot.process.results import FAILURE
from buildbot.process.results import RETRY
from buildbot.process.results import SKIPPED
from buildbot.process.results import SUCCESS
from buildbot.process.results import WARNINGS
from buildbot.reporters.base import ReporterBase
from buildbot.reporters.generators.build import BuildStartEndStatusGenerator
from buildbot.reporters.generators.buildrequest import BuildRequestGenerator
from buildbot.reporters.message import MessageFormatterRenderable
from buildbot.util import httpclientservice
from buildbot.util.giturlparse import giturlparse
HOSTED_BASE_URL = 'https://api.github.com'
class GitHubStatusPush(ReporterBase):
name: str | None = "GitHubStatusPush" # type: ignore[assignment]
def checkConfig(
self,
token,
context=None,
baseURL=None,
verbose=False,
debug=None,
verify=None,
generators=None,
**kwargs,
):
if generators is None:
generators = self._create_default_generators()
super().checkConfig(generators=generators, **kwargs)
@defer.inlineCallbacks
def reconfigService(
self,
token,
context=None,
baseURL=None,
verbose=False,
debug=None,
verify=None,
generators=None,
**kwargs,
):
self.token = token
self.debug = debug
self.verify = verify
self.verbose = verbose
self.context = self.setup_context(context)
if generators is None:
generators = self._create_default_generators()
yield super().reconfigService(generators=generators, **kwargs)
if baseURL is None:
baseURL = HOSTED_BASE_URL
if baseURL.endswith('/'):
baseURL = baseURL[:-1]
self._http = yield httpclientservice.HTTPSession(
self.master.httpservice,
baseURL,
headers={'User-Agent': 'Buildbot'},
debug=self.debug,
verify=self.verify,
)
def setup_context(self, context):
return context or Interpolate('buildbot/%(prop:buildername)s')
def _create_default_generators(self):
start_formatter = MessageFormatterRenderable('Build started.')
end_formatter = MessageFormatterRenderable('Build done.')
pending_formatter = MessageFormatterRenderable('Build pending.')
return [
BuildRequestGenerator(formatter=pending_formatter),
BuildStartEndStatusGenerator(
start_formatter=start_formatter, end_formatter=end_formatter
),
]
@defer.inlineCallbacks
def _get_auth_header(
self, props: Properties
) -> Generator[defer.Deferred[str], None, dict[str, str]]:
token = yield props.render(self.token)
return {'Authorization': f"token {token}"}
@defer.inlineCallbacks
def createStatus(
self,
repo_user,
repo_name,
sha,
state,
props,
target_url=None,
context=None,
issue=None,
description=None,
):
"""
:param repo_user: GitHub user or organization
:param repo_name: Name of the repository
:param sha: Full sha to create the status for.
:param state: one of the following 'pending', 'success', 'error'
or 'failure'.
:param target_url: Target url to associate with this status.
:param context: Build context
:param issue: Pull request number
:param description: Short description of the status.
:param props: Properties object of the build (used for render GITHUB_TOKEN secret)
:return: A deferred with the result from GitHub.
This code comes from txgithub by @tomprince.
txgithub is based on twisted's webclient agent, which is much less reliable and featureful
as txrequest (support for proxy, connection pool, keep alive, retry, etc)
"""
payload = {'state': state}
if description is not None:
payload['description'] = description
if target_url is not None:
payload['target_url'] = target_url
if context is not None:
payload['context'] = context
headers = yield self._get_auth_header(props)
ret = yield self._http.post(
'/'.join(['/repos', repo_user, repo_name, 'statuses', sha]),
json=payload,
headers=headers,
)
return ret
def is_status_2xx(self, code):
return code // 100 == 2
def _extract_issue(self, props):
branch = props.getProperty('branch')
if branch:
m = re.search(r"refs/pull/([0-9]*)/(head|merge)", branch)
if m:
return m.group(1)
return None
def _extract_github_info(self, sourcestamp):
repo_owner = None
repo_name = None
project = sourcestamp['project']
repository = sourcestamp['repository']
if project and "/" in project:
repo_owner, repo_name = project.split('/')
elif repository:
giturl = giturlparse(repository)
if giturl:
repo_owner = giturl.owner
repo_name = giturl.repo
return repo_owner, repo_name
@defer.inlineCallbacks
def sendMessage(self, reports):
report = reports[0]
build = reports[0]['builds'][0]
props = Properties.fromDict(build['properties'])
props.master = self.master
description = report.get('body', None)
if build['complete']:
state = {
SUCCESS: 'success',
WARNINGS: 'success',
FAILURE: 'failure',
SKIPPED: 'success',
EXCEPTION: 'error',
RETRY: 'pending',
CANCELLED: 'error',
}.get(build['results'], 'error')
else:
state = 'pending'
context = yield props.render(self.context)
sourcestamps = build['buildset'].get('sourcestamps')
if not sourcestamps:
return
issue = self._extract_issue(props)
for sourcestamp in sourcestamps:
repo_owner, repo_name = self._extract_github_info(sourcestamp)
if not repo_owner or not repo_name:
log.msg('Skipped status update because required repo information is missing.')
continue
sha = sourcestamp['revision']
response = None
# If the scheduler specifies multiple codebases, don't bother updating
# the ones for which there is no revision
if not sha:
log.msg(
f"Skipped status update for codebase {sourcestamp['codebase']}, "
f"context '{context}', issue {issue}."
)
continue
try:
if self.verbose:
log.msg(
f"Updating github status: repo_owner={repo_owner}, repo_name={repo_name}"
)
response = yield self.createStatus(
repo_user=repo_owner,
repo_name=repo_name,
sha=sha,
state=state,
target_url=build['url'],
context=context,
issue=issue,
description=description,
props=props,
)
if not response:
# the implementation of createStatus refused to post update due to missing data
continue
if not self.is_status_2xx(response.code):
raise RuntimeError()
if self.verbose:
log.msg(
f'Updated status with "{state}" for {repo_owner}/{repo_name} '
f'at {sha}, context "{context}", issue {issue}.'
)
except Exception as e:
if response:
content = yield response.content()
code = response.code
else:
content = code = "n/a"
log.err(
e,
(
f'Failed to update "{state}" for {repo_owner}/{repo_name} '
f'at {sha}, context "{context}", issue {issue}. '
f'http {code}, {content}'
),
)
class GitHubCommentPush(GitHubStatusPush):
name = "GitHubCommentPush"
def setup_context(self, context):
return ''
def _create_default_generators(self):
start_formatter = MessageFormatterRenderable(None)
end_formatter = MessageFormatterRenderable('Build done.')
return [
BuildStartEndStatusGenerator(
start_formatter=start_formatter, end_formatter=end_formatter
)
]
@defer.inlineCallbacks
def sendMessage(self, reports):
report = reports[0]
if 'body' not in report or report['body'] is None:
return
yield super().sendMessage(reports)
@defer.inlineCallbacks
def createStatus(
self,
repo_user,
repo_name,
sha,
state,
props,
target_url=None,
context=None,
issue=None,
description=None,
):
"""
:param repo_user: GitHub user or organization
:param repo_name: Name of the repository
:param sha: Full sha to create the status for.
:param state: unused
:param target_url: unused
:param context: unused
:param issue: Pull request number
:param description: Short description of the status.
:param props: Properties object of the build (used for render GITHUB_TOKEN secret)
:return: A deferred with the result from GitHub.
This code comes from txgithub by @tomprince.
txgithub is based on twisted's webclient agent, which is much less reliable and featureful
as txrequest (support for proxy, connection pool, keep alive, retry, etc)
"""
payload = {'body': description}
if issue is None:
log.msg(
f'Skipped status update for repo {repo_name} sha {sha} as issue is not specified'
)
return None
url = '/'.join(['/repos', repo_user, repo_name, 'issues', issue, 'comments'])
headers = yield self._get_auth_header(props)
ret = yield self._http.post(url, json=payload, headers=headers)
return ret
| 11,791 | Python | .py | 300 | 28.833333 | 99 | 0.601696 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,827 | gitlab.py | buildbot_buildbot/master/buildbot/reporters/gitlab.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Member
from __future__ import annotations
from urllib.parse import quote_plus as urlquote_plus
from twisted.internet import defer
from twisted.python import log
from buildbot.process.properties import Interpolate
from buildbot.process.properties import Properties
from buildbot.process.results import CANCELLED
from buildbot.process.results import EXCEPTION
from buildbot.process.results import FAILURE
from buildbot.process.results import RETRY
from buildbot.process.results import SKIPPED
from buildbot.process.results import SUCCESS
from buildbot.process.results import WARNINGS
from buildbot.reporters.base import ReporterBase
from buildbot.reporters.generators.build import BuildStartEndStatusGenerator
from buildbot.reporters.generators.buildrequest import BuildRequestGenerator
from buildbot.reporters.message import MessageFormatterRenderable
from buildbot.util import giturlparse
from buildbot.util import httpclientservice
HOSTED_BASE_URL = 'https://gitlab.com'
class GitLabStatusPush(ReporterBase):
name: str | None = "GitLabStatusPush" # type: ignore[assignment]
def checkConfig(
self,
token,
context=None,
baseURL=None,
verbose=False,
debug=None,
verify=None,
generators=None,
**kwargs,
):
if generators is None:
generators = self._create_default_generators()
super().checkConfig(generators=generators, **kwargs)
@defer.inlineCallbacks
def reconfigService(
self,
token,
context=None,
baseURL=None,
verbose=False,
debug=None,
verify=None,
generators=None,
**kwargs,
):
token = yield self.renderSecrets(token)
self.debug = debug
self.verify = verify
self.verbose = verbose
self.context = context or Interpolate('buildbot/%(prop:buildername)s')
if generators is None:
generators = self._create_default_generators()
yield super().reconfigService(generators=generators, **kwargs)
if baseURL is None:
baseURL = HOSTED_BASE_URL
if baseURL.endswith('/'):
baseURL = baseURL[:-1]
self.baseURL = baseURL
self._http = yield httpclientservice.HTTPSession(
self.master.httpservice,
baseURL,
headers={'PRIVATE-TOKEN': token},
debug=self.debug,
verify=self.verify,
)
self.project_ids = {}
def _create_default_generators(self):
start_formatter = MessageFormatterRenderable('Build started.')
end_formatter = MessageFormatterRenderable('Build done.')
pending_formatter = MessageFormatterRenderable('Build pending.')
return [
BuildRequestGenerator(formatter=pending_formatter),
BuildStartEndStatusGenerator(
start_formatter=start_formatter, end_formatter=end_formatter
),
]
def createStatus(
self, project_id, branch, sha, state, target_url=None, description=None, context=None
):
"""
:param project_id: Project ID from GitLab
:param branch: Branch name to create the status for.
:param sha: Full sha to create the status for.
:param state: one of the following 'pending', 'success', 'failed'
or 'canceled'.
:param target_url: Target url to associate with this status.
:param description: Short description of the status.
:param context: Context of the result
:return: A deferred with the result from GitLab.
"""
payload = {'state': state, 'ref': branch}
if description is not None:
payload['description'] = description
if target_url is not None:
payload['target_url'] = target_url
if context is not None:
payload['name'] = context
return self._http.post(f'/api/v4/projects/{project_id}/statuses/{sha}', json=payload)
@defer.inlineCallbacks
def getProjectId(self, sourcestamp):
# retrieve project id via cache
url = giturlparse(sourcestamp['repository'])
if url is None:
return None
project_full_name = f"{url.owner}/{url.repo}"
# gitlab needs project name to be fully url quoted to get the project id
project_full_name = urlquote_plus(project_full_name)
if project_full_name not in self.project_ids:
response = yield self._http.get(f'/api/v4/projects/{project_full_name}')
proj = yield response.json()
if response.code not in (200,):
log.msg(
'Unknown (or hidden) gitlab project'
f'{project_full_name}: {proj.get("message")}'
)
return None
self.project_ids[project_full_name] = proj['id']
return self.project_ids[project_full_name]
@defer.inlineCallbacks
def sendMessage(self, reports):
report = reports[0]
build = reports[0]['builds'][0]
props = Properties.fromDict(build['properties'])
props.master = self.master
description = report.get('body', None)
if build['complete']:
state = {
SUCCESS: 'success',
WARNINGS: 'success',
FAILURE: 'failed',
SKIPPED: 'success',
EXCEPTION: 'failed',
RETRY: 'pending',
CANCELLED: 'canceled',
}.get(build['results'], 'failed')
elif build.get('started_at'):
state = 'running'
else:
state = 'pending'
context = yield props.render(self.context)
sourcestamps = build['buildset']['sourcestamps']
# FIXME: probably only want to report status for the last commit in the changeset
for sourcestamp in sourcestamps:
sha = sourcestamp['revision']
if 'source_project_id' in props:
proj_id = props['source_project_id']
else:
proj_id = yield self.getProjectId(sourcestamp)
if proj_id is None:
continue
try:
if 'source_branch' in props:
branch = props['source_branch']
else:
branch = sourcestamp['branch']
target_url = build['url']
res = yield self.createStatus(
project_id=proj_id,
branch=branch,
sha=sha,
state=state,
target_url=target_url,
context=context,
description=description,
)
if res.code not in (200, 201, 204):
message = yield res.json()
message = message.get('message', 'unspecified error')
log.msg(
f'Could not send status "{state}" for '
f'{sourcestamp["repository"]} at {sha}: {message}'
)
elif self.verbose:
log.msg(f'Status "{state}" sent for {sourcestamp["repository"]} at {sha}.')
except Exception as e:
log.err(
e,
(
f'Failed to send status "{state}" for '
f'{sourcestamp["repository"]} at {sha}'
),
)
| 8,236 | Python | .py | 199 | 30.834171 | 95 | 0.609168 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,828 | message.py | buildbot_buildbot/master/buildbot/reporters/message.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from typing import ClassVar
from typing import Sequence
import jinja2
from twisted.internet import defer
from buildbot import config
from buildbot import util
from buildbot.process.properties import Properties
from buildbot.process.results import CANCELLED
from buildbot.process.results import EXCEPTION
from buildbot.process.results import FAILURE
from buildbot.process.results import RETRY
from buildbot.process.results import SKIPPED
from buildbot.process.results import SUCCESS
from buildbot.process.results import WARNINGS
from buildbot.process.results import Results
from buildbot.process.results import statusToString
from buildbot.reporters import utils
def get_detected_status_text(mode, results, previous_results):
if results == FAILURE:
if (
('change' in mode or 'problem' in mode)
and previous_results is not None
and previous_results != FAILURE
):
text = "new failure"
else:
text = "failed build"
elif results == WARNINGS:
text = "problem in the build"
elif results == SUCCESS:
if "change" in mode and previous_results is not None and previous_results != results:
text = "restored build"
else:
text = "passing build"
elif results == EXCEPTION:
text = "build exception"
else:
text = f"{statusToString(results)} build"
return text
def get_message_summary_text(build, results):
t = build['state_string']
if t:
t = ": " + t
else:
t = ""
if results == SUCCESS:
text = "Build succeeded!"
elif results == WARNINGS:
text = f"Build Had Warnings{t}"
elif results == CANCELLED:
text = "Build was cancelled"
else:
text = f"BUILD FAILED{t}"
return text
def get_message_source_stamp_text(source_stamps):
text = ""
for ss in source_stamps:
source = ""
if ss['branch']:
source += f"[branch {ss['branch']}] "
if ss['revision']:
source += str(ss['revision'])
else:
source += "HEAD"
if ss['patch'] is not None:
source += " (plus patch)"
discriminator = ""
if ss['codebase']:
discriminator = f" '{ss['codebase']}'"
text += f"Build Source Stamp{discriminator}: {source}\n"
return text
def get_projects_text(source_stamps, master):
projects = set()
for ss in source_stamps:
if ss['project']:
projects.add(ss['project'])
if not projects:
projects = [master.config.title]
return ', '.join(list(projects))
def create_context_for_build(mode, build, is_buildset, master, blamelist):
buildset = build['buildset']
ss_list = buildset['sourcestamps']
results = build['results']
if 'prev_build' in build and build['prev_build'] is not None:
previous_results = build['prev_build']['results']
else:
previous_results = None
return {
'results': build['results'],
'result_names': Results,
'mode': mode,
'buildername': build['builder']['name'],
'workername': build['properties'].get('workername', ["<unknown>"])[0],
'buildset': buildset,
'build': build,
'is_buildset': is_buildset,
'projects': get_projects_text(ss_list, master),
'previous_results': previous_results,
'status_detected': get_detected_status_text(mode, results, previous_results),
'build_url': utils.getURLForBuild(master, build['builder']['builderid'], build['number']),
'buildbot_title': master.config.title,
'buildbot_url': master.config.buildbotURL,
'blamelist': blamelist,
'summary': get_message_summary_text(build, results),
'sourcestamps': get_message_source_stamp_text(ss_list),
}
def create_context_for_buildset(mode, buildset, builds, master, blamelist):
ss_list = buildset['sourcestamps']
results = buildset["results"]
return {
"results": results,
"result_names": Results,
"mode": mode,
"buildset": buildset,
"builds": builds,
"is_buildset": True,
"projects": get_projects_text(ss_list, master),
"status_detected": get_detected_status_text(mode, results, None),
"buildbot_title": master.config.title,
"buildbot_url": master.config.buildbotURL,
"blamelist": blamelist,
"sourcestamps": get_message_source_stamp_text(ss_list),
}
def create_context_for_worker(master, worker):
    """Build the rendering context for missing-worker notifications."""
    context = {'worker': worker}
    context['buildbot_title'] = master.config.title
    context['buildbot_url'] = master.config.buildbotURL
    return context
class MessageFormatterBase(util.ComparableMixin):
    """Abstract base for reporter message formatters.

    Subclasses produce a message dictionary (body/type/subject/extra_info)
    for a build or buildset.  The ``want_*`` flags advertise how much data
    the reporter must fetch from the data API before formatting.
    """
    # Declared type of the rendered body: 'plain', 'html' or 'json'.
    template_type = 'plain'
    def __init__(
        self,
        ctx=None,
        want_properties=True,
        want_steps=False,
        want_logs=False,
        want_logs_content=False,
    ):
        if ctx is None:
            ctx = {}
        # User-supplied values merged into the rendering context; they
        # override computed entries (see render_message_dict).
        self.context = ctx
        self.want_properties = want_properties
        self.want_steps = want_steps
        self.want_logs = want_logs
        self.want_logs_content = want_logs_content
    def buildAdditionalContext(self, master, ctx):
        # Hook for subclasses to inject extra context values; no-op here.
        pass
    @defer.inlineCallbacks
    def render_message_dict(self, master, context):
        """Generate a buildbot reporter message and return a dictionary
        containing the message body, type and subject.

        This is an informal description of what message dictionaries are expected to be
        produced. It is an internal API and expected to change even within bugfix releases, if
        needed.

        The message dictionary contains the 'body', 'type' and 'subject' keys:
        - 'subject' is a string that defines a subject of the message. It's not necessarily
          used on all reporters. It may be None.
        - 'type' must be 'plain', 'html' or 'json'.
        - 'body' is the content of the message. It may be None. The type of the data depends
          on the value of the 'type' parameter:
          - 'plain': Must be a string
          - 'html': Must be a string
          - 'json': Must be a non-encoded jsonable value. The root element must be either
            of dictionary, list or string. This must not change during all invocations of
            a particular instance of the formatter.
        - "extra_info" is an optional dictionary of dictionaries of extra information.

        In case of a report being created for multiple builds (e.g. in the case of a buildset),
        the values returned by message formatter are concatenated. If this is not possible
        (e.g. if the body is a dictionary), any subsequent messages are ignored.
        """
        yield self.buildAdditionalContext(master, context)
        context.update(self.context)
        # Render the three message parts concurrently; each renderer may
        # return a plain value or a Deferred.
        body, subject, extra_info = yield defer.gatherResults([
            defer.maybeDeferred(self.render_message_body, context),
            defer.maybeDeferred(self.render_message_subject, context),
            defer.maybeDeferred(self.render_message_extra_info, context),
        ])
        return {
            "body": body,
            'type': self.template_type,
            "subject": subject,
            "extra_info": extra_info,
        }
    def render_message_body(self, context):
        # Overridden by subclasses; None means "no body".
        return None
    def render_message_subject(self, context):
        # Overridden by subclasses; None means "no subject".
        return None
    def render_message_extra_info(self, context):
        # Overridden by subclasses; None means "no extra info".
        return None
    def format_message_for_build(self, master, build, **kwargs):
        # Known kwargs keys: mode, users, is_buildset
        raise NotImplementedError
    def format_message_for_buildset(self, master, buildset, builds, **kwargs):
        # Known kwargs keys: mode, users, is_buildset
        raise NotImplementedError
class MessageFormatterEmpty(MessageFormatterBase):
    """Formatter that deliberately produces no message content.

    Useful for reporters that only care about report metadata.  Both entry
    points return the same four keys so that consumers can always rely on
    'extra_info' being present.
    """

    def format_message_for_build(self, master, build, **kwargs):
        return {'body': None, 'type': 'plain', 'subject': None, "extra_info": None}

    def format_message_for_buildset(self, master, buildset, builds, **kwargs):
        # Fix: previously this omitted the 'extra_info' key that
        # format_message_for_build (and every other formatter) returns,
        # which could KeyError in consumers of the message dict.
        return {"body": None, "type": "plain", "subject": None, "extra_info": None}
class MessageFormatterFunctionRaw(MessageFormatterBase):
    """Formatter delegating to a user callback that returns a raw message dict."""

    def __init__(self, function, **kwargs):
        super().__init__(**kwargs)
        self._function = function

    @staticmethod
    def _normalize(msgdict):
        # Fill in defaults for any keys the user callback left out.
        return {
            "body": msgdict.get("body", None),
            "type": msgdict.get("type", "plain"),
            "subject": msgdict.get("subject", None),
            "extra_info": msgdict.get("extra_info", None),
        }

    @defer.inlineCallbacks
    def format_message_for_build(self, master, build, is_buildset=False, users=None, mode=None):
        ctx = create_context_for_build(mode, build, is_buildset, master, users)
        msgdict = yield self._function(master, ctx)
        return self._normalize(msgdict)

    @defer.inlineCallbacks
    def format_message_for_buildset(
        self, master, buildset, builds, users=None, mode=None, **kwargs
    ):
        ctx = create_context_for_buildset(mode, buildset, builds, master, users)
        msgdict = yield self._function(master, ctx)
        return self._normalize(msgdict)
class MessageFormatterFunction(MessageFormatterBase):
    """Formatter whose body is produced by a user-supplied callable."""

    def __init__(self, function, template_type, **kwargs):
        super().__init__(**kwargs)
        self.template_type = template_type
        self._function = function

    @defer.inlineCallbacks
    def format_message_for_build(self, master, build, **kwargs):
        context = {'build': build}
        rendered = yield self.render_message_dict(master, context)
        return rendered

    @defer.inlineCallbacks
    def format_message_for_buildset(self, master, buildset, builds, **kwargs):
        context = {"buildset": buildset, "builds": builds}
        rendered = yield self.render_message_dict(master, context)
        return rendered

    def render_message_body(self, context):
        # Delegate body rendering to the wrapped callable.
        return self._function(context)

    def render_message_subject(self, context):
        return None
class MessageFormatterRenderable(MessageFormatterBase):
    """Formatter rendering IRenderable templates against build properties."""

    template_type = 'plain'

    def __init__(self, template, subject=None):
        super().__init__()
        self.template = template
        self.subject = subject

    @defer.inlineCallbacks
    def format_message_for_build(self, master, build, **kwargs):
        context = {'build': build, 'master': master}
        rendered = yield self.render_message_dict(master, context)
        return rendered

    def format_message_for_buildset(self, master, buildset, builds, **kwargs):
        raise NotImplementedError

    def _build_properties(self, context):
        # Renderables are evaluated against the build's properties.
        props = Properties.fromDict(context['build']['properties'])
        props.master = context['master']
        return props

    @defer.inlineCallbacks
    def render_message_body(self, context):
        rendered = yield self._build_properties(context).render(self.template)
        return rendered

    @defer.inlineCallbacks
    def render_message_subject(self, context):
        if self.subject is None:
            return None
        rendered = yield self._build_properties(context).render(self.subject)
        return rendered
default_body_template_plain = """\
A {{ status_detected }} has been detected on builder {{ buildername }} while building {{ projects }}.
Full details are available at:
{{ build_url }}
Build state: {{ build['state_string'] }}
Revision: {{ build['properties'].get('got_revision', ['(unknown)'])[0] }}
Worker: {{ workername }}
Build Reason: {{ build['properties'].get('reason', ["(unknown)"])[0] }}
Blamelist: {{ ", ".join(blamelist) }}
Steps:
{% if build['steps'] %}{% for step in build['steps'] %}
- {{ step['number'] }}: {{ step['name'] }} ( {{ result_names[step['results']] }} )
{% if step['logs'] %} Logs:{% for log in step['logs'] %}
- {{ log.name }}: {{ log.url }}{% endfor %}
{% endif %}{% endfor %}
{% else %}
- (no steps)
{% endif %}
"""
default_body_template_html = """\
<p>A {{ status_detected }} has been detected on builder
<a href="{{ build_url }}">{{ buildername }}</a>
while building {{ projects }}.</p>
<p>Information:</p>
<ul>
<li>Build state: {{ build['state_string'] }}</li>
<li>Revision: {{ build['properties'].get('got_revision', ['(unknown)'])[0] }}</li>
<li>Worker: {{ workername }}</li>
<li>Build Reason: {{ build['properties'].get('reason', ["(unknown)"])[0] }}</li>
<li>Blamelist: {{ ", ".join(blamelist) }}</li>
</ul>
<p>Steps:</p>
<ul>
{% if build['steps'] %}{% for step in build['steps'] %}
<li style="{{ results_style[step['results']] }}">
{{ step['number'] }}: {{ step['name'] }} ( {{ result_names[step['results']] }} )
{% if step['logs'] %}({% for log in step['logs'] %}
<a href="{{ log.url }}"><{{ log.name }}></a>{% endfor %}
)
{% endif %}</li>
{% endfor %}{% else %}
<li>No steps</li>
{% endif %}
</ul>
"""
default_subject_template = """\
{{ '☠' if result_names[results] == 'failure' else '☺' if result_names[results] == 'success' else '☝' }} \
Buildbot ({{ buildbot_title }}): {{ build['properties'].get('project', ['whole buildset'])[0] if is_buildset else buildername }} \
- \
{{ build['state_string'] }} \
{{ '(%s)' % (build['properties']['branch'][0] if (build['properties']['branch'] and build['properties']['branch'][0]) else build['properties'].get('got_revision', ['(unknown revision)'])[0]) }}"""
class MessageFormatterBaseJinja(MessageFormatterBase):
    """Formatter base that renders body and subject with Jinja templates."""
    # ComparableMixin uses these attributes for equality/reconfig comparisons.
    compare_attrs: ClassVar[Sequence[str]] = ['body_template', 'subject_template', 'template_type']
    subject_template = None
    template_type = 'plain'
    # True when a stock plain/html body template is in use; this drives the
    # injection of 'results_style' into the rendering context.
    uses_default_body_template = False
    def __init__(
        self, template=None, subject=None, template_type=None, extra_info_cb=None, **kwargs
    ):
        if template_type is not None:
            self.template_type = template_type
        if template is None:
            self.uses_default_body_template = True
            if self.template_type == 'plain':
                template = default_body_template_plain
            elif self.template_type == 'html':
                template = default_body_template_html
            else:
                # Only 'plain' and 'html' have stock templates; any other
                # type requires an explicit template argument.
                config.error(
                    f'{self.__class__.__name__}: template type {self.template_type} '
                    'is not known to pick default template'
                )
            # The stock templates render step and log info, so make sure the
            # reporter fetches it.
            kwargs['want_steps'] = True
            kwargs['want_logs'] = True
        if subject is None:
            subject = default_subject_template
        self.body_template = jinja2.Template(template)
        self.subject_template = jinja2.Template(subject)
        self.extra_info_cb = extra_info_cb
        super().__init__(**kwargs)
    def buildAdditionalContext(self, master, ctx):
        if self.uses_default_body_template:
            # Inline CSS for the default HTML template, keyed by result code.
            ctx['results_style'] = {
                SUCCESS: '',
                EXCEPTION: 'color: #f0f; font-weight: bold;',
                FAILURE: 'color: #f00; font-weight: bold;',
                RETRY: 'color: #4af;',
                SKIPPED: 'color: #4af;',
                WARNINGS: 'color: #f80;',
                CANCELLED: 'color: #4af;',
            }
    def render_message_body(self, context):
        return self.body_template.render(context)
    def render_message_subject(self, context):
        return self.subject_template.render(context)
    def render_message_extra_info(self, context):
        if self.extra_info_cb is None:
            return None
        return self.extra_info_cb(context)
class MessageFormatter(MessageFormatterBaseJinja):
    """Default Jinja-based message formatter for builds and buildsets."""

    @defer.inlineCallbacks
    def format_message_for_build(self, master, build, is_buildset=False, users=None, mode=None):
        context = create_context_for_build(mode, build, is_buildset, master, users)
        rendered = yield self.render_message_dict(master, context)
        return rendered

    @defer.inlineCallbacks
    def format_message_for_buildset(self, master, buildset, builds, users=None, mode=None):
        context = create_context_for_buildset(mode, buildset, builds, master, users)
        rendered = yield self.render_message_dict(master, context)
        return rendered
# Default plain-text template for missing-worker notifications.
default_missing_template_plain = """\
The Buildbot worker named {{worker.name}} went away.
It last disconnected at {{worker.last_connection}}.
{% if 'admin' in worker['workerinfo'] %}
The admin on record (as reported by WORKER:info/admin) was {{worker.workerinfo.admin}}.
{% endif %}
"""
# Default HTML template for missing-worker notifications.
default_missing_template_html = """\
<p>The Buildbot worker named {{worker.name}} went away.</p>
<p>It last disconnected at {{worker.last_connection}}.</p>
{% if 'admin' in worker['workerinfo'] %}
<p>The admin on record (as reported by WORKER:info/admin) was {{worker.workerinfo.admin}}.</p>
{% endif %}
"""
# Default subject line for missing-worker notifications.
default_missing_worker_subject_template = (
    'Buildbot {{ buildbot_title }} worker {{ worker.name }} missing'
)
class MessageFormatterMissingWorker(MessageFormatterBaseJinja):
    """Formatter for 'worker missing' notifications.

    Picks a stock plain or HTML template when none is supplied.
    """

    def __init__(self, template=None, subject=None, template_type=None, **kwargs):
        if template_type is None:
            template_type = 'plain'
        if template is None:
            if template_type == 'plain':
                template = default_missing_template_plain
            elif template_type == 'html':
                template = default_missing_template_html
            else:
                # Fix: report the requested (invalid) template_type.  The
                # previous message interpolated self.template_type, which at
                # this point still holds the class default 'plain' and so
                # reported the wrong value.
                config.error(
                    f'{self.__class__.__name__}: template type {template_type} '
                    'is not known to pick default template'
                )
        if subject is None:
            subject = default_missing_worker_subject_template
        super().__init__(template=template, subject=subject, template_type=template_type, **kwargs)

    @defer.inlineCallbacks
    def formatMessageForMissingWorker(self, master, worker):
        """Render the missing-worker message dict for the given worker."""
        ctx = create_context_for_worker(master, worker)
        msgdict = yield self.render_message_dict(master, ctx)
        return msgdict
| 18,791 | Python | .py | 433 | 35.95843 | 196 | 0.638941 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,829 | words.py | buildbot_buildbot/master/buildbot/reporters/words.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import random
import re
import shlex
from twisted.application.service import Service
from twisted.internet import defer
from twisted.internet import protocol
from twisted.internet import reactor
from twisted.python import log
from twisted.python import usage
from twisted.web import resource
from twisted.web import server
from buildbot import util
from buildbot import version
from buildbot.data import resultspec
from buildbot.plugins.db import get_plugins
from buildbot.process.properties import Properties
from buildbot.process.results import CANCELLED
from buildbot.process.results import EXCEPTION
from buildbot.process.results import FAILURE
from buildbot.process.results import RETRY
from buildbot.process.results import SKIPPED
from buildbot.process.results import SUCCESS
from buildbot.process.results import WARNINGS
from buildbot.process.results import statusToString
from buildbot.reporters import utils
from buildbot.util import epoch2datetime
from buildbot.util import service
from buildbot.util import unicode2bytes
# Used in command_HELLO and its test. 'Hi' in 100 languages.
GREETINGS = [
    "ږغ كول ، هركلى كول ږغ، هركلى",
    "Goeie dag",
    "Tungjatjeta",
    "Yatasay",
    "Ahlan bik",
    "Voghdzuyin",
    "hola",
    "kaixo",
    "Horas",
    "Pryvitańnie",
    "Nomoskar",
    "Oki",
    "Selam",
    "Dez-mat",
    "Zdrávejte",
    "Mingala ba",
    "Hola",
    "Hafa dai",
    "Oh-see-YOH",
    "Nín hao",
    "Bonjou",
    "Zdravo",
    "Nazdar",
    "Hallo",
    "Hallo",
    "Iiti",
    "Kotáka",
    "Saluton",
    "Tere",
    "Hallo",
    "Hallo",
    "Bula",
    "Helo",
    "Hei",
    "Goede morgen",
    "Bonjour",
    "Hoi",
    "Ola",
    "Gamardžoba",
    "Guten Tag",
    "Mauri",
    "Geia!",
    "Inuugujoq",
    "Kem cho",
    "Sannu",
    "Aloha",
    "Shalóm",
    "Namasté",
    "Szia",
    "Halló",
    "Hai",
    "Kiana",
    "Dia is muire dhuit",
    "Buongiorno",
    "Kónnichi wa",
    "Salam",
    "Annyeonghaseyo",
    "Na",
    "Sabai dii",
    "Ave",
    "Es mīlu tevi",
    "Labas.",
    "Selamat petang",
    "Ni hao",
    "Kia ora",
    "Yokwe",
    "Kwe",
    "sain baina uu",
    "niltze",
    "Yá'át'ééh",
    "Namaste",
    "Hallo.",
    "Salâm",
    "Witajcie",
    "Olá",
    "Kâils",
    "Aroha",
    "Salut",
    "Privét",
    "Talofa",
    "Namo namah",
    "ćao",
    "Nazdar",
    "Zdravo",
    "Hola",
    "Jambo",
    "Hej",
    "Sälü",
    "Halo",
    "Selam",
    "Sàwàtdee kráp",
    "Dumela",
    "Merhaba",
    "Pryvít",
    "Adaab arz hai",
    "Chào",
    "Glidis",
    "Helo",
    "Sawubona",
    "Hoi",
]
class UsageError(ValueError):
    """Raised when a chat command is invoked with invalid arguments."""

    def __init__(self, string="Invalid usage", *more):
        # The override exists solely to supply a default message; pylint's
        # useless-super-delegation check misfires on changed defaults
        # (https://github.com/PyCQA/pylint/issues/1085).
        # pylint: disable=useless-super-delegation
        super().__init__(string, *more)
class ForceOptions(usage.Options):
    """Option parser for the chat-bot 'force build' command."""

    optParameters = [
        ["builder", None, None, "which Builder to start"],
        ["codebase", None, "", "which codebase to build"],
        ["branch", None, "master", "which branch to build"],
        ["revision", None, "HEAD", "which revision to build"],
        ["project", None, "", "which project to build"],
        ["reason", None, None, "the reason for starting the build"],
        [
            "props",
            None,
            None,
            "A set of properties made available in the build environment, "
            "format is --properties=prop1=value1,prop2=value2,.. "
            "option can be specified multiple times.",
        ],
    ]

    def parseArgs(self, *args):
        # Positional arguments: the first is the builder name, the rest are
        # joined into the reason.  Each may only be given once (either
        # positionally or via its option).
        remaining = list(args)
        if remaining:
            if self['builder'] is not None:
                raise UsageError("--builder provided in two ways")
            self['builder'] = remaining.pop(0)
        if remaining:
            if self['reason'] is not None:
                raise UsageError("--reason provided in two ways")
            self['reason'] = " ".join(remaining)
# Names (minus the 'command_' prefix) of commands denied by default.
dangerous_commands = []


def dangerousCommand(method):
    """Decorator registering a command method as dangerous, so that the
    authorization layer denies it unless explicitly allowed."""
    name = method.__name__
    prefix = 'command_'
    if not name.startswith(prefix):
        raise ValueError('@dangerousCommand can be used only for commands')
    dangerous_commands.append(name[len(prefix):])
    return method
class Channel(service.AsyncService):
    """
    This class holds what should be shared between users on a single channel.
    In particular it is responsible for maintaining notification states and
    send notifications.
    """

    def __init__(self, bot, channel):
        self.name = f"Channel({channel})"
        self.id = channel
        self.bot = bot
        # Event names (e.g. 'started', 'finished', 'worker') this channel
        # wants notifications for.
        self.notify_events = set()
        # Active MQ consumer handles; non-empty iff we listen to build events.
        self.subscribed = []
        self.build_subscriptions = []
        self.reported_builds = []  # tuples (when, buildername, buildnum)
        self.missing_workers = set()
        self.useRevisions = bot.useRevisions

    def send(self, message, **kwargs):
        """Deliver *message* to this channel through the bot."""
        return self.bot.send_message(self.id, message, **kwargs)

    @defer.inlineCallbacks
    def stopService(self):
        if self.subscribed:
            yield self.unsubscribe_from_build_events()

    def validate_notification_event(self, event):
        """Raise UsageError unless *event* is a recognized notification name."""
        if not re.compile(
            "^(started|finished|success|warnings|failure|exception|"
            "cancelled|problem|recovery|worse|better|worker|"
            # this is deprecated list
            "(success|warnings|failure|exception)To"
            "(Success|Warnings|Failure|Exception))$"
        ).match(event):
            raise UsageError("Try '" + self.bot.commandPrefix + "notify on|off _EVENT_'.")

    @defer.inlineCallbacks
    def list_notified_events(self):
        """Tell the channel which events are currently subscribed."""
        if self.notify_events:
            yield self.send(
                'The following events are being notified: '
                f'{", ".join(sorted(self.notify_events))}.'
            )
        else:
            yield self.send("No events are being notified.")

    def notify_for(self, *events):
        """Return True if any of *events* is subscribed on this channel."""
        for event in events:
            if event in self.notify_events:
                return True
        return False

    @defer.inlineCallbacks
    def subscribe_to_build_events(self):
        startConsuming = self.master.mq.startConsuming

        def buildStarted(key, msg):
            return self.buildStarted(msg)

        def buildFinished(key, msg):
            return self.buildFinished(msg)

        def workerEvent(key, msg):
            if key[2] == 'missing':
                return self.workerMissing(msg)
            if key[2] == 'connected':
                return self.workerConnected(msg)
            return None

        for e, f in (
            ("new", buildStarted),  # BuilderStarted
            ("finished", buildFinished),
        ):  # BuilderFinished
            handle = yield startConsuming(f, ('builders', None, 'builds', None, e))
            self.subscribed.append(handle)
        handle = yield startConsuming(workerEvent, ('workers', None, None))
        self.subscribed.append(handle)

    @defer.inlineCallbacks
    def unsubscribe_from_build_events(self):
        # Cancel all the subscriptions we have
        old_list = self.subscribed
        self.subscribed = []
        for handle in old_list:
            yield handle.stopConsuming()

    def add_notification_events(self, events):
        for event in events:
            self.validate_notification_event(event)
            self.notify_events.add(event)
        # Lazily start consuming MQ events on first subscription.
        if not self.subscribed:
            self.subscribe_to_build_events()

    @defer.inlineCallbacks
    def remove_notification_events(self, events):
        for event in events:
            self.validate_notification_event(event)
            self.notify_events.remove(event)
        if not self.notify_events:
            yield self.unsubscribe_from_build_events()

    @defer.inlineCallbacks
    def remove_all_notification_events(self):
        self.notify_events = set()
        if self.subscribed:
            yield self.unsubscribe_from_build_events()

    def shouldReportBuild(self, builder, buildnum):
        """Returns True if this build should be reported for this contact
        (eliminating duplicates), and also records the report for later"""
        for _, b, n in self.reported_builds:
            if b == builder and n == buildnum:
                return False
        self.reported_builds.append([util.now(), builder, buildnum])
        # forget reports older than 60 seconds
        horizon = util.now() - 60
        while self.reported_builds and self.reported_builds[0][0] < horizon:
            self.reported_builds.pop(0)
        # and return True, since this is a new one
        return True

    @defer.inlineCallbacks
    def buildStarted(self, build):
        builder = yield self.bot.getBuilder(builderid=build['builderid'])
        builderName = builder['name']
        buildNumber = build['number']
        log.msg(f"[Contact] Builder {builder['name']} started")
        # only notify about builders we are interested in.
        # Fix: builderMatchesAnyTag lives on the bot, not on the Channel
        # (Channel defines no such method; cf. buildFinished below).
        if self.bot.tags is not None and not self.bot.builderMatchesAnyTag(
            builder.get('tags', [])
        ):
            log.msg('Not notifying for a build that does not match any tags')
            return
        if not self.notify_for('started'):
            return
        if self.useRevisions:
            # Fix: getRevisionsForBuild is provided by the bot
            # (cf. Contact.command_WATCH), not by the Channel.
            revisions = yield self.bot.getRevisionsForBuild(build)
            r = f"Build containing revision(s) {','.join(revisions)} on {builderName} started"
        else:
            # Abbreviate long lists of changes to simply two
            # revisions, and the number of additional changes.
            # TODO: We can't get the list of the changes related to a build in
            # nine
            changes_str = ""
            url = utils.getURLForBuild(self.master, builder['builderid'], build['number'])
            r = f"Build [#{buildNumber}]({url}) of `{builderName}` started"
            if changes_str:
                r += f" ({changes_str})"
        self.send(r + ".")

    @defer.inlineCallbacks
    def buildFinished(self, build, watched=False):
        builder = yield self.bot.getBuilder(builderid=build['builderid'])
        builderName = builder['name']
        buildNumber = build['number']
        # only notify about builders we are interested in
        if self.bot.tags is not None and not self.bot.builderMatchesAnyTag(builder.get('tags', [])):
            log.msg('Not notifying for a build that does not match any tags')
            return
        if not (watched or (yield self.notify_for_finished(build))):
            return
        if not self.shouldReportBuild(builderName, buildNumber):
            return
        url = utils.getURLForBuild(self.master, builder['builderid'], buildNumber)
        if self.useRevisions:
            # Fix: getRevisionsForBuild is a bot method (see buildStarted).
            revisions = yield self.bot.getRevisionsForBuild(build)
            r = (
                f"Build on `{builderName}` containing revision(s) {','.join(revisions)} "
                f"{self.bot.format_build_status(build)}"
            )
        else:
            r = (
                f"Build [#{buildNumber}]({url}) of `{builderName}` "
                f"{self.bot.format_build_status(build)}"
            )
        s = build.get('status_string')
        if build['results'] != SUCCESS and s is not None:
            r += ": " + s
        else:
            r += "."
        # FIXME: where do we get the list of changes for a build ?
        # if self.bot.showBlameList and buildResult != SUCCESS and len(build.changes) != 0:
        #     r += ' blamelist: ' + ', '.join(list(set([c.who for c in build.changes])))
        self.send(r)

    @defer.inlineCallbacks
    def notify_for_finished(self, build):
        """Return True when the finished *build* should be reported, taking
        status transitions (better/worse/problem/recovery) into account."""
        if self.notify_for('finished'):
            return True
        result = build['results']
        result_name = statusToString(result)
        if self.notify_for(result_name):
            return True
        if result in self.bot.results_severity and (
            self.notify_for('better', 'worse', 'problem', 'recovery')
            or any('To' in e for e in self.notify_events)
        ):
            prev_build = yield self.master.data.get((
                'builders',
                build['builderid'],
                'builds',
                build['number'] - 1,
            ))
            if prev_build:
                prev_result = prev_build['results']
                if prev_result in self.bot.results_severity:
                    result_severity = self.bot.results_severity.index(result)
                    prev_result_severity = self.bot.results_severity.index(prev_result)
                    if self.notify_for('better') and result_severity < prev_result_severity:
                        return True
                    if self.notify_for('worse') and result_severity > prev_result_severity:
                        return True
                    if (
                        self.notify_for('problem')
                        and prev_result in (SUCCESS, WARNINGS)
                        and result in (FAILURE, EXCEPTION)
                    ):
                        return True
                    if (
                        self.notify_for('recovery')
                        and prev_result in (FAILURE, EXCEPTION)
                        and result in (SUCCESS, WARNINGS)
                    ):
                        return True
                    # DEPRECATED
                    required_notification_control_string = ''.join((
                        statusToString(prev_result).lower(),
                        'To',
                        result_name.capitalize(),
                    ))
                    if self.notify_for(required_notification_control_string):
                        return True
        return False

    @defer.inlineCallbacks
    def workerMissing(self, worker):
        self.missing_workers.add(worker['workerid'])
        if self.notify_for('worker'):
            self.send(
                f"Worker `{worker['name']}` is missing. It was seen last on "
                f"{worker['last_connection']}."
            )
        yield self.bot.saveMissingWorkers()

    @defer.inlineCallbacks
    def workerConnected(self, worker):
        workerid = worker['workerid']
        if workerid in self.missing_workers:
            self.missing_workers.remove(workerid)
            if self.notify_for('worker'):
                self.send(f"Worker `{worker['name']}` is back online.")
            yield self.bot.saveMissingWorkers()
class Contact:
"""I hold the state for a single user's interaction with the buildbot.
There will be one instance of me for each user who interacts personally
with the buildbot. There will be an additional instance for each
'broadcast contact' (chat rooms, IRC channels as a whole).
"""
    def __init__(self, user, channel):
        """
        :param user: User ID representing this contact
        :param channel: Channel this contact is on; its ``bot`` attribute
            links back to the owning StatusBot
        """
        self.user_id = user
        self.channel = channel
    @property
    def bot(self):
        # The StatusBot is reached through the channel this contact lives on.
        return self.channel.bot
    @property
    def master(self):
        # Convenience shortcut to the BuildMaster owning the bot.
        return self.channel.bot.master
@property
def is_private_chat(self):
return self.user_id == self.channel.id
    @staticmethod
    def overrideCommand(meth):
        """Decorator for subclasses that override a base Contact command:
        copies the base implementation's docstring and ``usage`` attribute
        onto the override so help output stays consistent."""
        try:
            base_meth = getattr(Contact, meth.__name__)
        except AttributeError:
            # No base command of that name: nothing to inherit.
            pass
        else:
            try:
                meth.__doc__ = base_meth.__doc__
            except AttributeError:
                pass
            try:
                meth.usage = base_meth.usage
            except AttributeError:
                # Base command defines no usage string.
                pass
        return meth
    # Communication with the user
    def send(self, message, **kwargs):
        # Delivery is delegated to the channel (and from there to the bot).
        return self.channel.send(message, **kwargs)
def access_denied(self, *args, **kwargs):
return self.send(f"Thou shall not pass, {self.user_id}!!!")
    # Main dispatchers for incoming messages
    def getCommandMethod(self, command):
        """Resolve *command* to a bound command_* method, applying the bot's
        authorization rules.

        Returns None for unknown commands, and ``self.access_denied`` when
        the user is not permitted to run the command.
        """
        command = command.upper()
        try:
            method = getattr(self, 'command_' + command)
        except AttributeError:
            return None
        get_authz = self.bot.authz.get
        acl = get_authz(command)
        if acl is None:
            # No per-command rule: dangerous commands default to denied
            # ('!' entry), everything else to allowed ('' entry).
            if command in dangerous_commands:
                acl = get_authz('!', False)
            else:
                acl = get_authz('', True)
            # A '*' entry overrides the defaults for all commands.
            acl = get_authz('*', acl)
        # acl may be a bool, a single user id, or a list of allowed user ids.
        if isinstance(acl, (list, tuple)):
            acl = self.user_id in acl
        elif acl not in (True, False, None):
            acl = self.user_id == acl
        if not acl:
            return self.access_denied
        return method
    @defer.inlineCallbacks
    def handleMessage(self, message, **kwargs):
        """Dispatch an incoming chat *message* to the matching command method.

        Returns the command's result, or None when the message was not a
        command; UsageError and unexpected exceptions are reported back to
        the channel instead of propagating.
        """
        message = message.lstrip()
        parts = message.split(' ', 1)
        if len(parts) == 1:
            parts = [*parts, ""]
        cmd, args = parts
        # Strip an optional command suffix (e.g. ':' in "hello:").
        cmd_suffix = self.bot.commandSuffix
        if cmd_suffix and cmd.endswith(cmd_suffix):
            cmd = cmd[: -len(cmd_suffix)]
        self.bot.log(f"Received command `{cmd}` from {self.describeUser()}")
        if cmd.startswith(self.bot.commandPrefix):
            meth = self.getCommandMethod(cmd[len(self.bot.commandPrefix) :])
        else:
            meth = None
        if not meth:
            # Not a recognized command: banter back at exclamations, show
            # the command list for a bad prefixed command, shrug otherwise.
            if message[-1] == '!':
                self.send("What you say!")
                return None
            elif cmd.startswith(self.bot.commandPrefix):
                self.send(f"I don't get this '{cmd}'...")
                meth = self.command_COMMANDS
            else:
                if self.is_private_chat:
                    self.send("Say what?")
                return None
        try:
            result = yield meth(args.strip(), **kwargs)
        except UsageError as e:
            # Bad arguments: echo the usage hint carried by the exception.
            self.send(str(e))
            return None
        except Exception as e:
            self.bot.log_err(e)
            self.send("Something bad happened (see logs)")
            return None
        return result
def splitArgs(self, args):
"""Returns list of arguments parsed by shlex.split() or
raise UsageError if failed"""
try:
return shlex.split(args)
except ValueError as e:
raise UsageError(e) from e
def command_HELLO(self, args, **kwargs):
"""say hello"""
self.send(random.choice(GREETINGS))
def command_VERSION(self, args, **kwargs):
"""show buildbot version"""
self.send(f"This is buildbot-{version} at your service")
    @defer.inlineCallbacks
    def command_LIST(self, args, **kwargs):
        """list configured builders or workers"""
        args = self.splitArgs(args)
        all = False
        num = 10
        # Optional leading argument: a count, or the literal word 'all'.
        # int() raising ValueError means args[0] exists but is not a number;
        # IndexError means no arguments were given at all.
        try:
            num = int(args[0])
            del args[0]
        except ValueError:
            if args[0] == 'all':
                all = True
                del args[0]
        except IndexError:
            pass
        if not args:
            raise UsageError(
                "Try " f"'{self.bot.commandPrefix}list [all|N] builders|workers|changes'."
            )
        if args[0] == 'builders':
            bdicts = yield self.bot.getAllBuilders()
            online_builderids = yield self.bot.getOnlineBuilders()
            response = ["I found the following builders:"]
            for bdict in bdicts:
                # Online builders are always listed; offline ones only
                # when 'all' was requested.
                if bdict['builderid'] in online_builderids:
                    response.append(bdict['name'])
                elif all:
                    response.append(bdict['name'])
                    response.append("[offline]")
            self.send(' '.join(response))
        elif args[0] == 'workers':
            workers = yield self.master.data.get(('workers',))
            response = ["I found the following workers:"]
            for worker in workers:
                # Configured workers are always listed (flagging disconnected
                # ones); unconfigured workers only with 'all'.
                if worker['configured_on']:
                    response.append(worker['name'])
                    if not worker['connected_to']:
                        response.append("[disconnected]")
                elif all:
                    response.append(worker['name'])
                    response.append("[offline]")
            self.send(' '.join(response))
        elif args[0] == 'changes':
            if all:
                # Listing every change could flood the channel; cap at 100.
                self.send(
                    "Do you really want me to list all changes? It can be thousands!\n"
                    "If you want to be flooded, specify the maximum number of changes "
                    "to show.\n"
                    "Right now, I will show up to 100 recent changes."
                )
                num = 100
            changes = yield self.master.data.get(('changes',), order=['-changeid'], limit=num)
            response = ["I found the following recent changes:"]
            for change in reversed(changes):
                change['comment'] = change['comments'].split('\n')[0]
                change['date'] = epoch2datetime(change['when_timestamp']).strftime('%Y-%m-%d %H:%M')
                response.append(
                    f"{change['comment']})\n"
                    f"Author: {change['author']}\n"
                    f"Date: {change['date']}\n"
                    f"Repository: {change['repository']}\n"
                    f"Branch: {change['branch']}\n"
                    f"Revision: {change['revision']}\n"
                )
            self.send('\n\n'.join(response))
    command_LIST.usage = (  # type: ignore[attr-defined]
        "list [all|N] builders|workers|changes - "
        "list configured builders, workers, or N recent changes"
    )
@defer.inlineCallbacks
def command_STATUS(self, args, **kwargs):
"""list status of a builder (or all builders)"""
args = self.splitArgs(args)
if not args:
which = ""
elif len(args) == 1:
which = args[0]
else:
raise UsageError("Try '" + self.bot.commandPrefix + "status _builder_'.")
response = []
if which == "":
builders = yield self.bot.getAllBuilders()
online_builderids = yield self.bot.getOnlineBuilders()
for builder in builders:
if builder['builderid'] in online_builderids:
status = yield self.bot.getBuildStatus(builder['name'], short=True)
response.append(status)
elif which == "all":
builders = yield self.bot.getAllBuilders()
for builder in builders:
status = yield self.bot.getBuildStatus(builder['name'], short=True)
response.append(status)
else:
status = yield self.bot.getBuildStatus(which)
response.append(status)
if response:
self.send('\n'.join(response))
command_STATUS.usage = "status [_which_] - list status of a builder (or all builders)" # type: ignore[attr-defined]
    @defer.inlineCallbacks
    def command_NOTIFY(self, args, **kwargs):
        """notify me about build events"""
        args = self.splitArgs(args)
        if not args:
            raise UsageError("Try '" + self.bot.commandPrefix + "notify on|off|list [_EVENT_]'.")
        action = args.pop(0)
        events = args
        if action in ("on", "on-quiet"):
            # Default subscription when no specific events were given.
            if not events:
                events = ('started', 'finished')
            self.channel.add_notification_events(events)
            # The -quiet variant skips echoing the resulting subscriptions.
            if action == "on":
                yield self.channel.list_notified_events()
            self.bot.saveNotifyEvents()
        elif action in ("off", "off-quiet"):
            # With events: remove just those; without: remove everything.
            if events:
                yield self.channel.remove_notification_events(events)
            else:
                yield self.channel.remove_all_notification_events()
            if action == "off":
                yield self.channel.list_notified_events()
            self.bot.saveNotifyEvents()
        elif action == "list":
            yield self.channel.list_notified_events()
        else:
            raise UsageError("Try '" + self.bot.commandPrefix + "notify on|off|list [_EVENT_]'.")
    command_NOTIFY.usage = (  # type: ignore[attr-defined]
        "notify on|off|list [_EVENT_] ... - notify me about build events;"
        " event should be one or more of: 'started', 'finished', 'failure',"
        " 'success', 'exception', 'problem', 'recovery', 'better', or 'worse'"
    )
    @defer.inlineCallbacks
    def command_WATCH(self, args, **kwargs):
        """announce the completion of an active build"""
        args = self.splitArgs(args)
        if len(args) != 1:
            raise UsageError("Try '" + self.bot.commandPrefix + "watch _builder_'.")
        which = args[0]
        builder = yield self.bot.getBuilder(buildername=which)
        # Get current builds on this builder.
        builds = yield self.bot.getRunningBuilds(builder['builderid'])
        if not builds:
            self.send("There are no currently running builds.")
            return
        def watchForCompleteEvent(key, msg):
            # Fires for every event on the watched build; react only to the
            # terminal ones.
            if key[-1] in ('finished', 'complete'):
                return self.channel.buildFinished(msg, watched=True)
            return None
        for build in builds:
            startConsuming = self.master.mq.startConsuming
            handle = yield startConsuming(
                watchForCompleteEvent, ('builds', str(build['buildid']), None)
            )
            # Remember the subscription so it can be cleaned up later.
            self.channel.build_subscriptions.append((build['buildid'], handle))
            url = utils.getURLForBuild(self.master, builder['builderid'], build['number'])
            if self.bot.useRevisions:
                revisions = yield self.bot.getRevisionsForBuild(build)
                r = (
                    f"Watching build on `{which}` containing revision(s) "
                    f"{','.join(revisions)} until it finishes..."
                )
            else:
                r = f"Watching build [#{build['number']}]({url}) of `{which}` until it finishes..."
            self.send(r)
    command_WATCH.usage = "watch _which_ - announce the completion of an active build"  # type: ignore[attr-defined]
    @defer.inlineCallbacks
    @dangerousCommand
    def command_FORCE(self, args, **kwargs):
        """force a build"""
        # FIXME: NEED TO THINK ABOUT!
        # Parses "force build [options] <builder> <reason>", validates the
        # branch/revision/property values against the master's configured
        # validation regexes, then submits a new buildset via the data API.
        errReply = f"Try '{self.bot.commandPrefix}{self.command_FORCE.usage}'"
        args = self.splitArgs(args)
        if not args:
            raise UsageError(errReply)
        what = args.pop(0)
        if what != "build":
            raise UsageError(errReply)
        opts = ForceOptions()
        opts.parseOptions(args)

        builderName = opts['builder']
        builder = yield self.bot.getBuilder(buildername=builderName)
        branch = opts['branch']
        revision = opts['revision']
        codebase = opts['codebase']
        project = opts['project']
        reason = opts['reason']
        props = opts['props']
        if builderName is None:
            raise UsageError("you must provide a Builder, " + errReply)

        # keep weird stuff out of the branch, revision, and properties args.
        branch_validate = self.master.config.validation['branch']
        revision_validate = self.master.config.validation['revision']
        pname_validate = self.master.config.validation['property_name']
        pval_validate = self.master.config.validation['property_value']
        if branch and not branch_validate.match(branch):
            self.bot.log(f"Force: bad branch '{branch}'")
            self.send(f"Sorry, bad branch '{branch}'")
            return
        if revision and not revision_validate.match(revision):
            self.bot.log(f"Force: bad revision '{revision}'")
            self.send(f"Sorry, bad revision '{revision}'")
            return

        properties = Properties()
        properties.master = self.master

        if props:
            # split props into name:value dict
            pdict = {}
            propertylist = props.split(",")
            for prop in propertylist:
                # split on the first "=" only, so values may contain "=".
                splitproperty = prop.split("=", 1)
                pdict[splitproperty[0]] = splitproperty[1]

            # set properties
            for pname, pvalue in pdict.items():
                if not pname_validate.match(pname) or not pval_validate.match(pvalue):
                    self.bot.log(f"Force: bad property name='{pname}', value='{pvalue}'")
                    self.send(f"Sorry, bad property name='{pname}', value='{pvalue}'")
                    return
                properties.setProperty(pname, pvalue, "Force Build Chat")

        properties.setProperty("reason", reason, "Force Build Chat")
        properties.setProperty("owner", self.describeUser(), "Force Build Chat")

        reason = f"forced: by {self.describeUser()}: {reason}"
        try:
            yield self.master.data.updates.addBuildset(
                builderids=[builder['builderid']],
                # For now, we just use
                # this as the id.
                scheduler="status.words",
                sourcestamps=[
                    {
                        'codebase': codebase,
                        'branch': branch,
                        'revision': revision,
                        'project': project,
                        'repository': "",
                    }
                ],
                reason=reason,
                properties=properties.asDict(),
                waited_for=False,
            )
        except AssertionError as e:
            # addBuildset signals bad arguments via AssertionError; report it.
            self.send("I can't: " + str(e))
        else:
            self.send("Force build successfully requested.")

    command_FORCE.usage = (  # type: ignore[attr-defined]
        "force build [--codebase=CODEBASE] [--branch=branch] "
        "[--revision=revision] [--props=prop1=val1,prop2=val2...] "
        "_which_ _reason_ - Force a build"
    )
    @defer.inlineCallbacks
    @dangerousCommand
    def command_STOP(self, args, **kwargs):
        """stop a running build"""
        # Usage: "stop build <builder> <reason>" — stops every build
        # currently running on the named builder.
        args = self.splitArgs(args)
        if len(args) < 3 or args[0] != 'build':
            raise UsageError("Try '" + self.bot.commandPrefix + "stop build _which_ _reason_'.")
        which = args[1]
        reason = ' '.join(args[2:])

        r = f"stopped: by {self.describeUser()}: {reason}"

        # find an in-progress build
        builder = yield self.bot.getBuilder(buildername=which)
        builderid = builder['builderid']
        builds = yield self.bot.getRunningBuilds(builderid)
        if not builds:
            self.send("Sorry, no build is currently running.")
            return

        for bdict in builds:
            num = bdict['number']

            # Issue a 'stop' control action against each running build.
            yield self.master.data.control(
                'stop', {'reason': r}, ('builders', builderid, 'builds', num)
            )
            if self.bot.useRevisions:
                revisions = yield self.bot.getRevisionsForBuild(bdict)
                response = f"Build containing revision(s) {','.join(revisions)} interrupted"
            else:
                url = utils.getURLForBuild(self.master, builderid, num)
                response = f"Build [#{num}]({url}) of `{which}` interrupted."
            self.send(response)

    command_STOP.usage = "stop build _which_ _reason_ - Stop a running build"  # type: ignore[attr-defined]
    @defer.inlineCallbacks
    def command_LAST(self, args, **kwargs):
        """list last build status for a builder"""
        # FIXME: NEED TO THINK ABOUT!
        # With no argument: all online builders.  With "all": every builder.
        # With a name: that single builder.
        args = self.splitArgs(args)

        if not args:
            builders = yield self.bot.getAllBuilders()
            online_builderids = yield self.bot.getOnlineBuilders()
            builders = [b for b in builders if b['builderid'] in online_builderids]
        elif len(args) == 1:
            arg = args[0]
            if arg == 'all':
                builders = yield self.bot.getAllBuilders()
            else:
                builder = yield self.bot.getBuilder(buildername=arg)
                if not builder:
                    raise UsageError("no such builder")
                builders = [builder]
        else:
            raise UsageError("Try '" + self.bot.commandPrefix + "last _builder_'.")

        messages = []

        for builder in builders:
            lastBuild = yield self.bot.getLastCompletedBuild(builder['builderid'])
            if not lastBuild:
                status = "no builds run since last restart"
            else:
                complete_at = lastBuild['complete_at']
                if complete_at:
                    complete_at = util.datetime2epoch(complete_at)
                    ago = util.fuzzyInterval(int(reactor.seconds() - complete_at))
                else:
                    # build has no completion timestamp; age unknown
                    ago = "??"
                status = self.bot.format_build_status(lastBuild)
                status = f'last build {status} ({ago} ago)'
                if lastBuild['results'] != SUCCESS:
                    # add the failure summary for non-successful builds
                    status += f": {lastBuild['state_string']}"
            messages.append(f"`{builder['name']}`: {status}")
        if messages:
            self.send('\n'.join(messages))

    command_LAST.usage = "last [_which_] - list last build status for builder _which_"  # type: ignore[attr-defined]
@classmethod
def build_commands(cls):
commands = []
for k in dir(cls):
if k.startswith('command_'):
commands.append(k[8:].lower())
commands.sort()
return commands
def describeUser(self):
if self.is_private_chat:
return self.user_id
return f"{self.user_id} on {self.channel.id}"
# commands
    def command_HELP(self, args, **kwargs):
        """give help for a command or one of it's arguments"""
        # With no args: list every command with its docstring.
        # With args: show the command's ``usage`` attribute, which may be a
        # plain string, a dict keyed by sub-argument, or a callable.
        args = self.splitArgs(args)
        if not args:
            commands = self.build_commands()
            response = []
            for command in commands:
                meth = getattr(self, 'command_' + command.upper())
                doc = getattr(meth, '__doc__', None)
                if doc:
                    response.append(f"{command} - {doc}")
            if response:
                self.send('\n'.join(response))
            return
        command = args[0]
        if command.startswith(self.bot.commandPrefix):
            # allow "help !foo" as well as "help foo"
            command = command[len(self.bot.commandPrefix) :]
        meth = getattr(self, 'command_' + command.upper(), None)
        if not meth:
            raise UsageError(f"There is no such command '{args[0]}'.")
        doc = getattr(meth, 'usage', None)
        if isinstance(doc, dict):
            if len(args) == 1:
                k = None  # command
            elif len(args) == 2:
                k = args[1]  # command arg
            else:
                k = tuple(args[1:])  # command arg subarg ...
            doc = doc.get(k, None)
        elif callable(doc):
            try:
                doc = doc(*args[1:])
            except (TypeError, ValueError):
                doc = None
        if doc:
            self.send(f"Usage: {self.bot.commandPrefix}{doc}")
        else:
            self.send("No usage info for " + ' '.join([f"'{arg}'" for arg in args]))

    command_HELP.usage = (  # type: ignore[attr-defined]
        "help [_command_ _arg_ [_subarg_ ...]] - "
        "Give help for _command_ or one of it's arguments"
    )
    def command_SOURCE(self, args, **kwargs):
        "the source code for buildbot"
        # Simple static response; ``args`` is ignored.
        self.send("My source can be found at https://github.com/buildbot/buildbot")

    command_SOURCE.usage = "source - the source code for Buildbot"  # type: ignore[attr-defined]
def command_COMMANDS(self, args, **kwargs):
"""list available commands"""
commands = self.build_commands()
str = "Buildbot commands: " + ", ".join(self.bot.commandPrefix + c for c in commands)
self.send(str)
command_COMMANDS.usage = "commands - List available commands" # type: ignore[attr-defined]
    @dangerousCommand
    def command_SHUTDOWN(self, args, **kwargs):
        """shutdown the buildbot master"""
        # FIXME: NEED TO THINK ABOUT!
        # Subcommands: check (report state), start/stop (clean shutdown
        # control), now (stop the reactor immediately).
        if args not in ('check', 'start', 'stop', 'now'):
            raise UsageError("Try '" + self.bot.commandPrefix + "shutdown check|start|stop|now'.")

        botmaster = self.channel.master.botmaster
        shuttingDown = botmaster.shuttingDown

        if args == 'check':
            if shuttingDown:
                self.send("Status: buildbot is shutting down.")
            else:
                self.send("Status: buildbot is running.")
        elif args == 'start':
            if shuttingDown:
                self.send("Shutdown already started.")
            else:
                self.send("Starting clean shutdown.")
                botmaster.cleanShutdown()
        elif args == 'stop':
            if not shuttingDown:
                self.send("There is no ongoing shutdown to stop.")
            else:
                self.send("Stopping clean shutdown.")
                botmaster.cancelCleanShutdown()
        elif args == 'now':
            # Hard shutdown: stops the whole reactor without waiting.
            self.send("Stopping buildbot.")
            reactor.stop()

    command_SHUTDOWN.usage = {
        None: "shutdown check|start|stop|now - shutdown the buildbot master",
        "check": "shutdown check - check if the buildbot master is running or shutting down",
        "start": "shutdown start - start a clean shutdown",
        "stop": "shutdown cancel - stop the clean shutdown",
        "now": "shutdown now - shutdown immediately without waiting for the builders to finish",
    }
class StatusBot(service.AsyncMultiService):
    """Abstract status bot

    Base class for chat-protocol status bots.  It creates and caches
    per-user ``Contact`` and per-channel ``Channel`` objects, persists
    per-channel notification settings in the state database, and offers
    helpers for querying builders and builds through the data API.
    """

    # Subclasses may override these to customize contact/channel behavior
    # and the command syntax of the chat protocol.
    contactClass = Contact
    channelClass = Channel
    commandPrefix = ''
    commandSuffix = None

    # Strings used when rendering builder status.
    offline_string = "offline"
    idle_string = "idle"
    running_string = "running:"

    nickname: str
    parent: Service  # type: ignore[assignment]

    def __init__(
        self, authz=None, tags=None, notify_events=None, useRevisions=False, showBlameList=False
    ):
        super().__init__()
        self.tags = tags
        if notify_events is None:
            notify_events = {}
        self.notify_events = notify_events
        self.useRevisions = useRevisions
        self.showBlameList = showBlameList
        self.authz = self.expand_authz(authz)
        # Caches: (channel, user) -> Contact, channel -> Channel.
        self.contacts = {}
        self.channels = {}

    @staticmethod
    def expand_authz(authz):
        """Normalize an authz mapping so each key is a single upper-cased command."""
        if authz is None:
            authz = {}
        expanded_authz = {}
        for cmds, val in authz.items():
            # a key may name one command or a tuple/list of commands
            if not isinstance(cmds, (tuple, list)):
                cmds = (cmds,)
            for cmd in cmds:
                expanded_authz[cmd.upper()] = val
        return expanded_authz

    def isValidUser(self, user):
        # A user is valid if any authz entry allows them explicitly
        # (True = everyone, list/tuple membership, or exact match).
        for auth in self.authz.values():
            if auth is True or (isinstance(auth, (list, tuple)) and user in auth) or user == auth:
                return True
        # If user is in '', we have already returned; otherwise check if defaults apply
        return '' not in self.authz

    def getContact(self, user, channel):
        """get a Contact instance for ``user`` on ``channel``"""
        try:
            return self.contacts[(channel, user)]
        except KeyError:
            valid = self.isValidUser(user)
            new_contact = self.contactClass(user=user, channel=self.getChannel(channel, valid))
            # only cache contacts for valid users
            if valid:
                self.contacts[(channel, user)] = new_contact
            return new_contact

    def getChannel(self, channel, valid=True):
        """Get (and cache, if ``valid``) a Channel instance for ``channel``."""
        try:
            return self.channels[channel]
        except KeyError:
            new_channel = self.channelClass(self, channel)
            if valid:
                self.channels[channel] = new_channel
                new_channel.setServiceParent(self)
            return new_channel

    def _get_object_id(self):
        # Object id used to key this bot's entries in the state database.
        return self.master.db.state.getObjectId(
            self.nickname, f'{self.__class__.__module__}.{self.__class__.__name__}'
        )

    @defer.inlineCallbacks
    def _save_channels_state(self, attr, json_type=None):
        # Persist a per-channel attribute as [(channel_id, value), ...],
        # skipping channels whose value is falsy.  ``json_type`` converts
        # the value into a JSON-serializable form.
        if json_type is None:
            json_type = lambda x: x
        data = [
            (k, v)
            for k, v in (
                (channel.id, json_type(getattr(channel, attr)))
                for channel in self.channels.values()
            )
            if v
        ]
        try:
            objectid = yield self._get_object_id()
            yield self.master.db.state.setState(objectid, attr, data)
        except Exception as err:
            # state persistence is best-effort: log and carry on
            self.log_err(err, f"saveState '{attr}'")

    @defer.inlineCallbacks
    def _load_channels_state(self, attr, setter):
        # Inverse of _save_channels_state: read the stored list and apply
        # ``setter(channel, value)`` per entry; failures are only logged.
        try:
            objectid = yield self._get_object_id()
            data = yield self.master.db.state.getState(objectid, attr, ())
        except Exception as err:
            self.log_err(err, f"loadState ({attr})")
        else:
            if data is not None:
                for c, d in data:
                    try:
                        setter(self.getChannel(c), d)
                    except Exception as err:
                        self.log_err(err, f"loadState '{attr}' ({c})")

    @defer.inlineCallbacks
    def loadState(self):
        """Restore per-channel notification settings and missing-worker sets."""
        yield self._load_channels_state('notify_events', lambda c, e: c.add_notification_events(e))
        yield self._load_channels_state('missing_workers', lambda c, w: c.missing_workers.update(w))

    @defer.inlineCallbacks
    def saveNotifyEvents(self):
        yield self._save_channels_state('notify_events', list)

    @defer.inlineCallbacks
    def saveMissingWorkers(self):
        yield self._save_channels_state('missing_workers', list)

    def send_message(self, chat, message, **kwargs):
        # Protocol-specific delivery; must be provided by subclasses.
        raise NotImplementedError()

    def _get_log_system(self, source):
        # Build the "system" tag used for twisted log context.
        if source is None:
            source = self.__class__.__name__
        try:
            parent = self.parent.name
        except AttributeError:
            parent = '-'
        name = f"{parent},{source}"
        return name

    def log(self, msg, source=None):
        log.callWithContext({"system": self._get_log_system(source)}, log.msg, msg)

    def log_err(self, error=None, why=None, source=None):
        log.callWithContext({"system": (self._get_log_system(source))}, log.err, error, why)

    def builderMatchesAnyTag(self, builder_tags):
        """Return True if any of ``builder_tags`` is in this bot's tag filter."""
        return any(tag for tag in builder_tags if tag in self.tags)

    def getRunningBuilds(self, builderid):
        """Return a Deferred yielding the incomplete builds on ``builderid``."""
        d = self.master.data.get(
            ('builds',),
            filters=[
                resultspec.Filter('builderid', 'eq', [builderid]),
                resultspec.Filter('complete', 'eq', [False]),
            ],
        )
        return d

    def getLastCompletedBuild(self, builderid):
        """Return a Deferred yielding the newest completed build, or None."""
        d = self.master.data.get(
            ('builds',),
            filters=[
                resultspec.Filter('builderid', 'eq', [builderid]),
                resultspec.Filter('complete', 'eq', [True]),
            ],
            order=['-number'],
            limit=1,
        )

        @d.addCallback
        def listAsOneOrNone(res):
            if res:
                return res[0]
            return None

        return d

    def getCurrentBuildstep(self, build):
        """Return a Deferred yielding (at most) the first incomplete step of ``build``."""
        d = self.master.data.get(
            ('builds', build['buildid'], 'steps'),
            filters=[resultspec.Filter('complete', 'eq', [False])],
            order=['number'],
            limit=1,
        )
        return d

    @defer.inlineCallbacks
    def getBuildStatus(self, which, short=False):
        """Return a one-line status summary string for builder ``which``."""
        response = f'`{which}`: '

        builder = yield self.getBuilder(buildername=which)
        builderid = builder['builderid']
        runningBuilds = yield self.getRunningBuilds(builderid)

        # pylint: disable=too-many-nested-blocks
        if not runningBuilds:
            onlineBuilders = yield self.getOnlineBuilders()
            if builderid in onlineBuilders:
                response += self.idle_string
                lastBuild = yield self.getLastCompletedBuild(builderid)
                if lastBuild:
                    complete_at = lastBuild['complete_at']
                    if complete_at:
                        complete_at = util.datetime2epoch(complete_at)
                        ago = util.fuzzyInterval(int(reactor.seconds() - complete_at))
                    else:
                        ago = "??"
                    status = self.format_build_status(lastBuild, short=short)
                    if not short:
                        status = ", " + status
                        if lastBuild['results'] != SUCCESS:
                            status_string = lastBuild.get('status_string')
                            if status_string:
                                status += ": " + status_string
                    response += f' last build {ago} ago{status}'
            else:
                response += self.offline_string
        else:
            response += self.running_string
            buildInfo = []
            for build in runningBuilds:
                step = yield self.getCurrentBuildstep(build)
                if step:
                    s = f"({step[-1]['state_string']})"
                else:
                    s = "(no current step)"
                bnum = build['number']
                url = utils.getURLForBuild(self.master, builderid, bnum)
                buildInfo.append(f"build [#{bnum}]({url}) {s}")

            response += ' ' + ', '.join(buildInfo)
        return response

    @defer.inlineCallbacks
    def getBuilder(self, buildername=None, builderid=None):
        """Look up a builder dict by name or id; raises UsageError if absent."""
        if buildername:
            bdicts = yield self.master.data.get(
                ('builders',), filters=[resultspec.Filter('name', 'eq', [buildername])]
            )
            if bdicts:
                # Could there be more than one? One is enough.
                bdict = bdicts[0]
            else:
                bdict = None
        elif builderid:
            bdict = yield self.master.data.get(('builders', builderid))
        else:
            raise UsageError("no builder specified")

        if bdict is None:
            if buildername:
                which = buildername
            else:
                which = f'number {builderid}'
            raise UsageError(f"no such builder '{which}'")
        return bdict

    def getAllBuilders(self):
        d = self.master.data.get(('builders',))
        return d

    @defer.inlineCallbacks
    def getOnlineBuilders(self):
        """Return the ids of builders configured on at least one connected worker."""
        all_workers = yield self.master.data.get(('workers',))
        online_builderids = set()
        for worker in all_workers:
            connected = worker['connected_to']
            if not connected:
                continue
            builders = worker['configured_on']
            builderids = [builder['builderid'] for builder in builders]
            online_builderids.update(builderids)
        return list(online_builderids)

    @defer.inlineCallbacks
    def getRevisionsForBuild(self, bdict):
        # FIXME: Need to get revision info! (build -> buildreq -> buildset ->
        # sourcestamps)
        return ["TODO"]

    # Human-readable phrase for each build result code.
    results_descriptions = {
        SKIPPED: "was skipped",
        SUCCESS: "completed successfully",
        WARNINGS: "completed with warnings",
        FAILURE: "failed",
        EXCEPTION: "stopped with exception",
        RETRY: "has been retried",
        CANCELLED: "was cancelled",
    }

    # Result codes ordered from least to most severe.
    results_severity = (SKIPPED, SUCCESS, WARNINGS, FAILURE, CANCELLED, EXCEPTION)

    def format_build_status(self, build, short=False):
        """Return a human-readable phrase for ``build['results']`` (subclasses may add color)."""
        return self.results_descriptions[build['results']]
class ThrottledClientFactory(protocol.ClientFactory):
    """ClientFactory that waits before reconnecting, to avoid hammering the server.

    The class-level default delays are randomized once, at class-definition
    time; instances may override them via the constructor.
    """

    lostDelay = random.randint(1, 5)
    failedDelay = random.randint(45, 60)

    def __init__(self, lostDelay=None, failedDelay=None):
        # Override the class-level defaults only when explicitly supplied.
        for attr, value in (("lostDelay", lostDelay), ("failedDelay", failedDelay)):
            if value is not None:
                setattr(self, attr, value)

    def clientConnectionLost(self, connector, reason):
        # Reconnect after a short delay when an established connection drops.
        reactor.callLater(self.lostDelay, connector.connect)

    def clientConnectionFailed(self, connector, reason):
        # Wait longer before retrying after an outright connection failure.
        reactor.callLater(self.failedDelay, connector.connect)
class WebhookResource(resource.Resource, service.AsyncService):
    """
    This is a service to be used by chat bots based on web-hooks.
    It automatically registers and removes itself on the buildbot-www root
    resource and forwards incoming requests to the ``process_webhook``
    method of its parent.
    """

    def __init__(self, path):
        resource.Resource.__init__(self)
        # Find the buildbot-www root resource to attach this endpoint to.
        www = get_plugins('www', None, load_now=True)
        if 'base' not in www:
            raise RuntimeError("could not find buildbot-www; is it installed?")
        self._root = www.get('base').resource
        self.path = path

    def startService(self):
        # Publish this resource under ``self.path`` on the www root.
        self._root.putChild(unicode2bytes(self.path), self)
        try:
            super().startService()
        except AttributeError:
            # a base class may not implement startService; ignore if absent
            pass

    def stopService(self):
        try:
            super().stopService()
        except AttributeError:
            pass
        # Unpublish the resource again.
        self._root.delEntity(unicode2bytes(self.path))

    def render_GET(self, request):
        # GET is handled identically to POST.
        return self.render_POST(request)

    def render_POST(self, request):
        try:
            d = self.parent.process_webhook(request)
        except Exception:
            # Turn a synchronous failure into a failed Deferred so both
            # paths are reported through ``err`` below.
            d = defer.fail()

        def ok(_):
            # 202 Accepted: the webhook was handed off for processing.
            request.setResponseCode(202)
            request.finish()

        def err(error):
            try:
                self.parent.log_err(error, "processing telegram request", self.__class__.__name__)
            except AttributeError:
                # parent may not provide log_err; fall back to twisted's log
                log.err(error, "processing telegram request")
            request.setResponseCode(500)
            request.finish()

        d.addCallbacks(ok, err)
        # The response is completed asynchronously when the Deferred fires.
        return server.NOT_DONE_YET
| 50,889 | Python | .py | 1,258 | 29.655008 | 120 | 0.573839 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,830 | pushover.py | buildbot_buildbot/master/buildbot/reporters/pushover.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from twisted.internet import defer
from twisted.python import log as twlog
from buildbot import config
from buildbot.process.results import CANCELLED
from buildbot.process.results import EXCEPTION
from buildbot.process.results import FAILURE
from buildbot.process.results import SUCCESS
from buildbot.process.results import WARNINGS
from buildbot.reporters.base import ReporterBase
from buildbot.reporters.generators.build import BuildStatusGenerator
from buildbot.reporters.message import MessageFormatter
from buildbot.util import httpclientservice
from .utils import merge_reports_prop
from .utils import merge_reports_prop_take_first
ENCODING = 'utf8'

# Keys accepted in the ``otherParams`` argument of PushoverNotifier; these
# correspond to optional parameters of the Pushover messages API.
VALID_PARAMS = {
    "sound",
    "callback",
    "timestamp",
    "url",
    "url_title",
    "device",
    "retry",
    "expire",
    "html",
}

# Build results mapped to the keys used to look up a priority in the
# user-supplied ``priorities`` dict.
PRIORITIES = {
    CANCELLED: 'cancelled',
    EXCEPTION: 'exception',
    FAILURE: 'failing',
    SUCCESS: 'passing',
    WARNINGS: 'warnings',
}

# Default HTML message body used when no custom generators are configured.
# NOTE(review): there is no space between "</a>" and "of" in the rendered
# text — confirm this is intended before changing it.
DEFAULT_MSG_TEMPLATE = (
    'The Buildbot has detected a <a href="{{ build_url }}">{{ status_detected }}</a>'
    + 'of <i>{{ buildername }}</i> while building {{ projects }} on {{ workername }}.'
)
class PushoverNotifier(ReporterBase):
    """Reporter that sends build status notifications through the Pushover API."""

    def checkConfig(self, user_key, api_token, priorities=None, otherParams=None, generators=None):
        # ``otherParams`` may only contain keys Pushover understands.
        if generators is None:
            generators = self._create_default_generators()

        super().checkConfig(generators=generators)

        if otherParams is not None and set(otherParams.keys()) - VALID_PARAMS:
            config.error(
                "otherParams can be only 'sound', 'callback', 'timestamp', "
                "'url', 'url_title', 'device', 'retry', 'expire', or 'html'"
            )

    @defer.inlineCallbacks
    def reconfigService(
        self, user_key, api_token, priorities=None, otherParams=None, generators=None
    ):
        # ``user_key``/``api_token`` may be secret placeholders; resolve first.
        user_key, api_token = yield self.renderSecrets(user_key, api_token)

        if generators is None:
            generators = self._create_default_generators()

        yield super().reconfigService(generators=generators)

        self.user_key = user_key
        self.api_token = api_token
        if priorities is None:
            self.priorities = {}
        else:
            self.priorities = priorities
        if otherParams is None:
            self.otherParams = {}
        else:
            self.otherParams = otherParams
        self._http = yield httpclientservice.HTTPSession(
            self.master.httpservice, 'https://api.pushover.net'
        )

    def _create_default_generators(self):
        # Default: one HTML build-status message per finished build.
        formatter = MessageFormatter(template_type='html', template=DEFAULT_MSG_TEMPLATE)
        return [BuildStatusGenerator(message_formatter=formatter)]

    def sendMessage(self, reports):
        """Merge the given reports into one Pushover message and send it."""
        body = merge_reports_prop(reports, 'body')
        subject = merge_reports_prop_take_first(reports, 'subject')
        # NOTE(review): local ``type`` shadows the builtin; consider renaming.
        type = merge_reports_prop_take_first(reports, 'type')
        results = merge_reports_prop(reports, 'results')
        worker = merge_reports_prop_take_first(reports, 'worker')

        msg = {'message': body, 'title': subject}
        if type == 'html':
            msg['html'] = '1'
        try:
            # A missing-worker report uses the 'worker_missing' priority;
            # otherwise the priority is looked up by build result.
            priority_name = PRIORITIES[results] if worker is None else 'worker_missing'
            msg['priority'] = self.priorities[priority_name]
        except KeyError:
            # no priority configured for this outcome; Pushover's default applies
            pass
        return self.sendNotification(msg)

    def sendNotification(self, params):
        """POST a single notification to the Pushover messages endpoint."""
        twlog.msg("sending pushover notification")
        params.update({"user": self.user_key, "token": self.api_token})
        params.update(self.otherParams)
        return self._http.post('/1/messages.json', params=params)
| 4,400 | Python | .py | 105 | 35.695238 | 99 | 0.691534 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,831 | gerrit.py | buildbot_buildbot/master/buildbot/reporters/gerrit.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
"""
Push events to Gerrit
"""
from __future__ import annotations
import time
import warnings
from packaging.version import parse as parse_version
from twisted.internet import defer
from twisted.internet import reactor
from twisted.internet.protocol import ProcessProtocol
from twisted.python import log
from zope.interface import implementer
from buildbot import interfaces
from buildbot.process.results import EXCEPTION
from buildbot.process.results import FAILURE
from buildbot.process.results import RETRY
from buildbot.process.results import SUCCESS
from buildbot.process.results import WARNINGS
from buildbot.process.results import Results
from buildbot.reporters import utils
from buildbot.reporters.base import ReporterBase
from buildbot.util import bytes2unicode
# Cache the version that the gerrit server is running for this many seconds
GERRIT_VERSION_CACHE_TIMEOUT = 600
GERRIT_LABEL_VERIFIED = 'Verified'
GERRIT_LABEL_REVIEWED = 'Code-Review'
def makeReviewResult(message, *labels):
"""
helper to produce a review result
"""
return {"message": message, "labels": dict(labels)}
def _old_add_label(label, value):
if label == GERRIT_LABEL_VERIFIED:
return [f"--verified {int(value)}"]
elif label == GERRIT_LABEL_REVIEWED:
return [f"--code-review {int(value)}"]
warnings.warn(
'Gerrit older than 2.6 does not support custom labels. ' f'Setting {label} is ignored.',
stacklevel=1,
)
return []
def _new_add_label(label, value):
return [f"--label {label}={int(value)}"]
def defaultReviewCB(builderName, build, result, master, arg):
    """Default per-build review callback.

    Returns a review whose message describes the build outcome and whose
    Verified label is +1 on success and -1 otherwise.  A RETRY result
    yields a message-less review so nothing is posted for it.
    """
    if result == RETRY:
        return makeReviewResult(None)

    message = "Buildbot finished compiling your patchset\n"
    message += f"on configuration: {builderName}\n"
    message += f"The result is: {Results[result].upper()}\n"

    # Explicit conditional replaces the old ``result == SUCCESS or -1``
    # idiom, which evaluated to True/-1 (equivalent, since True == 1,
    # but the boolean-or trick is an anti-pattern).
    verified = 1 if result == SUCCESS else -1
    return makeReviewResult(message, (GERRIT_LABEL_VERIFIED, verified))
def defaultSummaryCB(buildInfoList, results, master, arg):
    """Default buildset-summary callback: one line per build plus a Verified vote.

    The Verified label is +1 only if every build succeeded, -1 otherwise
    (including the no-builds case).
    """
    saw_success = False
    saw_failure = False
    lines = []
    for info in buildInfoList:
        line = f"Builder {info['name']} {info['resultText']} ({info['text']})"
        link = info.get('url', None)
        if link:
            line += f" - {link}"
        else:
            line += "."
        lines.append(line)
        if info['result'] == SUCCESS:
            saw_success = True
        else:
            saw_failure = True

    verified = 1 if saw_success and not saw_failure else -1
    return makeReviewResult('\n\n'.join(lines), (GERRIT_LABEL_VERIFIED, verified))
# These are just sentinel values for GerritStatusPush.__init__ args
class DEFAULT_REVIEW:
    """Sentinel: use the default per-build review callback."""

    pass


class DEFAULT_SUMMARY:
    """Sentinel: use the default summary callback."""

    pass
@defer.inlineCallbacks
def extract_project_revision(master, report):
    """Extract the (project, revision) pair that ``report`` refers to.

    Properties are taken from the first build of the report, falling back
    to the buildset properties.  Returns (None, None) when the change
    cannot be identified.
    """
    props = None
    if report["builds"]:
        props = report["builds"][0].get("properties", None)

    if props is None:
        props = yield master.data.get(("buildsets", report["buildset"]["bsid"], "properties"))

    def get_property(props, name):
        # property values are stored as sequences; element 0 is the value itself
        if props is None:
            return None
        return props.get(name, [None])[0]

    # Gerrit + Repo
    # ``repo_downloads`` lists "<project> <change>" entries and
    # ``repo_downloaded`` is a flat "<change> <revision> ..." string; the
    # pairs must line up (2 downloaded tokens per download entry).
    downloads = get_property(props, "repo_downloads")
    downloaded = get_property(props, "repo_downloaded")
    if downloads is not None and downloaded is not None:
        downloaded = downloaded.split(" ")
        if downloads and 2 * len(downloads) == len(downloaded):
            for i, download in enumerate(downloads):
                try:
                    project, change1 = download.split(" ")
                except ValueError:
                    return None, None  # something is wrong, abort

                change2 = downloaded[2 * i]
                revision = downloaded[2 * i + 1]
                if change1 == change2:
                    return project, revision
                else:
                    return None, None
        return None, None

    # Gerrit + Git
    # used only to verify Gerrit source
    if get_property(props, "event.change.id") is not None:
        project = get_property(props, "event.change.project")
        codebase = get_property(props, "codebase")
        revision = (
            get_property(props, "event.patchSet.revision")
            or get_property(props, "got_revision")
            or get_property(props, "revision")
        )
        if isinstance(revision, dict):
            # in case the revision is a per-codebase mapping, take the
            # revision for the current codebase
            if codebase is not None:
                revision = revision[codebase]
            else:
                revision = None
        return project, revision

    return None, None
class GerritStatusGeneratorBase:
    """Shared plumbing for Gerrit report generators: builder filtering and
    fetching build details via the data API."""

    def __init__(self, callback, callback_arg, builders, want_steps, want_logs):
        # ``builders`` of None means "report every builder".
        self.callback = callback
        self.callback_arg = callback_arg
        self.builders = builders
        self.want_steps = want_steps
        self.want_logs = want_logs

    def is_build_reported(self, build):
        """Return True if the build's builder passes the ``builders`` filter."""
        if self.builders is None:
            return True
        return build["builder"]["name"] in self.builders

    @defer.inlineCallbacks
    def get_build_details(self, master, build):
        """Populate ``build`` in place with properties (and optionally steps)."""
        buildrequest = yield master.data.get(("buildrequests", build["buildrequestid"]))
        buildset = yield master.data.get(("buildsets", buildrequest["buildsetid"]))
        yield utils.getDetailsForBuilds(
            master, buildset, [build], want_properties=True, want_steps=self.want_steps
        )
@implementer(interfaces.IReportGenerator)
class GerritBuildSetStatusGenerator(GerritStatusGeneratorBase):
    """Generate one Gerrit review summarizing a completed buildset."""

    wanted_event_keys = [
        ("buildsets", None, "complete"),
    ]

    def check(self):
        # nothing to validate
        pass

    @defer.inlineCallbacks
    def generate(self, master, reporter, key, message):
        bsid = message["bsid"]
        res = yield utils.getDetailsForBuildset(
            master,
            bsid,
            want_properties=True,
            want_steps=self.want_steps,
            want_logs=self.want_logs,
            want_logs_content=self.want_logs,
        )

        builds = res["builds"]
        buildset = res["buildset"]

        # Apply the ``builders`` filter before reporting anything.
        builds = [build for build in builds if self.is_build_reported(build)]

        if not builds:
            return None

        def get_build_info(build):
            # Summarize one build for the summary callback.
            result = build["results"]
            resultText = {
                SUCCESS: "succeeded",
                FAILURE: "failed",
                WARNINGS: "completed with warnings",
                EXCEPTION: "encountered an exception",
            }.get(result, f"completed with unknown result {result}")

            return {
                "name": build["builder"]["name"],
                "result": result,
                "resultText": resultText,
                "text": build["state_string"],
                "url": utils.getURLForBuild(master, build["builder"]["builderid"], build["number"]),
                "build": build,
            }

        build_info_list = sorted(
            [get_build_info(build) for build in builds], key=lambda bi: bi["name"]
        )

        result = yield self.callback(
            build_info_list, Results[buildset["results"]], master, self.callback_arg
        )

        return {
            "body": result.get("message", None),
            "extra_info": {
                "labels": result.get("labels"),
            },
            "builds": [builds[0]],
            "buildset": buildset,
        }
@implementer(interfaces.IReportGenerator)
class GerritBuildStartStatusGenerator(GerritStatusGeneratorBase):
    """Generate a Gerrit review when a build starts."""

    wanted_event_keys = [
        ("builds", None, "new"),
    ]

    def check(self):
        # nothing to validate
        pass

    @defer.inlineCallbacks
    def generate(self, master, reporter, key, message):
        build = message
        yield self.get_build_details(master, build)

        if not self.is_build_reported(build):
            return None

        result = yield self.callback(build["builder"]["name"], build, self.callback_arg)

        return {
            "body": result.get("message", None),
            "extra_info": {
                "labels": result.get("labels"),
            },
            "builds": [build],
            "buildset": build["buildset"],
        }
@implementer(interfaces.IReportGenerator)
class GerritBuildEndStatusGenerator(GerritStatusGeneratorBase):
    """Generate a Gerrit review when a build finishes."""

    wanted_event_keys = [
        ('builds', None, 'finished'),
    ]

    def check(self):
        # nothing to validate
        pass

    @defer.inlineCallbacks
    def generate(self, master, reporter, key, message):
        build = message
        yield self.get_build_details(master, build)

        if not self.is_build_reported(build):
            return None

        # unlike the start generator, the callback also receives the result
        result = yield self.callback(
            build['builder']['name'], build, build['results'], master, self.callback_arg
        )

        return {
            "body": result.get("message", None),
            "extra_info": {
                "labels": result.get("labels"),
            },
            "builds": [build],
            "buildset": build["buildset"],
        }
class GerritStatusPush(ReporterBase):
    """Event streamer to a gerrit ssh server."""

    name: str | None = "GerritStatusPush"  # type: ignore[assignment]
    # Connection settings, populated by reconfigService().
    gerrit_server = None
    gerrit_username = None
    gerrit_port = None
    # Cached ``gerrit version`` result and the time it was fetched
    # (see GERRIT_VERSION_CACHE_TIMEOUT).
    gerrit_version_time = None
    gerrit_version = None
    gerrit_identity_file = None
    _gerrit_notify = None
    def checkConfig(
        self,
        server,
        username,
        port=29418,
        identity_file=None,
        notify=None,
        generators=None,
        **kwargs,
    ):
        # The default buildset-summary generator is always appended, even
        # when the user supplies their own generators.
        if generators is None:
            generators = []
        generators.append(
            GerritBuildSetStatusGenerator(
                callback=defaultSummaryCB,
                callback_arg=None,
                builders=None,
                want_steps=False,
                want_logs=False,
            )
        )

        super().checkConfig(generators=generators, **kwargs)
    def reconfigService(
        self,
        server,
        username,
        port=29418,
        identity_file=None,
        notify=None,
        generators=None,
        **kwargs,
    ):
        # Store the ssh connection settings and reset the version cache.
        self.gerrit_server = server
        self.gerrit_username = username
        self.gerrit_port = port
        self.gerrit_version = None
        self.gerrit_version_time = 0
        self.gerrit_identity_file = identity_file
        self._gerrit_notify = notify

        # Same default-generator handling as checkConfig().
        if generators is None:
            generators = []
        generators.append(
            GerritBuildSetStatusGenerator(
                callback=defaultSummaryCB,
                callback_arg=None,
                builders=None,
                want_steps=False,
                want_logs=False,
            )
        )

        super().reconfigService(generators=generators, **kwargs)
def _gerritCmd(self, *args):
"""Construct a command as a list of strings suitable for
:func:`subprocess.call`.
"""
if self.gerrit_identity_file is not None:
options = ['-i', self.gerrit_identity_file]
else:
options = []
return [
'ssh',
'-o',
'BatchMode=yes',
*options,
'@'.join((self.gerrit_username, self.gerrit_server)),
'-p',
str(self.gerrit_port),
'gerrit',
*list(args),
]
    class VersionPP(ProcessProtocol):
        """ProcessProtocol that parses ``gerrit version`` output and, on a
        clean exit, passes the parsed version to ``func``."""

        def __init__(self, func):
            self.func = func
            self.gerrit_version = None

        def outReceived(self, data):
            # Expected stdout: b"gerrit version <version>"
            vstr = b"gerrit version "
            if not data.startswith(vstr):
                log.msg(b"Error: Cannot interpret gerrit version info: " + data)
                return
            vers = data[len(vstr) :].strip()
            log.msg(b"gerrit version: " + vers)
            self.gerrit_version = parse_version(bytes2unicode(vers))

        def errReceived(self, data):
            log.msg(b"gerriterr: " + data)

        def processEnded(self, reason):
            # Only invoke the continuation on a zero exit with a parsed version.
            if reason.value.exitCode:
                log.msg("gerrit version status: ERROR:", reason)
                return
            if self.gerrit_version:
                self.func(self.gerrit_version)
def getCachedVersion(self):
if self.gerrit_version is None:
return None
if time.time() - self.gerrit_version_time > GERRIT_VERSION_CACHE_TIMEOUT:
# cached version has expired
self.gerrit_version = None
return self.gerrit_version
def processVersion(self, gerrit_version, func):
self.gerrit_version = gerrit_version
self.gerrit_version_time = time.time()
func()
def callWithVersion(self, func):
command = self._gerritCmd("version")
def callback(gerrit_version):
return self.processVersion(gerrit_version, func)
self.spawnProcess(self.VersionPP(callback), command[0], command, env=None)
class LocalPP(ProcessProtocol):
def __init__(self, status):
self.status = status
def outReceived(self, data):
log.msg("gerritout:", data)
def errReceived(self, data):
log.msg("gerriterr:", data)
def processEnded(self, reason):
if reason.value.exitCode:
log.msg("gerrit status: ERROR:", reason)
else:
log.msg("gerrit status: OK")
@defer.inlineCallbacks
def sendMessage(self, reports):
report = reports[0]
project, revision = yield extract_project_revision(self.master, report)
if report["body"] is None or project is None or revision is None:
return None
labels = None
extra_info = report.get("extra_info", None)
if extra_info is not None:
labels = extra_info.get("labels", None)
if labels is None and report.get("builds", None):
# At least one build
success = False
failure = False
pending = False
for build in report["builds"]:
if build["results"] is None:
pending = True
elif build["results"] == SUCCESS:
success = True
else:
failure = True
if failure:
verified = -1
elif pending:
verified = 0
elif success:
verified = 1
else:
verified = -1
labels = {GERRIT_LABEL_VERIFIED: verified}
self.send_code_review(project, revision, report["body"], labels)
return None
def send_code_review(self, project, revision, message, labels):
gerrit_version = self.getCachedVersion()
if gerrit_version is None:
self.callWithVersion(lambda: self.send_code_review(project, revision, message, labels))
return
assert gerrit_version
command = self._gerritCmd("review", f"--project {project}")
if gerrit_version >= parse_version("2.13"):
command.append('--tag autogenerated:buildbot')
if self._gerrit_notify is not None:
command.append(f'--notify {self._gerrit_notify!s}')
if message:
message = message.replace("'", "\"")
command.append(f"--message '{message}'")
if labels:
if gerrit_version < parse_version("2.6"):
add_label = _old_add_label
else:
add_label = _new_add_label
for label, value in labels.items():
command.extend(add_label(label, value))
command.append(revision)
command = [str(s) for s in command]
self.spawnProcess(self.LocalPP(self), command[0], command, env=None)
def spawnProcess(self, *arg, **kw):
reactor.spawnProcess(*arg, **kw)
| 16,788 | Python | .py | 440 | 28.520455 | 100 | 0.600185 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,832 | utils.py | buildbot_buildbot/master/buildbot/reporters/utils.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import annotations
import dataclasses
from collections import UserList
from typing import TYPE_CHECKING
from twisted.internet import defer
from twisted.python import log
from buildbot.data import resultspec
from buildbot.process.properties import renderer
from buildbot.process.results import RETRY
from buildbot.util import flatten
if TYPE_CHECKING:
from buildbot.db.buildrequests import BuildRequestModel
@defer.inlineCallbacks
def getPreviousBuild(master, build):
    """Return the latest earlier build of the same builder, or None.

    Walks build numbers downwards from ``build['number'] - 1`` and returns the
    first build found whose result is not RETRY. Naive n-1 algorithm; what to
    skip (forced builds, rebuilds, ...) is still an open question —
    improvements welcome.
    """
    builderid = build['builderid']
    for number in range(build['number'] - 1, -1, -1):
        candidate = yield master.data.get(("builders", builderid, "builds", number))
        if candidate and candidate['results'] != RETRY:
            return candidate
    return None
@defer.inlineCallbacks
def getDetailsForBuildset(
    master,
    bsid,
    want_properties=False,
    want_steps=False,
    want_previous_build=False,
    want_logs=False,
    add_logs=None,
    want_logs_content=False,
):
    """Fetch a buildset and all of its builds from the data API.

    Returns ``{"buildset": <buildset>, "builds": [<build>, ...]}``. Each build
    dict is enriched in place by getDetailsForBuilds() according to the
    ``want_*``/``add_logs`` flags.
    """
    # Here we will do a bunch of data api calls on behalf of the reporters
    # We do try to make *some* calls in parallel with the help of gatherResults, but don't commit
    # to much in that. The idea is to do parallelism while keeping the code readable
    # and maintainable.

    # first, just get the buildset and all build requests for our buildset id
    dl = [
        master.data.get(("buildsets", bsid)),
        master.data.get(
            ('buildrequests',), filters=[resultspec.Filter('buildsetid', 'eq', [bsid])]
        ),
    ]
    (buildset, breqs) = yield defer.gatherResults(dl)
    # next, get the bdictlist for each build request
    dl = [master.data.get(("buildrequests", breq['buildrequestid'], 'builds')) for breq in breqs]
    builds = yield defer.gatherResults(dl)
    # Each request may map to several builds; flatten into one list.
    builds = flatten(builds, types=(list, UserList))
    if builds:
        yield getDetailsForBuilds(
            master,
            buildset,
            builds,
            want_properties=want_properties,
            want_steps=want_steps,
            want_previous_build=want_previous_build,
            want_logs=want_logs,
            add_logs=add_logs,
            want_logs_content=want_logs_content,
        )
    return {"buildset": buildset, "builds": builds}
@defer.inlineCallbacks
def getDetailsForBuild(
    master,
    build,
    want_properties=False,
    want_steps=False,
    want_previous_build=False,
    want_logs=False,
    add_logs=None,
    want_logs_content=False,
):
    """Enrich a single build dict in place with request/set/parent info.

    Adds ``buildrequest``, ``buildset``, ``parentbuild`` and ``parentbuilder``
    keys (the latter two are None when the buildset has no parent build), then
    delegates the remaining enrichment to getDetailsForBuilds(). Returns the
    result of that call.
    """
    buildrequest = yield master.data.get(("buildrequests", build['buildrequestid']))
    buildset = yield master.data.get(("buildsets", buildrequest['buildsetid']))
    build['buildrequest'] = buildrequest
    build['buildset'] = buildset

    parentbuild = None
    parentbuilder = None
    # Resolve parent build/builder only when this buildset was triggered by
    # another build.
    if buildset['parent_buildid']:
        parentbuild = yield master.data.get(("builds", buildset['parent_buildid']))
        parentbuilder = yield master.data.get(("builders", parentbuild['builderid']))
    build['parentbuild'] = parentbuild
    build['parentbuilder'] = parentbuilder

    ret = yield getDetailsForBuilds(
        master,
        buildset,
        [build],
        want_properties=want_properties,
        want_steps=want_steps,
        want_previous_build=want_previous_build,
        want_logs=want_logs,
        add_logs=add_logs,
        want_logs_content=want_logs_content,
    )
    return ret
@defer.inlineCallbacks
def get_details_for_buildrequest(master, buildrequest: BuildRequestModel, build):
    """Fill a build dict for a build request that has no build yet.

    Mutates ``build`` in place: attaches the request (as a plain dict), its
    buildset and builder, a build-request URL, and marks it incomplete with
    ``results=None``.
    """
    buildset = yield master.data.get(("buildsets", buildrequest.buildsetid))
    builder = yield master.data.get(("builders", buildrequest.builderid))

    build['buildrequest'] = dataclasses.asdict(buildrequest)
    build['buildset'] = buildset
    build['builderid'] = buildrequest.builderid
    build['builder'] = builder
    build['url'] = getURLForBuildrequest(master, buildrequest.buildrequestid)
    build['results'] = None
    build['complete'] = False
def should_attach_log(logs_config, log):
    """Decide whether a log's content should be fetched and attached.

    ``logs_config`` is either a bool (attach all / none) or a collection of
    accepted names; a log matches either by its bare name or by its
    step-qualified name ``"<stepname>.<name>"``.
    """
    if isinstance(logs_config, bool):
        return logs_config
    short_name = log['name']
    qualified_name = f"{log['stepname']}.{short_name}"
    return short_name in logs_config or qualified_name in logs_config
@defer.inlineCallbacks
def getDetailsForBuilds(
    master,
    buildset,
    builds,
    want_properties=False,
    want_steps=False,
    want_previous_build=False,
    want_logs=False,
    add_logs=None,
    want_logs_content=False,
):
    """Enrich each build dict in ``builds`` in place.

    Depending on the flags, adds ``builder``, ``buildset``, ``url``,
    ``properties``, ``steps`` (with per-step ``logs`` incl. web/raw URLs and
    optionally ``content``), and ``prev_build``. ``add_logs`` (deprecated in
    favor of ``want_logs_content``, judging by the precedence below) and
    ``want_logs_content`` may be a bool or a list of log names accepted by
    should_attach_log(). Requesting log content implies ``want_logs``, which
    implies ``want_steps``.
    """
    builderids = {build['builderid'] for build in builds}

    builders = yield defer.gatherResults([master.data.get(("builders", _id)) for _id in builderids])

    buildersbyid = {builder['builderid']: builder for builder in builders}

    if want_properties:
        buildproperties = yield defer.gatherResults([
            master.data.get(("builds", build['buildid'], 'properties')) for build in builds
        ])
    else:  # we still need a list for the big zip
        buildproperties = list(range(len(builds)))

    if want_previous_build:
        prev_builds = yield defer.gatherResults([
            getPreviousBuild(master, build) for build in builds
        ])
    else:  # we still need a list for the big zip
        prev_builds = list(range(len(builds)))

    # add_logs takes precedence over want_logs_content when both are given.
    if add_logs is not None:
        logs_config = add_logs
    elif want_logs_content is not None:
        logs_config = want_logs_content
    else:
        logs_config = False

    if logs_config is not False:
        want_logs = True
    if want_logs:
        want_steps = True

    if want_steps:  # pylint: disable=too-many-nested-blocks
        buildsteps = yield defer.gatherResults([
            master.data.get(("builds", build['buildid'], 'steps')) for build in builds
        ])
        if want_logs:
            for build, build_steps in zip(builds, buildsteps):
                for s in build_steps:
                    logs = yield master.data.get(("steps", s['stepid'], 'logs'))
                    s['logs'] = list(logs)
                    for l in s['logs']:
                        l['stepname'] = s['name']
                        l['url'] = get_url_for_log(
                            master, build['builderid'], build['number'], s['number'], l['slug']
                        )
                        l['url_raw'] = get_url_for_log_raw(master, l['logid'], 'raw')
                        l['url_raw_inline'] = get_url_for_log_raw(master, l['logid'], 'raw_inline')
                        if should_attach_log(logs_config, l):
                            l['content'] = yield master.data.get(("logs", l['logid'], 'contents'))

    else:  # we still need a list for the big zip
        buildsteps = list(range(len(builds)))

    # a big zip to connect everything together
    for build, properties, steps, prev in zip(builds, buildproperties, buildsteps, prev_builds):
        build['builder'] = buildersbyid[build['builderid']]
        build['buildset'] = buildset
        build['url'] = getURLForBuild(master, build['builderid'], build['number'])

        if want_properties:
            build['properties'] = properties

        if want_steps:
            build['steps'] = list(steps)

        if want_previous_build:
            build['prev_build'] = prev
# perhaps we need data api for users with sourcestamps/:id/users
@defer.inlineCallbacks
def getResponsibleUsersForSourceStamp(master, sourcestampid):
    """Return the sorted blamelist for one sourcestamp.

    The blamelist is the set of change authors plus, when the sourcestamp
    carries a patch, the patch author.
    """
    changes, sourcestamp = yield defer.gatherResults([
        master.data.get(("sourcestamps", sourcestampid, "changes")),
        master.data.get(("sourcestamps", sourcestampid)),
    ])
    # normally, we get only one change, but just assume there might be several
    authors = {change['author'] for change in changes}
    # Add patch author to blamelist
    if 'patch' in sourcestamp and sourcestamp['patch'] is not None:
        authors.add(sourcestamp['patch']['author'])
    return sorted(authors)
# perhaps we need data api for users with builds/:id/users
@defer.inlineCallbacks
def getResponsibleUsersForBuild(master, buildid):
    """Return the sorted blamelist for one build.

    Combines the authors of the build's changes with the ``owner`` /
    ``owners`` build properties.
    """
    changes, properties = yield defer.gatherResults([
        master.data.get(("builds", buildid, "changes")),
        master.data.get(("builds", buildid, 'properties')),
    ])
    # start from the authors of all attached changes
    recipients = {change['author'] for change in changes}

    # fold in the 'owner' property (value, source) tuple
    if 'owner' in properties:
        owner = properties['owner'][0]
        if isinstance(owner, str):
            recipients.add(owner)
        else:
            # unexpected shape: owner should be a single string
            recipients.update(owner)
            log.msg(f"Warning: owner property is a list for buildid {buildid}. ")
            log.msg(f"Please report a bug: changes: {changes}. properties: {properties}")

    # fold in the 'owners' property, which is expected to be a list
    if 'owners' in properties:
        recipients.update(properties['owners'][0])

    return sorted(recipients)
# perhaps we need data api for users with buildsets/:id/users
@defer.inlineCallbacks
def get_responsible_users_for_buildset(master, buildsetid):
    """Return the owner of a buildset as a one-element list, or []."""
    props = yield master.data.get(("buildsets", buildsetid, "properties"))
    # TODO: This currently does not track what changes were in the buildset. getChangesForBuild()
    # would walk the change graph until it finds last successful build and uses the authors of
    # the changes as blame list. Probably this needs to be done here too
    owner = props.get("owner", None)
    return [owner[0]] if owner else []
def getURLForBuild(master, builderid, build_number):
    """Return the absolute web-UI URL for a build."""
    return f"{master.config.buildbotURL}#/builders/{builderid}/builds/{build_number}"
def getURLForBuildrequest(master, buildrequestid):
    """Return the absolute web-UI URL for a build request."""
    prefix = master.config.buildbotURL
    return prefix + f"#/buildrequests/{buildrequestid}"
def get_url_for_log(master, builderid, build_number, step_number, log_slug):
    """Return the absolute web-UI URL for a step's log."""
    return (
        f"{master.config.buildbotURL}#/builders/{builderid}/builds/{build_number}/"
        f"steps/{step_number}/logs/{log_slug}"
    )
def get_url_for_log_raw(master, logid, suffix):
    """Return the REST-API URL for a log's raw download endpoint."""
    base = master.config.buildbotURL
    return f"{base}api/v2/logs/{logid}/{suffix}"
@renderer
def URLForBuild(props):
    # Renderable that resolves, at render time, to the URL of the build the
    # properties belong to.
    build = props.getBuild()
    return build.getUrl()
def merge_reports_prop(reports, prop):
    """Combine the values of ``prop`` across reports with ``+``.

    Entries where the key is absent or None are skipped; returns None when
    nothing contributed a value.
    """
    merged = None
    for report in reports:
        value = report.get(prop)
        if value is None:
            continue
        merged = value if merged is None else merged + value
    return merged
def merge_reports_prop_take_first(reports, prop):
    """Return the first non-None value of ``prop`` among reports, or None."""
    return next(
        (report[prop] for report in reports if prop in report and report[prop] is not None),
        None,
    )
| 11,697 | Python | .py | 287 | 33.937282 | 100 | 0.669958 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,833 | bitbucketserver.py | buildbot_buildbot/master/buildbot/reporters/bitbucketserver.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import annotations
import datetime
import re
from urllib.parse import urlparse
from twisted.internet import defer
from twisted.python import log
from buildbot import config
from buildbot.plugins import util
from buildbot.process.properties import Interpolate
from buildbot.process.properties import Properties
from buildbot.process.results import SUCCESS
from buildbot.reporters.base import ReporterBase
from buildbot.reporters.generators.build import BuildStartEndStatusGenerator
from buildbot.reporters.generators.build import BuildStatusGenerator
from buildbot.reporters.generators.buildrequest import BuildRequestGenerator
from buildbot.reporters.message import MessageFormatterRenderable
from buildbot.util import bytes2unicode
from buildbot.util import datetime2epoch
from buildbot.util import httpclientservice
from buildbot.util import unicode2bytes
from .utils import merge_reports_prop
# Magic words understood by Bitbucket Server REST API
INPROGRESS = 'INPROGRESS'
SUCCESSFUL = 'SUCCESSFUL'
FAILED = 'FAILED'
# Legacy build-status endpoint, keyed only by the commit hash.
STATUS_API_URL = '/rest/build-status/1.0/commits/{sha}'
# Core-API build-status endpoint, keyed by project/repo/commit.
STATUS_CORE_API_URL = '/rest/api/1.0/projects/{proj_key}/repos/{repo_slug}/commits/{sha}/builds'
# Comment endpoint; {path} is the path component of a pull-request URL.
COMMENT_API_URL = '/rest/api/1.0{path}/comments'
# HTTP codes treated as success by the senders below.
HTTP_PROCESSED = 204
HTTP_CREATED = 201
class BitbucketServerStatusPush(ReporterBase):
    """Pushes build status to Bitbucket Server's legacy build-status REST API
    (``/rest/build-status/1.0/commits/{sha}``), one status per sourcestamp
    revision of the build."""

    name: str | None = "BitbucketServerStatusPush" # type: ignore[assignment]

    def checkConfig(
        self,
        base_url,
        user,
        password,
        key=None,
        statusName=None,
        verbose=False,
        debug=None,
        verify=None,
        generators=None,
        **kwargs,
    ):
        if generators is None:
            generators = self._create_default_generators()
        super().checkConfig(generators=generators, **kwargs)

    @defer.inlineCallbacks
    def reconfigService(
        self,
        base_url,
        user,
        password,
        key=None,
        statusName=None,
        verbose=False,
        debug=None,
        verify=None,
        generators=None,
        **kwargs,
    ):
        # Credentials may be Secret placeholders; resolve them first.
        user, password = yield self.renderSecrets(user, password)
        self.debug = debug
        self.verify = verify
        self.verbose = verbose
        if generators is None:
            generators = self._create_default_generators()
        yield super().reconfigService(generators=generators, **kwargs)
        # Status key defaults to the builder name; rendered per build.
        self.key = key or Interpolate('%(prop:buildername)s')
        self.context = statusName
        self._http = yield httpclientservice.HTTPSession(
            self.master.httpservice,
            base_url,
            auth=(user, password),
            debug=self.debug,
            verify=self.verify,
        )

    def _create_default_generators(self):
        # Report once at build start and once at build end.
        start_formatter = MessageFormatterRenderable('Build started.')
        end_formatter = MessageFormatterRenderable('Build done.')
        return [
            BuildStartEndStatusGenerator(
                start_formatter=start_formatter, end_formatter=end_formatter
            )
        ]

    def createStatus(self, sha, state, url, key, description=None, context=None):
        """POST one build status for commit ``sha``; returns the response
        Deferred."""
        payload = {
            'state': state,
            'url': url,
            'key': key,
        }
        if description:
            payload['description'] = description
        if context:
            payload['name'] = context
        return self._http.post(STATUS_API_URL.format(sha=sha), json=payload)

    @defer.inlineCallbacks
    def sendMessage(self, reports):
        report = reports[0]
        build = reports[0]['builds'][0]
        props = Properties.fromDict(build['properties'])
        props.master = self.master
        description = report.get('body', None)

        results = build['results']
        if build['complete']:
            state = SUCCESSFUL if results == SUCCESS else FAILED
        else:
            state = INPROGRESS

        key = yield props.render(self.key)
        context = yield props.render(self.context) if self.context else None

        sourcestamps = build['buildset']['sourcestamps']
        for sourcestamp in sourcestamps:
            # Fetch the revision with .get() *before* the try block: the
            # previous code read sourcestamp['revision'] inside the try, so a
            # missing key raised KeyError and the except handler then hit a
            # NameError on the unbound `sha`, aborting the remaining
            # sourcestamps. A missing/None revision is now skipped cleanly.
            sha = sourcestamp.get('revision')
            if sha is None:
                log.msg("Unable to get the commit hash")
                continue
            try:
                url = build['url']
                res = yield self.createStatus(
                    sha=sha, state=state, url=url, key=key, description=description, context=context
                )

                if res.code not in (HTTP_PROCESSED,):
                    content = yield res.content()
                    log.msg(f"{res.code}: Unable to send Bitbucket Server status: {content}")
                elif self.verbose:
                    log.msg(f'Status "{state}" sent for {sha}.')
            except Exception as e:
                log.err(
                    e, f"Failed to send status '{state}' for {sourcestamp['repository']} at {sha}"
                )
class BitbucketServerCoreAPIStatusPush(ReporterBase):
    """Pushes build status to Bitbucket Server via the core REST API
    (``/rest/api/1.0/projects/.../repos/.../commits/.../builds``), which
    supports richer metadata (parent, ref, duration, test results) than the
    legacy build-status endpoint."""

    name: str | None = "BitbucketServerCoreAPIStatusPush" # type: ignore[assignment]
    # Attributes resolved through the secrets machinery before use.
    secrets = ["token", "auth"]

    def checkConfig(
        self,
        base_url,
        token=None,
        auth=None,
        statusName=None,
        statusSuffix=None,
        key=None,
        parentName=None,
        buildNumber=None,
        ref=None,
        duration=None,
        testResults=None,
        verbose=False,
        debug=None,
        verify=None,
        generators=None,
        **kwargs,
    ):
        if generators is None:
            generators = self._create_default_generators()
        super().checkConfig(generators=generators, **kwargs)
        if not base_url:
            config.error("Parameter base_url has to be given")
        # token (Bearer header) and auth (basic auth) are mutually exclusive.
        if token is not None and auth is not None:
            config.error("Only one authentication method can be given (token or auth)")

    @defer.inlineCallbacks
    def reconfigService(
        self,
        base_url,
        token=None,
        auth=None,
        statusName=None,
        statusSuffix=None,
        key=None,
        parentName=None,
        buildNumber=None,
        ref=None,
        duration=None,
        testResults=None,
        verbose=False,
        debug=None,
        verify=None,
        generators=None,
        **kwargs,
    ):
        self.status_name = statusName
        self.status_suffix = statusSuffix
        # Defaults are rendered per build from standard properties.
        self.key = key or Interpolate('%(prop:buildername)s')
        self.parent_name = parentName
        self.build_number = buildNumber or Interpolate('%(prop:buildnumber)s')
        self.ref = ref
        self.duration = duration
        self.debug = debug
        self.verify = verify
        self.verbose = verbose
        if generators is None:
            generators = self._create_default_generators()
        yield super().reconfigService(generators=generators, **kwargs)

        if testResults:
            self.test_results = testResults
        else:
            # Default: derive test results from the tests_* build properties,
            # omitting the field entirely when all counters are zero/absent.
            @util.renderer
            def r_testresults(props):
                failed = props.getProperty("tests_failed", 0)
                skipped = props.getProperty("tests_skipped", 0)
                successful = props.getProperty("tests_successful", 0)
                if any([failed, skipped, successful]):
                    return {"failed": failed, "skipped": skipped, "successful": successful}
                return None

            self.test_results = r_testresults

        headers = {}
        if token:
            headers["Authorization"] = f"Bearer {token}"
        self._http = yield httpclientservice.HTTPSession(
            self.master.httpservice,
            base_url,
            auth=auth,
            headers=headers,
            debug=debug,
            verify=verify,
        )

    def _create_default_generators(self):
        # Report on pending build requests plus build start/end.
        start_formatter = MessageFormatterRenderable('Build started.')
        end_formatter = MessageFormatterRenderable('Build done.')
        pending_formatter = MessageFormatterRenderable('Build pending.')
        return [
            BuildRequestGenerator(formatter=pending_formatter),
            BuildStartEndStatusGenerator(
                start_formatter=start_formatter, end_formatter=end_formatter
            ),
        ]

    def createStatus(
        self,
        proj_key,
        repo_slug,
        sha,
        state,
        url,
        key,
        parent,
        build_number,
        ref,
        description,
        name,
        duration,
        test_results,
    ):
        """POST one build status via the core API; returns the response
        Deferred."""
        payload = {
            'state': state,
            'url': url,
            'key': key,
            'parent': parent,
            'ref': ref,
            'buildNumber': build_number,
            'description': description,
            'name': name,
            'duration': duration,
            'testResults': test_results,
        }

        if self.verbose:
            log.msg(f"Sending payload: '{payload}' for {proj_key}/{repo_slug} {sha}.")

        _url = STATUS_CORE_API_URL.format(proj_key=proj_key, repo_slug=repo_slug, sha=sha)
        return self._http.post(_url, json=payload)

    @defer.inlineCallbacks
    def sendMessage(self, reports):
        report = reports[0]
        build = reports[0]['builds'][0]
        props = Properties.fromDict(build['properties'])
        props.master = self.master
        description = report.get('body', None)

        duration = None
        test_results = None
        if build['complete']:
            state = SUCCESSFUL if build['results'] == SUCCESS else FAILED
            if self.duration:
                duration = yield props.render(self.duration)
            elif "complete_at" in build:
                # Duration in milliseconds, from epoch timestamps.
                complete_at = build['complete_at']
                started_at = build['started_at']
                if isinstance(complete_at, datetime.datetime):
                    complete_at = datetime2epoch(complete_at)
                if isinstance(started_at, datetime.datetime):
                    started_at = datetime2epoch(started_at)
                duration = int(complete_at - started_at) * 1000
            if self.test_results:
                test_results = yield props.render(self.test_results)
        else:
            state = INPROGRESS
            duration = None

        parent_name = (build.get('parentbuilder') or {}).get('name')
        if self.parent_name:
            parent = yield props.render(self.parent_name)
        elif parent_name:
            parent = parent_name
        else:
            parent = build['builder']['name']

        if self.status_name:
            status_name = yield props.render(self.status_name)
        else:
            # Default name: "<builder> #<number>", optionally prefixed by the
            # triggering parent build and suffixed by status_suffix.
            build_identifier = props.getProperty("buildnumber") or "(build request)"
            status_name = f'{props.getProperty("buildername")} #{build_identifier}'
            if parent_name:
                status_name = (
                    f"{parent_name} #{build['parentbuild']['number']} \u00bb {status_name}"
                )
            if self.status_suffix:
                status_name = status_name + (yield props.render(self.status_suffix))

        key = yield props.render(self.key)
        build_number = yield props.render(self.build_number)
        url = build['url']

        sourcestamps = build['buildset']['sourcestamps']
        for sourcestamp in sourcestamps:
            # Extract the sourcestamp fields (and pre-bind the parsed repo
            # coordinates) *before* the try block. Previously these were
            # assigned inside the try, so any failure early in the block (for
            # example while rendering self.ref) made the except handler raise
            # NameError on the unbound proj_key/repo_slug/sha, masking the
            # original error and aborting the remaining sourcestamps.
            ssid = sourcestamp.get('ssid')
            sha = sourcestamp.get('revision')
            branch = sourcestamp.get('branch')
            repo = sourcestamp.get('repository')
            proj_key = None
            repo_slug = None
            if not sha:
                log.msg(f"Unable to get the commit hash for SSID: {ssid}")
                continue
            try:
                ref = None
                if self.ref is None:
                    if branch is not None:
                        if branch.startswith("refs/"):
                            ref = branch
                        else:
                            ref = f"refs/heads/{branch}"
                else:
                    ref = yield props.render(self.ref)

                if not ref:
                    log.msg(
                        f"WARNING: Unable to resolve ref for SSID: {ssid}. "
                        "Build status will not be visible on Builds or "
                        "PullRequest pages only for commits"
                    )

                # Parse "<project>/<repo>[.git]" out of the repository URL.
                r = re.search(r"^.*?/([^/]+)/([^/]+?)(?:\.git)?$", repo or "")
                if r:
                    proj_key = r.group(1)
                    repo_slug = r.group(2)
                else:
                    log.msg(f"Unable to parse repository info from '{repo}' for SSID: {ssid}")
                    continue

                res = yield self.createStatus(
                    proj_key=proj_key,
                    repo_slug=repo_slug,
                    sha=sha,
                    state=state,
                    url=url,
                    key=key,
                    parent=parent,
                    build_number=build_number,
                    ref=ref,
                    description=description,
                    name=status_name,
                    duration=duration,
                    test_results=test_results,
                )

                if res.code not in (HTTP_PROCESSED,):
                    content = yield res.content()
                    log.msg(
                        f"{res.code}: Unable to send Bitbucket Server status for "
                        f"{proj_key}/{repo_slug} {sha}: {content}"
                    )
                elif self.verbose:
                    log.msg(f'Status "{state}" sent for {proj_key}/{repo_slug} {sha}')
            except Exception as e:
                log.err(e, f'Failed to send status "{state}" for {proj_key}/{repo_slug} {sha}')
class BitbucketServerPRCommentPush(ReporterBase):
    """Posts the generated report body as a comment on the Bitbucket Server
    pull request referenced by each build's ``pullrequesturl`` property."""

    name: str | None = "BitbucketServerPRCommentPush" # type: ignore[assignment]

    @defer.inlineCallbacks
    def reconfigService(
        self,
        base_url,
        user,
        password,
        verbose=False,
        debug=None,
        verify=None,
        generators=None,
        **kwargs,
    ):
        # Credentials may be Secret placeholders; resolve them first.
        user, password = yield self.renderSecrets(user, password)
        self.verbose = verbose
        if generators is None:
            generators = self._create_default_generators()
        yield super().reconfigService(generators=generators, **kwargs)
        self._http = yield httpclientservice.HTTPSession(
            self.master.httpservice, base_url, auth=(user, password), debug=debug, verify=verify
        )

    def checkConfig(
        self,
        base_url,
        user,
        password,
        verbose=False,
        debug=None,
        verify=None,
        generators=None,
        **kwargs,
    ):
        if generators is None:
            generators = self._create_default_generators()
        super().checkConfig(generators=generators, **kwargs)

    def _create_default_generators(self):
        # Default: one comment per finished build.
        return [BuildStatusGenerator()]

    def sendComment(self, pr_url, text):
        """POST one comment to the PR whose web URL is ``pr_url``.

        Only the path component of the URL is used to build the REST endpoint.
        """
        path = urlparse(unicode2bytes(pr_url)).path
        payload = {'text': text}
        return self._http.post(COMMENT_API_URL.format(path=bytes2unicode(path)), json=payload)

    @defer.inlineCallbacks
    def sendMessage(self, reports):
        body = merge_reports_prop(reports, 'body')
        builds = merge_reports_prop(reports, 'builds')
        # Deduplicate: several builds may target the same pull request.
        pr_urls = set()
        for build in builds:
            props = Properties.fromDict(build['properties'])
            pr_urls.add(props.getProperty("pullrequesturl"))
        for pr_url in pr_urls:
            # Builds not triggered by a PR have no pullrequesturl property.
            if pr_url is None:
                continue
            try:
                res = yield self.sendComment(pr_url=pr_url, text=body)
                if res.code not in (HTTP_CREATED,):
                    content = yield res.content()
                    log.msg(f"{res.code}: Unable to send a comment: {content}")
                elif self.verbose:
                    log.msg(f'Comment sent to {pr_url}')
            except Exception as e:
                log.err(e, f'Failed to send a comment to "{pr_url}"')
| 16,886 | Python | .py | 438 | 27.557078 | 100 | 0.581104 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,834 | zulip.py | buildbot_buildbot/master/buildbot/reporters/zulip.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Member
from __future__ import annotations
from twisted.internet import defer
from twisted.logger import Logger
from buildbot import config
from buildbot.reporters.base import ReporterBase
from buildbot.reporters.generators.build import BuildStartEndStatusGenerator
from buildbot.util import httpclientservice
log = Logger()
class ZulipStatusPush(ReporterBase):
    """POSTs build start/finish events to a Zulip ``external/buildbot``
    integration endpoint."""

    name: str | None = "ZulipStatusPush" # type: ignore[assignment]

    def checkConfig(self, endpoint, token, stream=None, debug=None, verify=None):
        # Only basic type validation; the generator set is fixed to
        # start/end-of-build events.
        if not isinstance(endpoint, str):
            config.error("Endpoint must be a string")
        if not isinstance(token, str):
            config.error("Token must be a string")
        super().checkConfig(generators=[BuildStartEndStatusGenerator()])

    @defer.inlineCallbacks
    def reconfigService(self, endpoint, token, stream=None, debug=None, verify=None):
        self.debug = debug
        self.verify = verify
        yield super().reconfigService(generators=[BuildStartEndStatusGenerator()])
        self._http = yield httpclientservice.HTTPSession(
            self.master.httpservice, endpoint, debug=self.debug, verify=self.verify
        )
        self.token = token
        self.stream = stream

    @defer.inlineCallbacks
    def sendMessage(self, reports):
        build = reports[0]['builds'][0]
        # "new" while the build is still running, "finished" once complete.
        event = ("new", "finished")[0 if build["complete"] is False else 1]
        jsondata = {
            "event": event,
            "buildid": build["buildid"],
            "buildername": build["builder"]["name"],
            "url": build["url"],
            "project": build["properties"]["project"][0],
        }
        if event == "new":
            jsondata["timestamp"] = int(build["started_at"].timestamp())
        elif event == "finished":
            jsondata["timestamp"] = int(build["complete_at"].timestamp())
            jsondata["results"] = build["results"]
        # Stream is optional; when omitted, presumably the integration's
        # default stream is used on the Zulip side — verify against Zulip docs.
        if self.stream is not None:
            url = f"/api/v1/external/buildbot?api_key={self.token}&stream={self.stream}"
        else:
            url = f"/api/v1/external/buildbot?api_key={self.token}"
        response = yield self._http.post(url, json=jsondata)
        if response.code != 200:
            content = yield response.content()
            log.error(
                "{code}: Error pushing build status to Zulip: {content}",
                code=response.code,
                content=content,
            )
| 3,148 | Python | .py | 68 | 38.911765 | 88 | 0.671987 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,835 | base.py | buildbot_buildbot/master/buildbot/reporters/base.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import abc
from typing import ClassVar
from typing import Sequence
from twisted.internet import defer
from twisted.python import log
from buildbot import config
from buildbot.reporters import utils
from buildbot.util import service
from buildbot.util import tuplematch
ENCODING = 'utf-8'
class ReporterBase(service.BuildbotService):
    """Abstract base for all reporters.

    Subscribes to the MQ event keys wanted by the configured report
    generators, feeds matching events through the generators, and hands the
    resulting reports to the subclass's sendMessage(). Events belonging to the
    same build request are serialized so reports go out in queue order.
    """

    name = None
    __meta__ = abc.ABCMeta

    compare_attrs: ClassVar[Sequence[str]] = ['generators']

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.generators = None
        # event key -> MQ consumer handle
        self._event_consumers = {}
        # chain key -> Deferred fired when that chain's current event is done
        self._pending_got_event_calls = {}

    def checkConfig(self, generators):
        if not isinstance(generators, list):
            # Fix: the original message was '{}: generators argument must be a
            # list' with the placeholder never filled in; include the actual
            # reporter class name.
            config.error(f'{self.__class__.__name__}: generators argument must be a list')

        for g in generators:
            g.check()

        # Derive a unique service name from the generator set when the
        # subclass did not set one.
        if self.name is None:
            self.name = self.__class__.__name__
            for g in generators:
                self.name += "_" + g.generate_name()

    @defer.inlineCallbacks
    def reconfigService(self, generators):
        self.generators = generators

        wanted_event_keys = set()
        for g in self.generators:
            wanted_event_keys.update(g.wanted_event_keys)

        # Remove consumers for keys that are no longer wanted
        for key in list(self._event_consumers.keys()):
            if key not in wanted_event_keys:
                yield self._event_consumers[key].stopConsuming()
                del self._event_consumers[key]

        # Add consumers for new keys
        for key in sorted(list(wanted_event_keys)):
            if key not in self._event_consumers:
                self._event_consumers[key] = yield self.master.mq.startConsuming(
                    self._got_event, key
                )

    @defer.inlineCallbacks
    def stopService(self):
        for consumer in self._event_consumers.values():
            yield consumer.stopConsuming()
        self._event_consumers = {}

        # Drain any in-flight event handling before shutting down.
        yield from list(self._pending_got_event_calls.values())
        self._pending_got_event_calls = {}

        yield super().stopService()

    def _does_generator_want_key(self, generator, key):
        """Return True if ``key`` matches any of the generator's wanted
        event-key patterns."""
        for filter in generator.wanted_event_keys:
            if tuplematch.matchTuple(key, filter):
                return True
        return False

    def _get_chain_key_for_event(self, key, msg):
        """Return a hashable key identifying the serialization chain for this
        event, or None when no ordering guarantee is needed."""
        if key[0] in ["builds", "buildrequests"]:
            return ("buildrequestid", msg["buildrequestid"])
        return None

    @defer.inlineCallbacks
    def _got_event(self, key, msg):
        chain_key = self._get_chain_key_for_event(key, msg)
        if chain_key is not None:
            d = defer.Deferred()
            pending_call = self._pending_got_event_calls.get(chain_key)
            self._pending_got_event_calls[chain_key] = d
            # Wait for previously pending call, if any, to ensure
            # reports are sent out in the order events were queued.
            if pending_call is not None:
                yield pending_call

        try:
            reports = []
            for g in self.generators:
                if self._does_generator_want_key(g, key):
                    try:
                        report = yield g.generate(self.master, self, key, msg)
                        if report is not None:
                            reports.append(report)
                    except Exception as e:
                        # One failing generator must not prevent the others
                        # from producing their reports.
                        log.err(
                            e,
                            "Got exception when handling reporter events: "
                            f"key: {key} generator: {g}",
                        )

            if reports:
                yield self.sendMessage(reports)
        except Exception as e:
            log.err(e, 'Got exception when handling reporter events')

        if chain_key is not None:
            if self._pending_got_event_calls.get(chain_key) == d:
                del self._pending_got_event_calls[chain_key]
            d.callback(None)  # This event is now fully handled

    def getResponsibleUsersForBuild(self, master, buildid):
        # Use library method but subclassers may want to override that
        return utils.getResponsibleUsersForBuild(master, buildid)

    @abc.abstractmethod
    def sendMessage(self, reports):
        """Deliver the generated reports; implemented by subclasses."""
| 5,030 | Python | .py | 115 | 33.782609 | 81 | 0.622418 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,836 | irc.py | buildbot_buildbot/master/buildbot/reporters/irc.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import base64
from typing import ClassVar
from typing import Sequence
from twisted.application import internet
from twisted.internet import defer
from twisted.internet import reactor
from twisted.internet import task
from twisted.python import log
from twisted.words.protocols import irc
from buildbot import config
from buildbot.process.results import CANCELLED
from buildbot.process.results import EXCEPTION
from buildbot.process.results import FAILURE
from buildbot.process.results import RETRY
from buildbot.process.results import SKIPPED
from buildbot.process.results import SUCCESS
from buildbot.process.results import WARNINGS
from buildbot.reporters.words import Channel
from buildbot.reporters.words import Contact
from buildbot.reporters.words import StatusBot
from buildbot.reporters.words import ThrottledClientFactory
from buildbot.reporters.words import dangerousCommand
from buildbot.util import service
from buildbot.util import ssl
class UsageError(ValueError):
    """Raised when a chat command is invoked with invalid arguments."""

    def __init__(self, string="Invalid usage", *more):
        # Deliberate, not a useless delegation: it supplies a default message.
        # pylint used to misreport this; see
        # https://github.com/PyCQA/pylint/issues/1085
        # pylint: disable=useless-super-delegation
        super().__init__(string, *more)
# Names of the 16 standard mIRC colors; each name's tuple index is the numeric
# color code sent on the wire (format_build_status emits '\x03<index>...<text>\x0f').
_irc_colors = (
    'WHITE',
    'BLACK',
    'NAVY_BLUE',
    'GREEN',
    'RED',
    'BROWN',
    'PURPLE',
    'OLIVE',
    'YELLOW',
    'LIME_GREEN',
    'TEAL',
    'AQUA_LIGHT',
    'ROYAL_BLUE',
    'PINK',
    'DARK_GRAY',
    'LIGHT_GRAY',
)
class IRCChannel(Channel):
    """An IRC conversation target: a group channel (e.g. '#dev') or a single
    nick for private messages.  Outgoing traffic is dropped while muted."""

    def __init__(self, bot, channel):
        super().__init__(bot, channel)
        # set by the 'mute'/'unmute' bot commands; silences send() and act()
        self.muted = False

    def send(self, message, **kwargs):
        """Deliver *message* unless muted; group channels use groupSend(),
        private nicks get a plain msg()."""
        if self.muted:
            return
        is_group = self.id[0] in irc.CHANNEL_PREFIXES
        deliver = self.bot.groupSend if is_group else self.bot.msg
        deliver(self.id, message)

    def act(self, action):
        """Perform a '/me'-style action in this channel unless muted."""
        if not self.muted:
            self.bot.groupDescribe(self.id, action)
class IRCContact(Contact):
    """A conversation between the bot and one IRC user; in private chats the
    channel is the user itself."""

    def __init__(self, user, channel=None):
        if channel is None:
            channel = user
        super().__init__(user, channel)

    def act(self, action):
        # forward '/me'-style actions to the channel (which honors muting)
        return self.channel.act(action)

    def handleAction(self, action):
        # this is sent when somebody performs an action that mentions the
        # buildbot (like '/me kicks buildbot'). 'self.user' is the name/nick/id of
        # the person who performed the action, so if their action provokes a
        # response, they can be named. This is 100% silly.
        if not action.endswith("s " + self.bot.nickname):
            return
        words = action.split()
        verb = words[-2]
        if verb == "kicks":
            response = f"{verb} back"
        elif verb == "threatens":
            response = f"hosts a red wedding for {self.user_id}"
        else:
            response = f"{verb} {self.user_id} too"
        self.act(response)

    @defer.inlineCallbacks
    def op_required(self, command):
        # True when *command* needs channel-operator status and this user is
        # neither an op nor explicitly allowed via the authz mapping
        if self.is_private_chat or self.user_id in self.bot.authz.get(command.upper(), ()):
            return False
        ops = yield self.bot.getChannelOps(self.channel.id)
        return self.user_id not in ops

    # IRC only commands

    @dangerousCommand
    def command_JOIN(self, args, **kwargs):
        """join a channel"""
        args = self.splitArgs(args)
        for channel in args:
            self.bot.join(channel)

    command_JOIN.usage = "join #channel - join a channel #channel"  # type: ignore[attr-defined]

    @dangerousCommand
    def command_LEAVE(self, args, **kwargs):
        """leave a channel"""
        args = self.splitArgs(args)
        for channel in args:
            self.bot.leave(channel)

    command_LEAVE.usage = "leave #channel - leave a channel #channel"  # type: ignore[attr-defined]

    @defer.inlineCallbacks
    def command_MUTE(self, args, **kwargs):
        # muting a group channel requires operator status (or authz grant)
        if (yield self.op_required('mute')):
            yield self.send(
                "Only channel operators or explicitly allowed users "
                f"can mute me here, {self.user_id}... Blah, blah, blah..."
            )
            return
        # The order of these is important! ;)
        yield self.send("Shutting up for now.")
        self.channel.muted = True

    command_MUTE.usage = "mute - suppress all messages until a corresponding 'unmute' is issued"  # type: ignore[attr-defined]

    @defer.inlineCallbacks
    def command_UNMUTE(self, args, **kwargs):
        if self.channel.muted:
            # unmute uses the same 'mute' authz entry as the mute command
            if (yield self.op_required('mute')):
                return
            # The order of these is important! ;)
            self.channel.muted = False
            yield self.send("I'm baaaaaaaaaaack!")
        else:
            yield self.send(
                "No one had told me to be quiet, but it's the thought that counts, right?"
            )

    command_UNMUTE.usage = "unmute - disable a previous 'mute'"  # type: ignore[attr-defined]

    @defer.inlineCallbacks
    @Contact.overrideCommand
    def command_NOTIFY(self, args, **kwargs):
        # toggling notifications in a group channel requires operator status
        if not self.is_private_chat:
            argv = self.splitArgs(args)
            if argv and argv[0] in ('on', 'off') and (yield self.op_required('notify')):
                yield self.send(
                    "Only channel operators can change notified events for this "
                    f"channel. And you, {self.user_id}, are neither!"
                )
                return
        super().command_NOTIFY(args, **kwargs)

    def command_DANCE(self, args, **kwargs):
        """dance, dance academy..."""
        # a little ASCII choreography, spread over a few seconds
        reactor.callLater(1.0, self.send, "<(^.^<)")
        reactor.callLater(2.0, self.send, "<(^.^)>")
        reactor.callLater(3.0, self.send, "(>^.^)>")
        reactor.callLater(3.5, self.send, "(7^.^)7")
        reactor.callLater(5.0, self.send, "(>^.^<)")

    def command_DESTROY(self, args):
        # joke command: retaliate unless the bot itself is the target
        if self.bot.nickname not in args:
            self.act("readies phasers")
        else:
            self.send(f"Better destroy yourself, {self.user_id}!")

    def command_HUSTLE(self, args):
        self.act("does the hustle")

    command_HUSTLE.usage = "dondon on #qutebrowser: qutebrowser-bb needs to learn to do the hustle"  # type: ignore[attr-defined]
class IrcStatusBot(StatusBot, irc.IRCClient):
    """I represent the buildbot to an IRC server."""

    contactClass = IRCContact
    channelClass = IRCChannel

    def __init__(
        self,
        nickname,
        password,
        join_channels,
        pm_to_nicks,
        noticeOnChannel,
        *args,
        useColors=False,
        useSASL=False,
        **kwargs,
    ):
        super().__init__(*args, **kwargs)
        self.nickname = nickname
        self.join_channels = join_channels
        self.pm_to_nicks = pm_to_nicks
        self.password = password
        self.hasQuit = 0
        self.noticeOnChannel = noticeOnChannel
        self.useColors = useColors
        self.useSASL = useSASL
        # periodic self-ping keeps the connection alive (started in connectionMade)
        self._keepAliveCall = task.LoopingCall(lambda: self.ping(self.nickname))
        # channel -> ([pending getNames() deferreds], [nicks collected so far])
        self._channel_names = {}

    def register(self, nickname, hostname="foo", servername="bar"):
        # Without SASL, plain IRCClient registration suffices; with SASL we
        # request the 'sasl' capability and start PLAIN authentication.
        if not self.useSASL:
            super().register(nickname, hostname, servername)
            return
        if self.password is not None:
            self.sendLine("CAP REQ :sasl")
        self.setNick(nickname)
        if self.username is None:
            self.username = nickname
        self.sendLine(f"USER {self.username} {hostname} {servername} :{self.realname}")
        if self.password is not None:
            self.sendLine("AUTHENTICATE PLAIN")

    def irc_AUTHENTICATE(self, prefix, params):
        # SASL PLAIN (RFC 4616): base64("authzid NUL authcid NUL password")
        nick = self.nickname.encode()
        passwd = self.password.encode()
        code = base64.b64encode(nick + b'\0' + nick + b'\0' + passwd)
        self.sendLine("AUTHENTICATE " + code.decode())
        self.sendLine("CAP END")

    def connectionMade(self):
        super().connectionMade()
        # ping ourselves every minute to keep the link from idling out
        self._keepAliveCall.start(60)

    def connectionLost(self, reason):
        if self._keepAliveCall.running:
            self._keepAliveCall.stop()
        super().connectionLost(reason)

    # The following methods are called when we write something.
    def groupSend(self, channel, message):
        # group messages can be sent as NOTICE (less intrusive) if configured
        if self.noticeOnChannel:
            self.notice(channel, message)
        else:
            self.msg(channel, message)

    def groupDescribe(self, channel, action):
        self.describe(channel, action)

    def getContact(self, user, channel=None):
        # nicknames and channel names are case insensitive
        user = user.lower()
        if channel is None:
            channel = user
        channel = channel.lower()
        return super().getContact(user, channel)

    # the following irc.IRCClient methods are called when we have input
    def privmsg(self, user, channel, message):
        user = user.split('!', 1)[0]  # rest is ~user@hostname
        # channel is '#twisted' or 'buildbot' (for private messages)
        if channel == self.nickname:
            # private message
            contact = self.getContact(user=user)
            d = contact.handleMessage(message)
            return d
        # else it's a broadcast message, maybe for us, maybe not. 'channel'
        # is '#twisted' or the like.
        contact = self.getContact(user=user, channel=channel)
        # only react when addressed as 'botnick:' or 'botnick,'
        if message.startswith(f"{self.nickname}:") or message.startswith(f"{self.nickname},"):
            message = message[len(f"{self.nickname}:") :]
            d = contact.handleMessage(message)
            return d
        return None

    def action(self, user, channel, data):
        user = user.split('!', 1)[0]  # rest is ~user@hostname
        # somebody did an action (/me actions) in the broadcast channel
        contact = self.getContact(user=user, channel=channel)
        if self.nickname in data:
            contact.handleAction(data)

    def signedOn(self):
        # identify with NickServ, join configured channels (with optional
        # keys), set up private-message contacts, then restore saved state
        if self.password:
            self.msg("Nickserv", "IDENTIFY " + self.password)
        for c in self.join_channels:
            if isinstance(c, dict):
                channel = c.get('channel', None)
                password = c.get('password', None)
            else:
                channel = c
                password = None
            self.join(channel=channel, key=password)
        for c in self.pm_to_nicks:
            contact = self.getContact(c)
            contact.channel.add_notification_events(self.notify_events)
        self.loadState()

    def getNames(self, channel):
        # Return a Deferred firing with the list of nicks in *channel*;
        # fulfilled by the RPL_NAMREPLY / RPL_ENDOFNAMES handlers below.
        channel = channel.lower()
        d = defer.Deferred()
        callbacks = self._channel_names.setdefault(channel, ([], []))[0]
        callbacks.append(d)
        self.sendLine(f"NAMES {channel}")
        return d

    def irc_RPL_NAMREPLY(self, prefix, params):
        # accumulate one page of the NAMES reply for a pending getNames()
        channel = params[2].lower()
        if channel not in self._channel_names:
            return
        nicks = params[3].split(' ')
        nicklist = self._channel_names[channel][1]
        nicklist += nicks

    def irc_RPL_ENDOFNAMES(self, prefix, params):
        # NAMES listing complete: fire all waiting getNames() deferreds
        channel = params[1].lower()
        try:
            callbacks, namelist = self._channel_names.pop(channel)
        except KeyError:
            return
        for cb in callbacks:
            cb.callback(namelist)

    @defer.inlineCallbacks
    def getChannelOps(self, channel):
        names = yield self.getNames(channel)
        # keep nicks carrying an operator-like prefix (@ op, & admin,
        # ~ owner, % half-op) and strip that prefix
        return [n[1:] for n in names if n[0] in '@&~%']

    def joined(self, channel):
        self.log(f"Joined {channel}")
        # trigger contact constructor, which in turn subscribes to notify events
        channel = self.getChannel(channel=channel)
        channel.add_notification_events(self.notify_events)

    def left(self, channel):
        self.log(f"Left {channel}")

    def kickedFrom(self, channel, kicker, message):
        self.log(f"I have been kicked from {channel} by {kicker}: {message}")

    def userLeft(self, user, channel):
        # forget the contact bound to (channel, user), if we had one
        if user:
            user = user.lower()
        if channel:
            channel = channel.lower()
        if (channel, user) in self.contacts:
            del self.contacts[(channel, user)]

    def userKicked(self, kickee, channel, kicker, message):
        self.userLeft(kickee, channel)

    def userQuit(self, user, quitMessage=None):
        # forget every contact of this user, across all channels
        if user:
            user = user.lower()
        for c, u in list(self.contacts):
            if u == user:
                del self.contacts[(c, u)]

    # mIRC color name (index into _irc_colors) used for each build result
    results_colors = {
        SUCCESS: 'GREEN',
        WARNINGS: 'YELLOW',
        FAILURE: 'RED',
        SKIPPED: 'ROYAL_BLUE',
        EXCEPTION: 'PURPLE',
        RETRY: 'AQUA_LIGHT',
        CANCELLED: 'PINK',
    }

    short_results_descriptions = {
        SUCCESS: ", Success",
        WARNINGS: ", Warnings",
        FAILURE: ", Failure",
        SKIPPED: ", Skipped",
        EXCEPTION: ", Exception",
        RETRY: ", Retry",
        CANCELLED: ", Cancelled",
    }

    def format_build_status(self, build, short=False):
        # Textual description of the build's result, optionally wrapped in
        # mIRC color escapes ('\x03<code>' sets color, '\x0f' resets).
        br = build['results']
        if short:
            text = self.short_results_descriptions[br]
        else:
            text = self.results_descriptions[br]
        if self.useColors:
            return f"\x03{_irc_colors.index(self.results_colors[br])}{text}\x0f"
        else:
            return text
class IrcStatusFactory(ThrottledClientFactory):
    """Client factory that builds IrcStatusBot protocol instances and keeps
    reconnecting (throttled) until shutdown() is requested."""

    protocol = IrcStatusBot  # type: ignore[assignment]
    shuttingDown = False
    # the currently active protocol (bot) instance, if any
    p = None

    def __init__(
        self,
        nickname,
        password,
        join_channels,
        pm_to_nicks,
        authz,
        tags,
        notify_events,
        noticeOnChannel=False,
        useRevisions=False,
        showBlameList=False,
        useSASL=False,
        parent=None,
        lostDelay=None,
        failedDelay=None,
        useColors=True,
    ):
        super().__init__(lostDelay=lostDelay, failedDelay=failedDelay)
        self.nickname = nickname
        self.password = password
        self.join_channels = join_channels
        self.pm_to_nicks = pm_to_nicks
        self.tags = tags
        self.authz = authz
        self.parent = parent
        self.notify_events = notify_events
        self.noticeOnChannel = noticeOnChannel
        self.useRevisions = useRevisions
        self.showBlameList = showBlameList
        self.useColors = useColors
        self.useSASL = useSASL

    def __getstate__(self):
        # the live protocol instance cannot (and must not) be pickled
        d = self.__dict__.copy()
        del d['p']
        return d

    def shutdown(self):
        # disconnect the bot and suppress further reconnection attempts
        self.shuttingDown = True
        if self.p:
            self.p.quit("buildmaster reconfigured: bot disconnecting")

    def buildProtocol(self, address):
        # detach the previous bot (if any) before wiring up a fresh one
        if self.p:
            self.p.disownServiceParent()
        p = self.protocol(
            self.nickname,
            self.password,
            self.join_channels,
            self.pm_to_nicks,
            self.noticeOnChannel,
            self.authz,
            self.tags,
            self.notify_events,
            useColors=self.useColors,
            useSASL=self.useSASL,
            useRevisions=self.useRevisions,
            showBlameList=self.showBlameList,
        )
        p.setServiceParent(self.parent)
        p.factory = self
        self.p = p
        return p

    # TODO: I think a shutdown that occurs while the connection is being
    # established will make this explode
    def clientConnectionLost(self, connector, reason):
        if self.shuttingDown:
            log.msg("not scheduling reconnection attempt")
            return
        super().clientConnectionLost(connector, reason)

    def clientConnectionFailed(self, connector, reason):
        if self.shuttingDown:
            log.msg("not scheduling reconnection attempt")
            return
        super().clientConnectionFailed(connector, reason)
class IRC(service.BuildbotService):
    """Buildbot service that keeps an IRC bot connected (via
    IrcStatusFactory) and restarts it whenever the configuration changes."""

    name = "IRC"
    in_test_harness = False
    # the IrcStatusFactory; created in reconfigService
    f = None
    compare_attrs: ClassVar[Sequence[str]] = (
        "host",
        "port",
        "nick",
        "password",
        "authz",
        "channels",
        "pm_to_nicks",
        "useSSL",
        "useSASL",
        "useRevisions",
        "tags",
        "useColors",
        "allowForce",
        "allowShutdown",
        "lostDelay",
        "failedDelay",
    )
    secrets = ['password']

    def checkConfig(
        self,
        host,
        nick,
        channels,
        pm_to_nicks=None,
        port=6667,
        allowForce=None,
        tags=None,
        password=None,
        notify_events=None,
        showBlameList=True,
        useRevisions=False,
        useSSL=False,
        useSASL=False,
        lostDelay=None,
        failedDelay=None,
        useColors=True,
        allowShutdown=None,
        noticeOnChannel=False,
        authz=None,
        **kwargs,
    ):
        """Validate the configuration; any unknown keyword is rejected."""
        deprecated_params = list(kwargs)
        if deprecated_params:
            config.error(f'{",".join(deprecated_params)} are deprecated')
        # deprecated
        if allowForce is not None:
            if authz is not None:
                config.error("If you specify authz, you must not use allowForce anymore")
            if allowForce not in (True, False):
                config.error(f"allowForce must be boolean, not {allowForce!r}")
            log.msg('IRC: allowForce is deprecated: use authz instead')
        if allowShutdown is not None:
            if authz is not None:
                config.error("If you specify authz, you must not use allowShutdown anymore")
            if allowShutdown not in (True, False):
                config.error(f"allowShutdown must be boolean, not {allowShutdown!r}")
            log.msg('IRC: allowShutdown is deprecated: use authz instead')
        # ###
        if noticeOnChannel not in (True, False):
            config.error(f"noticeOnChannel must be boolean, not {noticeOnChannel!r}")
        if useSSL:
            # SSL client needs a ClientContextFactory for some SSL mumbo-jumbo
            ssl.ensureHasSSL(self.__class__.__name__)
        if authz is not None:
            for acl in authz.values():
                if not isinstance(acl, (list, tuple, bool)):
                    config.error("authz values must be bool or a list of nicks")

    def reconfigService(
        self,
        host,
        nick,
        channels,
        pm_to_nicks=None,
        port=6667,
        allowForce=None,
        tags=None,
        password=None,
        notify_events=None,
        showBlameList=True,
        useRevisions=False,
        useSSL=False,
        useSASL=False,
        lostDelay=None,
        failedDelay=None,
        useColors=True,
        allowShutdown=None,
        noticeOnChannel=False,
        authz=None,
        **kwargs,
    ):
        # need to stash these so we can detect changes later
        self.host = host
        self.port = port
        self.nick = nick
        self.join_channels = channels
        if pm_to_nicks is None:
            pm_to_nicks = []
        self.pm_to_nicks = pm_to_nicks
        self.password = password
        if authz is None:
            self.authz = {}
        else:
            self.authz = authz
        self.useRevisions = useRevisions
        self.tags = tags
        if notify_events is None:
            notify_events = {}
        self.notify_events = notify_events
        self.noticeOnChannel = noticeOnChannel
        # deprecated...
        if allowForce is not None:
            self.authz[('force', 'stop')] = allowForce
        if allowShutdown is not None:
            # note: ('shutdown') is just the string 'shutdown' — the
            # parentheses do not make it a tuple
            self.authz[('shutdown')] = allowShutdown
        # ###
        # This function is only called in case of reconfig with changes
        # We don't try to be smart here. Just restart the bot if config has
        # changed.
        if self.f is not None:
            self.f.shutdown()
        self.f = IrcStatusFactory(
            self.nick,
            self.password,
            self.join_channels,
            self.pm_to_nicks,
            self.authz,
            self.tags,
            self.notify_events,
            parent=self,
            noticeOnChannel=noticeOnChannel,
            useRevisions=useRevisions,
            useSASL=useSASL,
            showBlameList=showBlameList,
            lostDelay=lostDelay,
            failedDelay=failedDelay,
            useColors=useColors,
        )
        if useSSL:
            cf = ssl.ClientContextFactory()
            c = internet.SSLClient(self.host, self.port, self.f, cf)
        else:
            c = internet.TCPClient(self.host, self.port, self.f)
        c.setServiceParent(self)
| 21,454 | Python | .py | 583 | 27.903945 | 129 | 0.609609 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,837 | telegram.py | buildbot_buildbot/master/buildbot/reporters/telegram.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import annotations
import io
import json
import random
import shlex
from typing import Any
from typing import ClassVar
from typing import Sequence
from twisted.internet import defer
from twisted.internet import reactor
from buildbot import config
from buildbot.process.results import CANCELLED
from buildbot.process.results import EXCEPTION
from buildbot.process.results import FAILURE
from buildbot.process.results import RETRY
from buildbot.process.results import SUCCESS
from buildbot.process.results import WARNINGS
from buildbot.reporters.words import Channel
from buildbot.reporters.words import Contact
from buildbot.reporters.words import StatusBot
from buildbot.reporters.words import UsageError
from buildbot.reporters.words import WebhookResource
from buildbot.schedulers.forcesched import CollectedValidationError
from buildbot.schedulers.forcesched import ForceScheduler
from buildbot.util import Notifier
from buildbot.util import asyncSleep
from buildbot.util import bytes2unicode
from buildbot.util import epoch2datetime
from buildbot.util import httpclientservice
from buildbot.util import service
from buildbot.util import unicode2bytes
class TelegramChannel(Channel):
    """A Telegram chat (private, group, or channel) the bot talks to."""

    def __init__(self, bot, channel):
        # The Telegram API always describes a chat as a JSON object.
        assert isinstance(channel, dict), "channel must be a dict provided by Telegram API"
        super().__init__(bot, channel['id'])
        self.chat_info = channel

    @defer.inlineCallbacks
    def list_notified_events(self):
        """Tell this chat which build events it is currently subscribed to."""
        if not self.notify_events:
            yield self.send("üîï No events are being notified.")
            return
        lines = sorted(f"üîî **{event}**" for event in self.notify_events)
        yield self.send("The following events are being notified:\n" + "\n".join(lines))
def collect_fields(fields):
    """Recursively flatten a force-scheduler field tree.

    Yields, depth-first and in pre-order, every field dict whose 'fullName'
    is non-empty; nested parameter groups are descended via their 'fields'.
    """
    for entry in fields:
        if entry['fullName']:
            yield entry
        if 'fields' in entry:
            # nested parameter group: recurse into its children
            yield from collect_fields(entry['fields'])
class TelegramContact(Contact):
    def __init__(self, user, channel=None):
        # The Telegram API always describes a user as a JSON object.
        assert isinstance(user, dict), "user must be a dict provided by Telegram API"
        self.user_info = user
        super().__init__(user['id'], channel)
        # partial command (with a '{}' placeholder) awaiting the user's reply
        self.template = None
    @property
    def chat_id(self):
        """Telegram chat id of the conversation this contact belongs to."""
        return self.channel.id
@property
def user_full_name(self):
fullname = " ".join((
self.user_info['first_name'],
self.user_info.get('last_name', ''),
)).strip()
return fullname
    @property
    def user_name(self):
        """The user's first name, as reported by Telegram."""
        return self.user_info['first_name']
def describeUser(self):
user = self.user_full_name
try:
user += f" (@{self.user_info['username']})"
except KeyError:
pass
if not self.is_private_chat:
chat_title = self.channel.chat_info.get('title')
if chat_title:
user += f" on '{chat_title}'"
return user
ACCESS_DENIED_MESSAGES = [
"üßô‚Äç‚ôÇÔ∏è You shall not pass! üëπ",
"üò® Oh NO! You are simply not allowed to this! üò¢",
"‚õî You cannot do this. Better go outside and relax... üå≥",
"‚õî ACCESS DENIED! This incident has ben reported to NSA, KGB, and George Soros! üïµ",
"üö´ Unauthorized access detected! Your device will explode in 3... 2... 1... üí£",
"‚ò¢ Radiation level too high! Continuation of the procedure forbidden! üõë",
]
    def access_denied(self, *args, tmessage, **kwargs):
        """Reply to *tmessage* with a randomly chosen refusal message."""
        self.send(
            random.choice(self.ACCESS_DENIED_MESSAGES), reply_to_message_id=tmessage['message_id']
        )
def query_button(self, caption, payload):
if isinstance(payload, str) and len(payload) < 64:
return {'text': caption, 'callback_data': payload}
key = hash(repr(payload))
while True:
cached = self.bot.query_cache.get(key)
if cached is None:
self.bot.query_cache[key] = payload
break
if cached == payload:
break
key += 1
return {'text': caption, 'callback_data': key}
    @defer.inlineCallbacks
    def command_START(self, args, **kwargs):
        # switch the chat's menu button to show the bot's command list
        self.bot.post(
            '/setChatMenuButton',
            json={'chat_id': self.channel.id, 'menu_button': {'type': 'commands'}},
        )
        yield self.command_HELLO(args)
        # follow up with the help text shortly afterwards, as its own message
        reactor.callLater(0.2, self.command_HELP, '')
def command_NAY(self, args, tmessage, **kwargs):
"""forget the current command"""
replied_message = tmessage.get('reply_to_message')
if replied_message:
if 'reply_markup' in replied_message:
self.bot.edit_keyboard(self.channel.id, replied_message['message_id'])
if self.is_private_chat:
self.send("Never mind...")
else:
self.send(f"Never mind, {self.user_name}...")
command_NAY.usage = "nay - never mind the command we are currently discussing" # type: ignore[attr-defined]
@classmethod
def get_commands(cls):
commands = cls.build_commands()
response = []
for command in commands:
if command == 'start':
continue
meth = getattr(cls, 'command_' + command.upper())
doc = getattr(meth, '__doc__', None)
if not doc:
doc = command
response.append((command, doc))
return response
    @classmethod
    def describe_commands(cls):
        """Return 'command - description' lines for all chat commands."""
        return [f"{command} - {doc}" for command, doc in cls.get_commands()]
    @Contact.overrideCommand
    def command_COMMANDS(self, args, **kwargs):
        # 'commands botfather' dumps the command list in the format expected
        # by Telegram's @BotFather /setcommands dialog
        if args.lower() == 'botfather':
            response = self.describe_commands()
            if response:
                self.send('\n'.join(response))
        else:
            return super().command_COMMANDS(args)
        return None
    @defer.inlineCallbacks
    def command_GETID(self, args, **kwargs):
        """get user and chat ID"""
        # in a group chat, address the user by name and also report the chat id
        if self.is_private_chat:
            self.send(f"Your ID is `{self.user_id}`.")
        else:
            yield self.send(f"{self.user_name}, your ID is `{self.user_id}`.")
        self.send(f'This {self.channel.chat_info.get("type", "group")} ID is `{self.chat_id}`.')

    command_GETID.usage = (  # type: ignore[attr-defined]
        "getid - get user and chat ID that can be put in the master configuration file"
    )
    @defer.inlineCallbacks
    @Contact.overrideCommand
    def command_LIST(self, args, **kwargs):
        args = self.splitArgs(args)
        if not args:
            # no arguments: present the available listings as inline buttons
            keyboard = [
                [
                    self.query_button("üë∑Ô∏è Builders", '/list builders'),
                    self.query_button("üë∑Ô∏è (including old ones)", '/list all builders'),
                ],
                [
                    self.query_button("‚öô Workers", '/list workers'),
                    self.query_button("‚öô (including old ones)", '/list all workers'),
                ],
                [self.query_button("üìÑ Changes (last 10)", '/list changes')],
            ]
            self.send("What do you want to list?", reply_markup={'inline_keyboard': keyboard})
            return
        all = False
        num = 10
        # optional leading argument: a numeric limit or the word 'all'
        try:
            num = int(args[0])
            del args[0]
        except ValueError:
            if args[0] == 'all':
                all = True
                del args[0]
        except IndexError:
            pass
        if not args:
            raise UsageError(
                "Try '" + self.bot.commandPrefix + "list [all|N] builders|workers|changes'."
            )
        if args[0] == 'builders':
            bdicts = yield self.bot.getAllBuilders()
            online_builderids = yield self.bot.getOnlineBuilders()
            response = ["I found the following **builders**:"]
            for bdict in bdicts:
                if bdict['builderid'] in online_builderids:
                    response.append(f"`{bdict['name']}`")
                elif all:
                    # offline builders are only shown with 'all'
                    response.append(f"`{bdict['name']}` ‚ùå")
            self.send('\n'.join(response))
        elif args[0] == 'workers':
            workers = yield self.master.data.get(('workers',))
            response = ["I found the following **workers**:"]
            for worker in workers:
                if worker['configured_on']:
                    response.append(f"`{worker['name']}`")
                    # configured but not connected: flag with a warning sign
                    if not worker['connected_to']:
                        response[-1] += " ⚠️"
                elif all:
                    response.append(f"`{worker['name']}` ‚ùå")
            self.send('\n'.join(response))
        elif args[0] == 'changes':
            wait_message = yield self.send("‚è≥ Getting your changes...")
            if all:
                changes = yield self.master.data.get(('changes',))
                self.bot.delete_message(self.channel.id, wait_message['message_id'])
                num = len(changes)
                # guard against flooding the chat: ask before dumping >50 changes
                if num > 50:
                    keyboard = [
                        [
                            self.query_button(
                                "‼ Yes, flood me with all of them!", f'/list {num} changes'
                            )
                        ],
                        [self.query_button("‚úÖ No, just show last 50", '/list 50 changes')],
                    ]
                    self.send(
                        f"I found {num} changes. Do you really want me to list them all?",
                        reply_markup={'inline_keyboard': keyboard},
                    )
                    return
            else:
                changes = yield self.master.data.get(('changes',), order=['-changeid'], limit=num)
                self.bot.delete_message(self.channel.id, wait_message['message_id'])
            response = ["I found the following recent **changes**:\n"]
            # oldest first; one markdown paragraph per change
            for change in reversed(changes):
                change['comment'] = change['comments'].split('\n')[0]
                change['date'] = epoch2datetime(change['when_timestamp']).strftime('%Y-%m-%d %H:%M')
                response.append(
                    f"[{change['comment']}]({change['revlink']})\n"
                    f"_Author_: {change['author']}\n"
                    f"_Date_: {change['date']}\n"
                    f"_Repository_: {change['repository']}\n"
                    f"_Branch_: {change['branch']}\n"
                    f"_Revision_: {change['revision']}\n"
                )
            self.send('\n'.join(response))
@defer.inlineCallbacks
def get_running_builders(self):
builders = []
for bdict in (yield self.bot.getAllBuilders()):
if (yield self.bot.getRunningBuilds(bdict['builderid'])):
builders.append(bdict['name'])
return builders
    @defer.inlineCallbacks
    @Contact.overrideCommand
    def command_WATCH(self, args, **kwargs):
        if args:
            super().command_WATCH(args)
        else:
            # no builder given: offer the currently running ones as buttons
            builders = yield self.get_running_builders()
            if builders:
                keyboard = [[self.query_button("üîé " + b, f'/watch {b}')] for b in builders]
                self.send(
                    "Which builder do you want to watch?",
                    reply_markup={'inline_keyboard': keyboard},
                )
            else:
                self.send("There are no currently running builds.")
    @Contact.overrideCommand
    def command_NOTIFY(self, args, tquery=None, **kwargs):
        if args:
            want_list = args == 'list'
            if want_list and tquery:
                # 'Hide...' pressed on the inline keyboard: drop that message
                self.bot.delete_message(self.chat_id, tquery['message']['message_id'])
            super().command_NOTIFY(args)
            if want_list or not tquery:
                return
        # build a two-column toggle keyboard, one button per notifiable event;
        # each button flips its event's subscription (quietly) when pressed
        keyboard = [
            [
                self.query_button(
                    f"{e.capitalize()} {'üîî' if e in self.channel.notify_events else 'üîï'}",
                    f"/notify {'off' if e in self.channel.notify_events else 'on'}-quiet {e}",
                )
                for e in evs
            ]
            for evs in (
                ('started', 'finished'),
                ('success', 'failure'),
                ('warnings', 'exception'),
                ('problem', 'recovery'),
                ('worse', 'better'),
                ('cancelled', 'worker'),
            )
        ] + [[self.query_button("Hide...", '/notify list')]]
        if tquery:
            # refresh the keyboard attached to the existing message
            self.bot.edit_keyboard(self.chat_id, tquery['message']['message_id'], keyboard)
        else:
            self.send(
                "Here are available notifications and their current state. "
                "Click to turn them on/off.",
                reply_markup={'inline_keyboard': keyboard},
            )
    def ask_for_reply(self, prompt, greeting='Ok'):
        """Ask the user to continue the conversation by replying.

        In group chats the user is addressed by @username (with a selective
        force-reply keyboard) so Telegram routes the answer back to us; in
        private chats a plain prompt suffices.
        """
        kwargs = {}
        if not self.is_private_chat:
            username = self.user_info.get('username', '')
            if username:
                if greeting:
                    prompt = f"{greeting} @{username}, now {prompt}..."
                else:
                    prompt = f"@{username}, now {prompt}..."
                kwargs['reply_markup'] = {'force_reply': True, 'selective': True}
            else:
                # no username to mention: ask for an explicit reply instead
                if greeting:
                    prompt = f"{greeting}, now reply to this message and {prompt}..."
                else:
                    prompt = f"Reply to this message and {prompt}..."
        else:
            if greeting:
                prompt = f"{greeting}, now {prompt}..."
            else:
                prompt = prompt[0].upper() + prompt[1:] + "..."
        # Telegram seems to have a bug, which causes reply request to pop sometimes again.
        # So we do not force reply to avoid it...
        # kwargs['reply_markup'] = {
        #     'force_reply': True
        # }
        self.send(prompt, **kwargs)
    @defer.inlineCallbacks
    @Contact.overrideCommand
    def command_STOP(self, args, **kwargs):
        argv = self.splitArgs(args)
        # fully specified ('stop build <builder> <reason>') or a non-'build'
        # subcommand: defer to the generic implementation
        if len(argv) >= 3 or argv and argv[0] != 'build':
            super().command_STOP(args)
            return
        argv = argv[1:]
        if not argv:
            # no builder given: offer the currently running ones as buttons
            builders = yield self.get_running_builders()
            if builders:
                keyboard = [[self.query_button("üö´ " + b, f'/stop build {b}')] for b in builders]
                self.send("Select builder to stop...", reply_markup={'inline_keyboard': keyboard})
        else:  # len(argv) == 1
            # builder known but reason missing: ask for it via a reply,
            # stashing the partial command in self.template
            self.template = '/stop ' + args + ' {}'
            self.ask_for_reply(f"give me the reason to stop build on `{argv[0]}`")
    @Contact.overrideCommand
    def command_SHUTDOWN(self, args, **kwargs):
        # explicit subcommand: let the generic implementation handle it
        if args:
            return super().command_SHUTDOWN(args)
        # no arguments: offer the actions applicable to the current state
        if self.master.botmaster.shuttingDown:
            keyboard = [
                [
                    self.query_button("üîô Stop Shutdown", '/shutdown stop'),
                    self.query_button("‼️ Shutdown Now", '/shutdown now'),
                ]
            ]
            text = "Buildbot is currently shutting down.\n\n"
        else:
            keyboard = [
                [
                    self.query_button("↘️ Begin Shutdown", '/shutdown start'),
                    self.query_button("‼️ Shutdown Now", '/shutdown now'),
                ]
            ]
            text = ""
        self.send(text + "What do you want to do?", reply_markup={'inline_keyboard': keyboard})
        return None
    @defer.inlineCallbacks
    def command_FORCE(self, args, tquery=None, partial=None, **kwargs):
        """force a build

        Interactive /force flow driven by inline keyboards.  The command
        carries an internal ``task`` token: 'config' shows the current
        parameter values with buttons to change them, 'ask' prompts for a
        single parameter value, and 'build' submits the force request.
        ``tquery`` is the originating Telegram callback query, if any.
        """
        try:
            forceschedulers = yield self.master.data.get(('forceschedulers',))
        except AttributeError:
            forceschedulers = None
        else:
            forceschedulers = dict((s['name'], s) for s in forceschedulers)
        if not forceschedulers:
            raise UsageError("no force schedulers configured for use by /force")
        argv = self.splitArgs(args)
        # --- pick the force scheduler (implicit if there is only one) ---
        try:
            sched = argv[0]
        except IndexError:
            if len(forceschedulers) == 1:
                sched = next(iter(forceschedulers))
            else:
                keyboard = [
                    [self.query_button(s['label'], f"/force {s['name']}")]
                    for s in forceschedulers.values()
                ]
                self.send(
                    "Which force scheduler do you want to activate?",
                    reply_markup={'inline_keyboard': keyboard},
                )
                return
        else:
            if sched in forceschedulers:
                del argv[0]
            elif len(forceschedulers) == 1:
                # First word was not a scheduler name; treat it as the next
                # argument for the single configured scheduler.
                sched = next(iter(forceschedulers))
            else:
                raise UsageError(
                    "Try '/force' and follow the instructions" f" (no force scheduler {sched})"
                )
        scheduler = forceschedulers[sched]
        # --- pick the task; default is showing the configuration view ---
        try:
            task = argv.pop(0)
        except IndexError:
            task = 'config'
        if tquery and task != 'config':
            # Remove the inline keyboard from the message the button came from.
            self.bot.edit_keyboard(self.chat_id, tquery['message']['message_id'])
        if not argv:
            # No builder yet: one button per builder of this scheduler.
            keyboard = [
                [self.query_button(b, f'/force {sched} {task} {b}')]
                for b in scheduler['builder_names']
            ]
            self.send(
                "Which builder do you want to start?", reply_markup={'inline_keyboard': keyboard}
            )
            return
        if task == 'ask':
            # 'ask' carries the name of the parameter to prompt for.
            try:
                what = argv.pop(0)
            except IndexError as e:
                raise UsageError("Try '/force' and follow the instructions") from e
        else:
            what = None # silence PyCharm warnings
        bldr = argv.pop(0)
        if bldr not in scheduler['builder_names']:
            raise UsageError(
                "Try '/force' and follow the instructions "
                f"(`{bldr}` not configured for _{scheduler['label']}_ scheduler)"
            )
        # Remaining arguments are name=value parameter assignments.
        try:
            params = dict(arg.split('=', 1) for arg in argv)
        except ValueError as e:
            raise UsageError(f"Try '/force' and follow the instructions ({e})") from e
        all_fields = list(collect_fields(scheduler['all_fields']))
        required_params = [
            f['fullName']
            for f in all_fields
            if f['required'] and f['fullName'] not in ('username', 'owner')
        ]
        missing_params = [p for p in required_params if p not in params]
        if task == 'build':
            # TODO This should probably be moved to the upper class,
            # however, it will change the force command totally
            try:
                if missing_params:
                    # raise UsageError
                    # Required values still missing: fall back to the
                    # configuration view instead of submitting.
                    task = 'config'
                else:
                    # Fill in fixed-value fields, then submit the force request.
                    params.update(
                        dict(
                            (f['fullName'], f['default'])
                            for f in all_fields
                            if f['type'] == 'fixed' and f['fullName'] not in ('username', 'owner')
                        )
                    )
                    builder = yield self.bot.getBuilder(buildername=bldr)
                    # NOTE: rebinds 'scheduler' from the data-API dict to the
                    # actual ForceScheduler instance.
                    for scheduler in self.master.allSchedulers():
                        if scheduler.name == sched and isinstance(scheduler, ForceScheduler):
                            break
                    else:
                        raise ValueError(f"There is no force scheduler '{sched}'")
                    try:
                        yield scheduler.force(
                            builderid=builder['builderid'], owner=self.describeUser(), **params
                        )
                    except CollectedValidationError as e:
                        raise ValueError(e.errors) from e
                    self.send("Force build successfully requested.")
                    return
            except (IndexError, ValueError) as e:
                raise UsageError(f"Try '/force' and follow the instructions ({e})") from e
        if task == 'config':
            # Configuration view: show current values, one button per
            # changeable field, and a Start Build button when complete.
            msg = f"{self.user_full_name}, you are about to start a new build on `{bldr}`!"
            keyboard = []
            args = ' '.join(shlex.quote(f"{p[0]}={p[1]}") for p in params.items())
            fields = [
                f
                for f in all_fields
                if f['type'] != 'fixed' and f['fullName'] not in ('username', 'owner')
            ]
            if fields:
                msg += "\n\nThe current build parameters are:"
                for field in fields:
                    if field['type'] == 'nested':
                        msg += f"\n{field['label']}"
                    else:
                        field_name = field['fullName']
                        value = params.get(field_name, field['default'])
                        if isinstance(value, str):
                            value = value.strip()
                        msg += f"\n {field['label']} `{value}`"
                        if value:
                            key = "Change "
                        else:
                            key = "Set "
                        key += field_name.replace('_', ' ').title()
                        if field_name in missing_params:
                            key = "⚠️ " + key
                            msg += " ⚠️"
                        keyboard.append([
                            self.query_button(key, f'/force {sched} ask {field_name} {bldr} {args}')
                        ])
            msg += "\n\nWhat do you want to do?"
            if missing_params:
                msg += " You must set values for all parameters marked with ⚠️"
            if not missing_params:
                keyboard.append(
                    [self.query_button("üöÄ Start Build", f'/force {sched} build {bldr} {args}')],
                )
            self.send(msg, reply_markup={'inline_keyboard': keyboard})
        elif task == 'ask':
            # Prompt for a single parameter; the reply is substituted into
            # the template, re-entering the 'config' view.
            prompt = "enter the new value for the " + what.replace('_', ' ').lower()
            args = ' '.join(shlex.quote(f"{p[0]}={p[1]}") for p in params.items() if p[0] != what)
            self.template = f'/force {sched} config {bldr} {args} {what}={{}}'
            self.ask_for_reply(prompt, '')
        else:
            raise UsageError("Try '/force' and follow the instructions")
    command_FORCE.usage = "force - Force a build" # type: ignore[attr-defined]
class TelegramStatusBot(StatusBot):
    """Telegram flavour of :class:`StatusBot`.

    Talks to the Telegram Bot API over HTTP, maps build results onto
    emoji, and routes incoming updates (plain messages as well as inline
    keyboard callback queries) to per-user contacts.
    """
    contactClass = TelegramContact
    channelClass = TelegramChannel
    commandPrefix = '/'
    offline_string = "offline ‚ùå"
    idle_string = "idle üí§"
    running_string = "running üåÄ:"
    # Cache of inline-keyboard callback payloads, keyed by an integer hash;
    # looked up again in process_update when a button is pressed.
    query_cache: dict[int, dict[str, Any]] = {}
    @property
    def commandSuffix(self):
        """Suffix allowing commands addressed as /cmd@botname, or None."""
        if self.nickname is not None:
            return '@' + self.nickname
        return None
    def __init__(self, token, outgoing_http, chat_ids, *args, retry_delay=30, **kwargs):
        """
        :param token: Telegram bot token.
        :param outgoing_http: HTTP session pointed at the bot API base URL.
        :param chat_ids: chats that receive the configured notifications.
        :param retry_delay: seconds to sleep before retrying a failed request.
        """
        super().__init__(*args, **kwargs)
        self.http_client = outgoing_http
        self.retry_delay = retry_delay
        self.token = token
        self.chat_ids = chat_ids
        self.nickname = None
    @defer.inlineCallbacks
    def startService(self):
        """Start the service, subscribe configured chats, publish commands."""
        yield super().startService()
        for c in self.chat_ids:
            channel = self.getChannel(c)
            channel.add_notification_events(self.notify_events)
        yield self.loadState()
        # Advertise the command list to Telegram clients.
        commands = [
            {'command': command, 'description': doc}
            for command, doc in TelegramContact.get_commands()
        ]
        self.post('/setMyCommands', json={'commands': commands})
    # Emoji appended to build result descriptions.
    results_emoji = {
        SUCCESS: ' ‚úÖ',
        WARNINGS: ' ⚠️',
        FAILURE: '‚ùó',
        EXCEPTION: ' ‼️',
        RETRY: ' üîÑ',
        CANCELLED: ' üö´',
    }
    def format_build_status(self, build, short=False):
        """Return the build's result as text+emoji (emoji only if *short*)."""
        br = build['results']
        if short:
            return self.results_emoji[br]
        else:
            return self.results_descriptions[br] + self.results_emoji[br]
    def getContact(self, user, channel):
        """get a Contact instance for ``user`` on ``channel``"""
        assert isinstance(user, dict), "user must be a dict provided by Telegram API"
        assert isinstance(channel, dict), "channel must be a dict provided by Telegram API"
        uid = user['id']
        cid = channel['id']
        try:
            contact = self.contacts[(cid, uid)]
        except KeyError:
            valid = self.isValidUser(uid)
            contact = self.contactClass(user=user, channel=self.getChannel(channel, valid))
            # Only authorized users are cached; others get a throwaway contact.
            if valid:
                self.contacts[(cid, uid)] = contact
        else:
            # Refresh cached user/chat metadata with the latest from Telegram.
            if isinstance(user, dict):
                contact.user_info.update(user)
            if isinstance(channel, dict):
                contact.channel.chat_info.update(channel)
        return contact
    def getChannel(self, channel, valid=True):
        """Return (and, if *valid*, cache) a channel object for a chat dict/id."""
        if not isinstance(channel, dict):
            channel = {'id': channel}
        cid = channel['id']
        try:
            return self.channels[cid]
        except KeyError:
            new_channel = self.channelClass(self, channel)
            if valid:
                self.channels[cid] = new_channel
                new_channel.setServiceParent(self)
            return new_channel
    @defer.inlineCallbacks
    def process_update(self, update):
        """Dispatch one Telegram update (message or callback query).

        Callback queries carry an id into ``query_cache``; the cached
        payload supplies the command text and optional notification to
        flash.  Returns a short status string or the handler result.
        """
        data = {}
        message = update.get('message')
        if message is None:
            query = update.get('callback_query')
            if query is None:
                self.log('No message in Telegram update object')
                return 'no message'
            original_message = query.get('message', {})
            data = query.get('data', 0)
            try:
                data = self.query_cache[int(data)]
            except ValueError:
                # Callback data was the command itself, not a cache key.
                text = data
                data = {}
                notify = None
            except KeyError:
                # Cache entry expired (e.g. after a restart).
                text = None
                data = {}
                notify = "Sorry, button is no longer valid!"
                if original_message:
                    try:
                        self.edit_keyboard(
                            original_message['chat']['id'], original_message['message_id']
                        )
                    except KeyError:
                        pass
            else:
                if isinstance(data, dict):
                    data = data.copy()
                    text = data.pop('command')
                    try:
                        notify = data.pop('notify')
                    except KeyError:
                        notify = None
                else:
                    text = data
                    data = {}
                    notify = None
                data['tquery'] = query
            self.answer_query(query['id'], notify)
            # Synthesize a message so the common path below can handle it.
            message = {
                'from': query['from'],
                'chat': original_message.get('chat'),
                'text': text,
            }
            if 'reply_to_message' in original_message:
                message['reply_to_message'] = original_message['reply_to_message']
        chat = message['chat']
        user = message.get('from')
        if user is None:
            self.log('No user in incoming message')
            return 'no user'
        text = message.get('text')
        if not text:
            return 'no text in the message'
        contact = self.getContact(user=user, channel=chat)
        data['tmessage'] = message
        # A pending template means we asked the user for a reply; consume it.
        template = contact.template
        contact.template = None
        if text.startswith(self.commandPrefix):
            result = yield contact.handleMessage(text, **data)
        else:
            if template:
                text = template.format(shlex.quote(text))
            result = yield contact.handleMessage(text, **data)
        return result
    @defer.inlineCallbacks
    def post(self, path, **kwargs):
        """POST to the Telegram API, retrying forever on transport errors.

        Returns the API 'result' payload (or True), or None if Telegram
        answered with ok=False.
        """
        logme = True
        while True:
            try:
                res = yield self.http_client.post(path, **kwargs)
            except AssertionError as err:
                # just for tests
                raise err
            except Exception as err:
                msg = f"ERROR: problem sending Telegram request {path} (will try again): {err}"
                # Log the failure only once per retry streak.
                if logme:
                    self.log(msg)
                    logme = False
                yield asyncSleep(self.retry_delay)
            else:
                ans = yield res.json()
                if not ans.get('ok'):
                    self.log(
                        f"ERROR: cannot send Telegram request {path}: "
                        f"[{res.code}] {ans.get('description')}"
                    )
                    return None
                return ans.get('result', True)
    @defer.inlineCallbacks
    def set_nickname(self):
        """Fetch the bot's own username from Telegram (getMe)."""
        res = yield self.post('/getMe')
        if res:
            self.nickname = res.get('username')
    @defer.inlineCallbacks
    def answer_query(self, query_id, notify=None):
        """Acknowledge a callback query, optionally flashing *notify* text."""
        params = {"callback_query_id": query_id}
        if notify is not None:
            params.update({"text": notify})
        return (yield self.post('/answerCallbackQuery', json=params))
    @defer.inlineCallbacks
    def send_message(
        self,
        chat,
        message,
        parse_mode='Markdown',
        reply_to_message_id=None,
        reply_markup=None,
        **kwargs,
    ):
        """Send *message* to *chat*, splitting it to honor Telegram's 4096-char limit.

        Long messages are split on newline boundaries where possible; only
        the first part is marked as a reply and only the last part carries
        the reply markup.  Returns the result of the last sendMessage call.
        """
        result = None
        message = message.strip()
        while message:
            params = {"chat_id": chat}
            if parse_mode is not None:
                params['parse_mode'] = parse_mode
            if reply_to_message_id is not None:
                params['reply_to_message_id'] = reply_to_message_id
                reply_to_message_id = None # we only mark first message as a reply
            if len(message) <= 4096:
                params['text'] = message
                message = None
            else:
                # Prefer splitting at the last newline within the limit.
                n = message[:4096].rfind('\n')
                n = n + 1 if n != -1 else 4096
                params['text'] = message[:n].rstrip()
                message = message[n:].lstrip()
            if not message and reply_markup is not None:
                params['reply_markup'] = reply_markup
            params.update(kwargs)
            result = yield self.post('/sendMessage', json=params)
        return result
    @defer.inlineCallbacks
    def edit_message(self, chat, msg, message, parse_mode='Markdown', **kwargs):
        """Replace the text of an existing message."""
        params = {"chat_id": chat, "message_id": msg, "text": message}
        if parse_mode is not None:
            params['parse_mode'] = parse_mode
        params.update(kwargs)
        return (yield self.post('/editMessageText', json=params))
    @defer.inlineCallbacks
    def edit_keyboard(self, chat, msg, keyboard=None):
        """Replace (or, with keyboard=None, remove) a message's inline keyboard."""
        params = {"chat_id": chat, "message_id": msg}
        if keyboard is not None:
            params['reply_markup'] = {'inline_keyboard': keyboard}
        return (yield self.post('/editMessageReplyMarkup', json=params))
    @defer.inlineCallbacks
    def delete_message(self, chat, msg):
        """Delete a message from a chat."""
        params = {"chat_id": chat, "message_id": msg}
        return (yield self.post('/deleteMessage', json=params))
    @defer.inlineCallbacks
    def send_sticker(self, chat, sticker, **kwargs):
        """Send a sticker (by file id) to a chat."""
        params = {"chat_id": chat, "sticker": sticker}
        params.update(kwargs)
        return (yield self.post('/sendSticker', json=params))
class TelegramWebhookBot(TelegramStatusBot):
    """Telegram bot receiving updates via a registered webhook."""
    name: str | None = "TelegramWebhookBot" # type: ignore[assignment]
    def __init__(self, token, *args, certificate=None, **kwargs):
        """
        :param certificate: optional public-key certificate (bytes) uploaded
            to Telegram for self-signed webhook endpoints.
        """
        TelegramStatusBot.__init__(self, token, *args, **kwargs)
        self._certificate = certificate
        # The token makes the webhook path unguessable.
        self.webhook = WebhookResource('telegram' + token)
        self.webhook.setServiceParent(self)
    @defer.inlineCallbacks
    def startService(self):
        """Start the bot and register the webhook URL with Telegram."""
        yield super().startService()
        url = bytes2unicode(self.master.config.buildbotURL)
        if not url.endswith('/'):
            url += '/'
        yield self.set_webhook(url + self.webhook.path, self._certificate)
    def process_webhook(self, request):
        """Handle one incoming webhook HTTP request."""
        update = self.get_update(request)
        return self.process_update(update)
    def get_update(self, request):
        """Parse the JSON update object out of the webhook request body."""
        content = request.content.read()
        content = bytes2unicode(content)
        content_type = request.getHeader(b'Content-Type')
        content_type = bytes2unicode(content_type)
        if content_type is not None and content_type.startswith('application/json'):
            update = json.loads(content)
        else:
            raise ValueError(f'Unknown content type: {content_type}')
        return update
    @defer.inlineCallbacks
    def set_webhook(self, url, certificate=None):
        """Register *url* with Telegram, uploading *certificate* if given."""
        if not certificate:
            self.log(f"Setting up webhook to: {url}")
            yield self.post('/setWebhook', json={"url": url})
        else:
            self.log(f"Setting up webhook to: {url} (custom certificate)")
            certificate = io.BytesIO(unicode2bytes(certificate))
            yield self.post('/setWebhook', data={"url": url}, files={"certificate": certificate})
class TelegramPollingBot(TelegramStatusBot):
    """Telegram bot fetching updates by long-polling getUpdates."""
    name: str | None = "TelegramPollingBot" # type: ignore[assignment]
    def __init__(self, *args, poll_timeout=120, **kwargs):
        """
        :param poll_timeout: long-poll timeout (seconds) passed to getUpdates.
        """
        super().__init__(*args, **kwargs)
        # Fired when the polling loop has fully exited (see stopService).
        self._polling_finished_notifier = Notifier()
        self.poll_timeout = poll_timeout
    def startService(self):
        """Start the service and kick off the polling loop."""
        super().startService()
        self._polling_continue = True
        self.do_polling()
    @defer.inlineCallbacks
    def stopService(self):
        """Ask the polling loop to stop and wait until it has finished."""
        self._polling_continue = False
        yield self._polling_finished_notifier.wait()
        yield super().stopService()
    @defer.inlineCallbacks
    def do_polling(self):
        """Long-poll getUpdates until stopped, dispatching each update.

        Any webhook previously registered for this token is removed first,
        since Telegram refuses getUpdates while a webhook is active.
        """
        yield self.post('/deleteWebhook')
        offset = 0
        kwargs = {'json': {'timeout': self.poll_timeout}}
        logme = True
        while self._polling_continue:
            if offset:
                # Acknowledge everything below 'offset' so Telegram does not
                # redeliver already-processed updates.
                kwargs['json']['offset'] = offset
            try:
                res = yield self.http_client.post(
                    '/getUpdates', timeout=self.poll_timeout + 2, **kwargs
                )
                ans = yield res.json()
                if not ans.get('ok'):
                    raise ValueError(f"[{res.code}] {ans.get('description')}")
                updates = ans.get('result')
            except AssertionError as err:
                raise err
            except Exception as err:
                msg = f"ERROR: cannot send Telegram request /getUpdates (will try again): {err}"
                # Log only the first failure of a retry streak.
                if logme:
                    self.log(msg)
                    logme = False
                yield asyncSleep(self.retry_delay)
            else:
                logme = True
                if updates:
                    offset = max(update['update_id'] for update in updates) + 1
                    for update in updates:
                        yield self.process_update(update)
        self._polling_finished_notifier.notify(None)
class TelegramBot(service.BuildbotService):
    """Buildbot service wrapping a Telegram bot (polling or webhook).

    Acts as a thin configuration shell: on (re)configuration it tears down
    any existing bot child service and builds a fresh
    TelegramPollingBot or TelegramWebhookBot from the new settings.
    """
    name = "TelegramBot"
    in_test_harness = False
    compare_attrs: ClassVar[Sequence[str]] = [
        "bot_token",
        "chat_ids",
        "authz",
        "tags",
        "notify_events",
        "showBlameList",
        "useRevisions",
        "certificate",
        "useWebhook",
        "pollTimeout",
        "retryDelay",
    ]
    secrets = ["bot_token"]
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Child bot service; created in reconfigService.
        self.bot = None
    def _get_http(self, bot_token):
        """Return an HTTP session rooted at the token-specific API base URL."""
        base_url = "https://api.telegram.org/bot" + bot_token
        return httpclientservice.HTTPSession(self.master.httpservice, base_url)
    def checkConfig(
        self,
        bot_token,
        chat_ids=None,
        authz=None,
        bot_username=None,
        tags=None,
        notify_events=None,
        showBlameList=True,
        useRevisions=False,
        useWebhook=False,
        certificate=None,
        pollTimeout=120,
        retryDelay=30,
    ):
        """Validate configuration; report errors via buildbot.config.error."""
        super().checkConfig(self.name)
        if authz is not None:
            for acl in authz.values():
                if not isinstance(acl, (list, tuple, bool)):
                    config.error("authz values must be bool or a list of user ids")
        if isinstance(certificate, io.TextIOBase):
            config.error("certificate file must be open in binary mode")
    @defer.inlineCallbacks
    def reconfigService(
        self,
        bot_token,
        chat_ids=None,
        authz=None,
        bot_username=None,
        tags=None,
        notify_events=None,
        showBlameList=True,
        useRevisions=False,
        useWebhook=False,
        certificate=None,
        pollTimeout=120,
        retryDelay=30,
    ):
        """(Re)create the underlying bot service from the given settings."""
        # need to stash these so we can detect changes later
        self.bot_token = bot_token
        if chat_ids is None:
            chat_ids = []
        self.chat_ids = chat_ids
        self.authz = authz
        self.useRevisions = useRevisions
        self.tags = tags
        if notify_events is None:
            notify_events = set()
        self.notify_events = notify_events
        self.useWebhook = useWebhook
        self.certificate = certificate
        self.pollTimeout = pollTimeout
        self.retryDelay = retryDelay
        # This function is only called in case of reconfig with changes
        # We don't try to be smart here. Just restart the bot if config has
        # changed.
        http = yield self._get_http(bot_token)
        if self.bot is not None:
            self.removeService(self.bot)
        if not useWebhook:
            self.bot = TelegramPollingBot(
                bot_token,
                http,
                chat_ids,
                authz,
                tags=tags,
                notify_events=notify_events,
                useRevisions=useRevisions,
                showBlameList=showBlameList,
                poll_timeout=self.pollTimeout,
                retry_delay=self.retryDelay,
            )
        else:
            self.bot = TelegramWebhookBot(
                bot_token,
                http,
                chat_ids,
                authz,
                tags=tags,
                notify_events=notify_events,
                useRevisions=useRevisions,
                showBlameList=showBlameList,
                retry_delay=self.retryDelay,
                certificate=self.certificate,
            )
        # The bot must know its own username to recognize /cmd@botname.
        if bot_username is not None:
            self.bot.nickname = bot_username
        else:
            yield self.bot.set_nickname()
            if self.bot.nickname is None:
                raise RuntimeError("No bot username specified and I cannot get it from Telegram")
        yield self.bot.setServiceParent(self)
| 40,035 | Python | .py | 969 | 29.050568 | 112 | 0.542491 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,838 | pushjet.py | buildbot_buildbot/master/buildbot/reporters/pushjet.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from twisted.internet import defer
from twisted.python import log as twlog
from buildbot.process.results import CANCELLED
from buildbot.process.results import EXCEPTION
from buildbot.process.results import FAILURE
from buildbot.process.results import SUCCESS
from buildbot.process.results import WARNINGS
from buildbot.reporters.base import ReporterBase
from buildbot.reporters.generators.build import BuildStatusGenerator
from buildbot.reporters.message import MessageFormatter
from buildbot.util import httpclientservice
from .utils import merge_reports_prop
from .utils import merge_reports_prop_take_first
# Encoding used for outgoing notification payloads.
ENCODING = 'utf8'
# Map Buildbot result codes to the level-name keys accepted in the
# PushjetNotifier(levels=...) configuration dictionary.
LEVELS = {
    CANCELLED: 'cancelled',
    EXCEPTION: 'exception',
    FAILURE: 'failing',
    SUCCESS: 'passing',
    WARNINGS: 'warnings',
}
# Default Jinja template for the notification body.
# Fix: the two literals previously joined as "...</a>of <i>...", i.e. the
# rendered message read e.g. "detected a failingof Builder"; a separating
# space is required between the link and "of".
DEFAULT_MSG_TEMPLATE = (
    'The Buildbot has detected a <a href="{{ build_url }}">{{ status_detected }}</a>'
    + ' of <i>{{ buildername }}</i> while building {{ projects }} on {{ workername }}.'
)
class PushjetNotifier(ReporterBase):
    """Reporter that pushes build notifications to a Pushjet service."""
    def checkConfig(self, secret, levels=None, base_url='https://api.pushjet.io', generators=None):
        """Validate configuration (delegates to ReporterBase)."""
        if generators is None:
            generators = self._create_default_generators()
        super().checkConfig(generators=generators)
    @defer.inlineCallbacks
    def reconfigService(
        self, secret, levels=None, base_url='https://api.pushjet.io', generators=None
    ):
        """
        :param secret: Pushjet service secret (may be a Buildbot secret).
        :param levels: optional mapping from level names (see LEVELS values
            plus 'worker_missing') to Pushjet importance levels.
        :param base_url: Pushjet API endpoint.
        """
        secret = yield self.renderSecrets(secret)
        if generators is None:
            generators = self._create_default_generators()
        yield super().reconfigService(generators=generators)
        self.secret = secret
        if levels is None:
            self.levels = {}
        else:
            self.levels = levels
        self._http = yield httpclientservice.HTTPSession(self.master.httpservice, base_url)
    def _create_default_generators(self):
        """Default: notify on finished builds using the HTML template above."""
        formatter = MessageFormatter(template_type='html', template=DEFAULT_MSG_TEMPLATE)
        return [BuildStatusGenerator(message_formatter=formatter)]
    def sendMessage(self, reports):
        """Build the Pushjet message from generator reports and send it."""
        body = merge_reports_prop(reports, 'body')
        subject = merge_reports_prop_take_first(reports, 'subject')
        results = merge_reports_prop(reports, 'results')
        worker = merge_reports_prop_take_first(reports, 'worker')
        msg = {'message': body, 'title': subject}
        # A 'worker' prop present means this is a missing-worker report.
        level = self.levels.get(LEVELS[results] if worker is None else 'worker_missing')
        if level is not None:
            msg['level'] = level
        return self.sendNotification(msg)
    def sendNotification(self, params):
        """POST the message to the Pushjet /message endpoint."""
        twlog.msg("sending pushjet notification")
        params.update({"secret": self.secret})
        return self._http.post('/message', data=params)
| 3,457 | Python | .py | 75 | 40.84 | 99 | 0.727029 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,839 | gerrit_verify_status.py | buildbot_buildbot/master/buildbot/reporters/gerrit_verify_status.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Member
from __future__ import annotations
from twisted.internet import defer
from twisted.logger import Logger
from twisted.python import failure
from buildbot.process.properties import Interpolate
from buildbot.process.properties import Properties
from buildbot.process.results import CANCELLED
from buildbot.process.results import EXCEPTION
from buildbot.process.results import FAILURE
from buildbot.process.results import RETRY
from buildbot.process.results import SKIPPED
from buildbot.process.results import SUCCESS
from buildbot.process.results import WARNINGS
from buildbot.reporters.base import ReporterBase
from buildbot.reporters.generators.build import BuildStartEndStatusGenerator
from buildbot.reporters.message import MessageFormatterRenderable
from buildbot.util import httpclientservice
# Module-level Twisted logger used by GerritVerifyStatusPush.
log = Logger()
class GerritVerifyStatusPush(ReporterBase):
    """Reporter pushing build status to Gerrit's verify-status plugin."""
    name: str | None = "GerritVerifyStatusPush" # type: ignore[assignment]
    # overridable constants
    # Map Buildbot results onto the plugin's vote values
    # (1 = pass, 0 = unstable, -1 = fail).
    RESULTS_TABLE = {
        SUCCESS: 1,
        WARNINGS: 1,
        FAILURE: -1,
        SKIPPED: 0,
        EXCEPTION: 0,
        RETRY: 0,
        CANCELLED: 0,
    }
    # Vote used for results missing from RESULTS_TABLE.
    DEFAULT_RESULT = -1
    def checkConfig(
        self,
        baseURL,
        auth,
        verification_name=None,
        abstain=False,
        category=None,
        reporter=None,
        verbose=False,
        debug=None,
        verify=None,
        generators=None,
        **kwargs,
    ):
        """Validate configuration (delegates to ReporterBase)."""
        if generators is None:
            generators = self._create_default_generators()
        super().checkConfig(generators=generators, **kwargs)
    @defer.inlineCallbacks
    def reconfigService(
        self,
        baseURL,
        auth,
        verification_name=None,
        abstain=False,
        category=None,
        reporter=None,
        verbose=False,
        debug=None,
        verify=None,
        generators=None,
        **kwargs,
    ):
        """
        :param baseURL: Gerrit HTTP base URL (trailing slash stripped).
        :param auth: credentials for Gerrit's authenticated REST API.
        :param verification_name: job name reported to Gerrit; defaults to
            the builder name (renderable).
        :param abstain: whether the result should not count as a vote.
        """
        auth = yield self.renderSecrets(auth)
        self.debug = debug
        self.verify = verify
        self.verbose = verbose
        if generators is None:
            generators = self._create_default_generators()
        yield super().reconfigService(generators=generators, **kwargs)
        if baseURL.endswith('/'):
            baseURL = baseURL[:-1]
        self._http = yield httpclientservice.HTTPSession(
            self.master.httpservice, baseURL, auth=auth, debug=self.debug, verify=self.verify
        )
        self._verification_name = verification_name or Interpolate('%(prop:buildername)s')
        self._reporter = reporter or "buildbot"
        self._abstain = abstain
        self._category = category
        self._verbose = verbose
    def _create_default_generators(self):
        """Default: report at build start and build end with fixed messages."""
        start_formatter = MessageFormatterRenderable('Build started.')
        end_formatter = MessageFormatterRenderable('Build done.')
        return [
            BuildStartEndStatusGenerator(
                start_formatter=start_formatter, end_formatter=end_formatter
            )
        ]
    def createStatus(
        self,
        change_id,
        revision_id,
        name,
        value,
        abstain=None,
        rerun=None,
        comment=None,
        url=None,
        reporter=None,
        category=None,
        duration=None,
    ):
        """
        Abstract the POST REST api documented here:
        https://gerrit.googlesource.com/plugins/verify-status/+/master/src/main/resources/Documentation/rest-api-changes.md
        :param change_id: The change_id for the change tested (can be in the long form e.g:
            myProject~master~I8473b95934b5732ac55d26311a706c9c2bde9940 or in the short
            integer form).
        :param revision_id: the revision_id tested can be the patchset number or
            the commit id (short or long).
        :param name: The name of the job.
        :param value: The pass/fail result for this job: -1: fail 0: unstable, 1: succeed
        :param abstain: Whether the value counts as a vote (defaults to false)
        :param rerun: Whether this result is from a re-test on the same patchset
        :param comment: A short comment about this job
        :param url: The url link to more info about this job
        :reporter: The user that verified this job
        :category: A category for this job
        "duration": The time it took to run this job
        :return: A deferred with the result from Gerrit.
        """
        # Only include optional fields that were actually provided.
        payload = {'name': name, 'value': value}
        if abstain is not None:
            payload['abstain'] = abstain
        if rerun is not None:
            payload['rerun'] = rerun
        if comment is not None:
            payload['comment'] = comment
        if url is not None:
            payload['url'] = url
        if reporter is not None:
            payload['reporter'] = reporter
        if category is not None:
            payload['category'] = category
        if duration is not None:
            payload['duration'] = duration
        if self._verbose:
            log.debug(
                'Sending Gerrit status for {change_id}/{revision_id}: data={data}',
                change_id=change_id,
                revision_id=revision_id,
                data=payload,
            )
        return self._http.post(
            '/'.join([
                '/a/changes',
                str(change_id),
                'revisions',
                str(revision_id),
                'verify-status~verifications',
            ]),
            json=payload,
        )
    def formatDuration(self, duration):
        """Format the duration.
        This method could be overridden if really needed, as the duration format in gerrit
        is an arbitrary string.
        :param duration: duration in timedelta
        """
        days = duration.days
        hours, remainder = divmod(duration.seconds, 3600)
        minutes, seconds = divmod(remainder, 60)
        if days:
            return f'{days} day{"s" if days > 1 else ""} {hours}h {minutes}m {seconds}s'
        elif hours:
            return f'{hours}h {minutes}m {seconds}s'
        return f'{minutes}m {seconds}s'
    @staticmethod
    def getGerritChanges(props):
        """Get the gerrit changes
        This method could be overridden if really needed to accommodate for other
        custom steps method for fetching gerrit changes.
        :param props: an IProperty
        :return: (optionally via deferred) a list of dictionary with at list
            change_id, and revision_id,
            which format is the one accepted by the gerrit REST API as of
            /changes/:change_id/revision/:revision_id paths (see gerrit doc)
        """
        if 'gerrit_changes' in props:
            return props.getProperty('gerrit_changes')
        if 'event.change.number' in props:
            return [
                {
                    'change_id': props.getProperty('event.change.number'),
                    'revision_id': props.getProperty('event.patchSet.number'),
                }
            ]
        return []
    @defer.inlineCallbacks
    def sendMessage(self, reports):
        """Send the first report's build status to all associated changes."""
        report = reports[0]
        build = reports[0]['builds'][0]
        props = Properties.fromDict(build['properties'])
        props.master = self.master
        comment = report.get('body', None)
        if build['complete']:
            value = self.RESULTS_TABLE.get(build['results'], self.DEFAULT_RESULT)
            duration = self.formatDuration(build['complete_at'] - build['started_at'])
        else:
            # Build still running: report an 'unstable' placeholder vote.
            value = 0
            duration = 'pending'
        name = yield props.render(self._verification_name)
        reporter = yield props.render(self._reporter)
        category = yield props.render(self._category)
        abstain = yield props.render(self._abstain)
        # TODO: find reliable way to find out whether its a rebuild
        rerun = None
        changes = yield self.getGerritChanges(props)
        for change in changes:
            try:
                yield self.createStatus(
                    change['change_id'],
                    change['revision_id'],
                    name,
                    value,
                    abstain=abstain,
                    rerun=rerun,
                    comment=comment,
                    url=build['url'],
                    reporter=reporter,
                    category=category,
                    duration=duration,
                )
            except Exception:
                # One failed change must not prevent reporting the others.
                log.failure('Failed to send status!', failure=failure.Failure())
| 9,274 | Python | .py | 239 | 29.297071 | 123 | 0.622415 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,840 | http.py | buildbot_buildbot/master/buildbot/reporters/http.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import annotations
from twisted.internet import defer
from twisted.python import log
from buildbot.reporters.base import ReporterBase
from buildbot.reporters.generators.build import BuildStatusGenerator
from buildbot.reporters.message import MessageFormatterFunction
from buildbot.util import httpclientservice
class HttpStatusPush(ReporterBase):
    """Reporter that POSTs report bodies as JSON to an arbitrary HTTP server."""
    name: str | None = "HttpStatusPush" # type: ignore[assignment]
    secrets = ["auth"]
    def checkConfig(
        self, serverUrl, auth=None, headers=None, debug=None, verify=None, generators=None, **kwargs
    ):
        """Validate configuration (delegates to ReporterBase)."""
        if generators is None:
            generators = self._create_default_generators()
        super().checkConfig(generators=generators, **kwargs)
    @defer.inlineCallbacks
    def reconfigService(
        self, serverUrl, auth=None, headers=None, debug=None, verify=None, generators=None, **kwargs
    ):
        """
        :param serverUrl: destination URL for the status POSTs.
        :param auth: optional credentials (declared as a Buildbot secret).
        :param headers: extra HTTP headers for every request.
        """
        self.debug = debug
        self.verify = verify
        if generators is None:
            generators = self._create_default_generators()
        yield super().reconfigService(generators=generators, **kwargs)
        self._http = yield httpclientservice.HTTPSession(
            self.master.httpservice,
            serverUrl,
            auth=auth,
            headers=headers,
            debug=self.debug,
            verify=self.verify,
        )
    def _create_default_generators(self):
        """Default: send the raw build dict for new and finished builds."""
        formatter = MessageFormatterFunction(lambda context: context['build'], 'json')
        return [BuildStatusGenerator(message_formatter=formatter, report_new=True)]
    def is_status_2xx(self, code):
        """Return True for HTTP status codes in the 200-299 success range."""
        return code // 100 == 2
    @defer.inlineCallbacks
    def sendMessage(self, reports):
        """POST the first report's body; log (don't raise) on non-2xx replies."""
        response = yield self._http.post("", json=reports[0]['body'])
        if not self.is_status_2xx(response.code):
            log.msg(f"{response.code}: unable to upload status: {response.content}")
| 2,620 | Python | .py | 57 | 39.947368 | 100 | 0.718321 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,841 | mail.py | buildbot_buildbot/master/buildbot/reporters/mail.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import annotations
import re
from email import charset
from email import encoders
from email.header import Header
from email.message import Message
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.utils import formatdate
from email.utils import parseaddr
from io import BytesIO
from typing import ClassVar
from typing import Sequence
from twisted.internet import defer
from twisted.internet import reactor
from twisted.python import log as twlog
from zope.interface import implementer
from buildbot import config
from buildbot import interfaces
from buildbot import util
from buildbot.process.properties import Properties
from buildbot.reporters.base import ENCODING
from buildbot.reporters.base import ReporterBase
from buildbot.reporters.generators.build import BuildStatusGenerator
from buildbot.reporters.generators.worker import WorkerMissingGenerator
from buildbot.reporters.message import MessageFormatter
from buildbot.reporters.message import MessageFormatterMissingWorker
from buildbot.util import ssl
from buildbot.util import unicode2bytes
from .utils import merge_reports_prop
from .utils import merge_reports_prop_take_first
# this incantation teaches email to output utf-8 using 7- or 8-bit encoding,
# although it has no effect before python-2.7.
# needs to match notifier.ENCODING
charset.add_charset(ENCODING, charset.SHORTEST, None, ENCODING)
ESMTPSenderFactory: None | type = None
try:
from twisted.mail.smtp import ESMTPSenderFactory
except ImportError:
pass
# Email parsing can be complex. We try to take a very liberal
# approach. The local part of an email address matches ANY non
# whitespace character. Rather allow a malformed email address than
# croaking on a valid (the matching of domains should be correct
# though; requiring the domain to not be a top level domain). With
# these regular expressions, we can match the following:
#
# [email protected]
# Full Name <[email protected]>
# <[email protected]>
_VALID_EMAIL_ADDR = r"(?:\S+@[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)+\.?)"
VALID_EMAIL = re.compile(rf"^(?:{_VALID_EMAIL_ADDR}|(.+\s+)?<{_VALID_EMAIL_ADDR}>\s*)$")
VALID_EMAIL_ADDR = re.compile(_VALID_EMAIL_ADDR)
@implementer(interfaces.IEmailLookup)
class Domain(util.ComparableMixin):
    """IEmailLookup that turns a bare user name into an address by
    appending a fixed domain; full addresses are passed through untouched."""

    compare_attrs: ClassVar[Sequence[str]] = ("domain",)

    def __init__(self, domain):
        # A full address here would yield user@domain@domain later on.
        assert "@" not in domain
        self.domain = domain

    def getAddress(self, name):
        """If name is already an email address, pass it through."""
        return name if '@' in name else f"{name}@{self.domain}"
@implementer(interfaces.IEmailSender)
class MailNotifier(ReporterBase):
    """Reporter that delivers generated build/worker reports by email over
    SMTP, using twisted.mail's ESMTP sender.

    Recipients come from the "interested users" of a build (optionally mapped
    through an IEmailLookup) plus ``extraRecipients``; missing-worker reports
    instead use the worker's configured notify addresses.
    """

    # These configuration values are resolved through the secrets subsystem.
    secrets = ["smtpUser", "smtpPassword"]

    def checkConfig(
        self,
        fromaddr,
        relayhost="localhost",
        lookup=None,
        extraRecipients=None,
        sendToInterestedUsers=True,
        extraHeaders=None,
        useTls=False,
        useSmtps=False,
        smtpUser=None,
        smtpPassword=None,
        smtpPort=25,
        dumpMailsToLog=False,
        generators=None,
    ):
        """Validate the configuration; problems are reported via config.error()."""
        if ESMTPSenderFactory is None:
            # twisted.mail is optional; see the guarded import at module top.
            config.error("twisted-mail is not installed - cannot send mail")

        if generators is None:
            generators = self._create_default_generators()

        super().checkConfig(generators=generators)

        if extraRecipients is None:
            extraRecipients = []

        if not isinstance(extraRecipients, (list, tuple)):
            config.error("extraRecipients must be a list or tuple")
        else:
            for r in extraRecipients:
                if not isinstance(r, str) or not VALID_EMAIL.search(r):
                    config.error(f"extra recipient {r} is not a valid email")

        if lookup is not None:
            # A plain string is converted to Domain in reconfigService; any
            # other object must already provide IEmailLookup.
            if not isinstance(lookup, str):
                assert interfaces.IEmailLookup.providedBy(lookup)

        if extraHeaders:
            if not isinstance(extraHeaders, dict):
                config.error("extraHeaders must be a dictionary")

        if useSmtps:
            ssl.ensureHasSSL(self.__class__.__name__)

    @defer.inlineCallbacks
    def reconfigService(
        self,
        fromaddr,
        relayhost="localhost",
        lookup=None,
        extraRecipients=None,
        sendToInterestedUsers=True,
        extraHeaders=None,
        useTls=False,
        useSmtps=False,
        smtpUser=None,
        smtpPassword=None,
        smtpPort=25,
        dumpMailsToLog=False,
        generators=None,
    ):
        """Apply (re)configuration; parameters mirror checkConfig."""
        if generators is None:
            generators = self._create_default_generators()

        yield super().reconfigService(generators=generators)

        if extraRecipients is None:
            extraRecipients = []
        self.extraRecipients = extraRecipients
        self.sendToInterestedUsers = sendToInterestedUsers
        self.fromaddr = fromaddr
        self.relayhost = relayhost
        if lookup is not None:
            if isinstance(lookup, str):
                # Convenience form: a bare string means "append this domain".
                lookup = Domain(str(lookup))
        self.lookup = lookup
        self.extraHeaders = extraHeaders
        self.useTls = useTls
        self.useSmtps = useSmtps
        self.smtpUser = smtpUser
        self.smtpPassword = smtpPassword
        self.smtpPort = smtpPort
        self.dumpMailsToLog = dumpMailsToLog

    def _create_default_generators(self):
        # Defaults: HTML build-status mails plus missing-worker notifications.
        return [
            BuildStatusGenerator(
                add_patch=True, message_formatter=MessageFormatter(template_type='html')
            ),
            WorkerMissingGenerator(
                workers='all', message_formatter=MessageFormatterMissingWorker(template_type='html')
            ),
        ]

    def patch_to_attachment(self, patch, index):
        """Wrap a sourcestamp patch dict as a base64-encoded MIME attachment."""
        # patches are specifically converted to unicode before entering the db
        a = MIMEText(patch['body'].encode(ENCODING), _charset=ENCODING)
        # convert to base64 to conform with RFC 5322 2.1.1
        del a['Content-Transfer-Encoding']
        encoders.encode_base64(a)
        a.add_header('Content-Disposition', "attachment", filename="source patch " + str(index))
        return a

    @defer.inlineCallbacks
    def createEmail(self, msgdict, title, results, builds=None, patches=None, logs=None):
        """Assemble the MIME message for a report.

        msgdict carries 'body', 'type' ('plain' or 'html') and 'subject'.
        Patches and logs become attachments; extraHeaders are rendered with
        build properties when exactly one build is present.
        """
        text = msgdict['body']
        # NOTE: shadows the builtin 'type'; kept for historical reasons.
        type = msgdict['type']
        subject = msgdict['subject']

        assert '\n' not in subject, "Subject cannot contain newlines"

        assert type in ('plain', 'html'), f"'{type}' message type must be 'plain' or 'html'."

        if patches or logs:
            # Attachments require a multipart container.
            m = MIMEMultipart()
            txt = MIMEText(text, type, ENCODING)
            m.attach(txt)
        else:
            m = Message()
            m.set_payload(text, ENCODING)
            m.set_type(f"text/{type}")

        m['Date'] = formatdate(localtime=True)
        m['Subject'] = subject
        m['From'] = self.fromaddr
        # m['To'] is added later

        if patches:
            for i, patch in enumerate(patches):
                a = self.patch_to_attachment(patch, i)
                m.attach(a)

        if logs:
            for log in logs:
                # Use distinct filenames for the e-mail summary
                name = f"{log['stepname']}.{log['name']}"
                if len(builds) > 1:
                    # Prefix with builder name to disambiguate across builds.
                    filename = f"{log['buildername']}.{name}"
                else:
                    filename = name

                text = log['content']['content']
                a = MIMEText(text.encode(ENCODING), _charset=ENCODING)
                # convert to base64 to conform with RFC 5322 2.1.1
                del a['Content-Transfer-Encoding']
                encoders.encode_base64(a)
                a.add_header('Content-Disposition', "attachment", filename=filename)
                m.attach(a)

        # @todo: is there a better way to do this?
        # Add any extra headers that were requested, doing WithProperties
        # interpolation if only one build was given
        if self.extraHeaders:
            extraHeaders = self.extraHeaders
            if builds is not None and len(builds) == 1:
                props = Properties.fromDict(builds[0]['properties'])
                props.master = self.master
                extraHeaders = yield props.render(extraHeaders)

            for k, v in extraHeaders.items():
                if k in m:
                    twlog.msg(
                        "Warning: Got header " + k + " in self.extraHeaders "
                        "but it already exists in the Message - "
                        "not adding it."
                    )
                m[k] = v

        return m

    @defer.inlineCallbacks
    def sendMessage(self, reports):
        """ReporterBase hook: merge the generated reports into one email and send it."""
        body = merge_reports_prop(reports, 'body')
        subject = merge_reports_prop_take_first(reports, 'subject')
        type = merge_reports_prop_take_first(reports, 'type')
        results = merge_reports_prop(reports, 'results')
        builds = merge_reports_prop(reports, 'builds')
        users = merge_reports_prop(reports, 'users')
        patches = merge_reports_prop(reports, 'patches')
        logs = merge_reports_prop(reports, 'logs')
        worker = merge_reports_prop_take_first(reports, 'worker')

        body = unicode2bytes(body)
        msgdict = {'body': body, 'subject': subject, 'type': type}

        # ensure message body ends with double carriage return
        if not body.endswith(b"\n\n"):
            msgdict['body'] = body + b'\n\n'

        m = yield self.createEmail(
            msgdict, self.master.config.title, results, builds, patches, logs
        )

        # now, who is this message going to?
        if worker is None:
            recipients = yield self.findInterrestedUsersEmails(list(users))
            all_recipients = self.processRecipients(recipients, m)
        else:
            # Missing-worker reports carry explicit notify addresses in 'users'.
            all_recipients = list(users)
        yield self.sendMail(m, all_recipients)

    @defer.inlineCallbacks
    def findInterrestedUsersEmails(self, users):
        """Map interested users to validated email addresses.

        NOTE(review): method name misspells "Interested"; kept as-is since
        renaming would break external callers/subclasses.
        """
        recipients = set()
        if self.sendToInterestedUsers:
            if self.lookup:
                dl = []
                for u in users:
                    dl.append(defer.maybeDeferred(self.lookup.getAddress, u))
                users = yield defer.gatherResults(dl)

            for r in users:
                if r is None:  # getAddress didn't like this address
                    continue

                # Git can give emails like 'User' <[email protected]>@foo.com so check
                # for two @ and chop the last
                if r.count('@') > 1:
                    r = r[: r.rindex('@')]

                if VALID_EMAIL.search(r):
                    recipients.add(r)
                else:
                    twlog.msg(f"INVALID EMAIL: {r}")

        return recipients

    def formatAddress(self, addr):
        """Return addr as an RFC 2047 encoded "Display Name" <addr> string."""
        r = parseaddr(addr)
        if not r[0]:
            return r[1]
        return f"\"{Header(r[0], 'utf-8').encode()}\" <{r[1]}>"

    def processRecipients(self, blamelist, m):
        """Fill in the To/CC headers of m and return the full recipient list."""
        to_recipients = set(blamelist)
        cc_recipients = set()

        # If we're sending to interested users put the extras in the
        # CC list so they can tell if they are also interested in the
        # change:
        if self.sendToInterestedUsers and to_recipients:
            cc_recipients.update(self.extraRecipients)
        else:
            to_recipients.update(self.extraRecipients)

        m['To'] = ", ".join([self.formatAddress(addr) for addr in sorted(to_recipients)])
        if cc_recipients:
            m['CC'] = ", ".join([self.formatAddress(addr) for addr in sorted(cc_recipients)])

        return list(to_recipients | cc_recipients)

    def sendMail(self, m, recipients):
        """Hand the message to an ESMTP sender; returns a Deferred that fires
        when delivery to the relayhost completes."""
        s = m.as_string()
        twlog.msg(f"sending mail ({len(s)} bytes) to", recipients)
        if self.dumpMailsToLog:  # pragma: no cover
            twlog.msg(f"mail data:\n{s}")

        result = defer.Deferred()

        useAuth = self.smtpUser and self.smtpPassword

        s = unicode2bytes(s)
        # Header fields may contain display names; SMTP needs bare addresses.
        recipients = [parseaddr(r)[1] for r in recipients]
        # A hostname is required for STARTTLS / authentication handshakes.
        hostname = self.relayhost if self.useTls or useAuth else None
        sender_factory = ESMTPSenderFactory(
            unicode2bytes(self.smtpUser),
            unicode2bytes(self.smtpPassword),
            parseaddr(self.fromaddr)[1],
            recipients,
            BytesIO(s),
            result,
            requireTransportSecurity=self.useTls,
            requireAuthentication=useAuth,
            hostname=hostname,
        )

        if self.useSmtps:
            # Implicit TLS from the first byte (SMTPS), as opposed to STARTTLS.
            reactor.connectSSL(
                self.relayhost, self.smtpPort, sender_factory, ssl.ClientContextFactory()
            )
        else:
            reactor.connectTCP(self.relayhost, self.smtpPort, sender_factory)

        return result
| 13,569 | Python | .py | 322 | 32.950311 | 100 | 0.635709 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,842 | bitbucket.py | buildbot_buildbot/master/buildbot/reporters/bitbucket.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import annotations
import hashlib
from urllib.parse import urlparse
from twisted.internet import defer
from twisted.python import log
from buildbot import config
from buildbot.process.properties import Properties
from buildbot.process.properties import Property
from buildbot.process.results import SUCCESS
from buildbot.reporters.base import ReporterBase
from buildbot.reporters.generators.build import BuildStartEndStatusGenerator
from buildbot.reporters.message import MessageFormatter
from buildbot.util import httpclientservice
# Magic words understood by Butbucket REST API
BITBUCKET_INPROGRESS = 'INPROGRESS'
BITBUCKET_SUCCESSFUL = 'SUCCESSFUL'
BITBUCKET_FAILED = 'FAILED'
_BASE_URL = 'https://api.bitbucket.org/2.0/repositories'
_OAUTH_URL = 'https://bitbucket.org/site/oauth2/access_token'
_GET_TOKEN_DATA = {'grant_type': 'client_credentials'}
class BitbucketStatusPush(ReporterBase):
    """Reporter that pushes build states to the Bitbucket Cloud commit-status
    REST API.

    Authenticates either with OAuth2 client credentials (oauth_key /
    oauth_secret) or with basic auth / App Passwords (auth) - never both.
    """

    name: str | None = "BitbucketStatusPush"  # type: ignore[assignment]

    def checkConfig(
        self,
        oauth_key=None,
        oauth_secret=None,
        auth=None,
        base_url=_BASE_URL,
        oauth_url=_OAUTH_URL,
        debug=None,
        verify=None,
        status_key=None,
        status_name=None,
        generators=None,
        **kwargs,
    ):
        """Validate configuration; the two auth mechanisms are mutually exclusive."""
        if auth is not None and (oauth_key is not None or oauth_secret is not None):
            config.error('Either App Passwords or OAuth can be specified, not both')

        if generators is None:
            generators = self._create_default_generators()

        super().checkConfig(generators=generators, **kwargs)

    @defer.inlineCallbacks
    def reconfigService(
        self,
        oauth_key=None,
        oauth_secret=None,
        auth=None,
        base_url=_BASE_URL,
        oauth_url=_OAUTH_URL,
        debug=None,
        verify=None,
        status_key=None,
        status_name=None,
        generators=None,
        **kwargs,
    ):
        """Apply configuration and create the HTTP session(s)."""
        oauth_key, oauth_secret = yield self.renderSecrets(oauth_key, oauth_secret)
        self.auth = yield self.renderSecrets(auth)
        self.base_url = base_url
        self.debug = debug
        self.verify = verify
        # Both key and display name default to the builder name when unset.
        self.status_key = status_key or Property('buildername')
        self.status_name = status_name or Property('buildername')

        if generators is None:
            generators = self._create_default_generators()

        yield super().reconfigService(generators=generators, **kwargs)

        base_url = base_url.rstrip('/')

        self._http = yield httpclientservice.HTTPSession(
            self.master.httpservice, base_url, debug=self.debug, verify=self.verify, auth=self.auth
        )

        # An OAuth token session is only needed when basic auth is not used.
        self.oauthhttp = None
        if self.auth is None:
            self.oauthhttp = yield httpclientservice.HTTPSession(
                self.master.httpservice,
                oauth_url,
                auth=(oauth_key, oauth_secret),
                debug=self.debug,
                verify=self.verify,
            )

    def _create_default_generators(self):
        # Report at build start and end with empty bodies; Bitbucket only
        # cares about the state/key/name/url fields built in sendMessage.
        return [
            BuildStartEndStatusGenerator(
                start_formatter=MessageFormatter(subject="", template=''),
                end_formatter=MessageFormatter(subject="", template=''),
            )
        ]

    @defer.inlineCallbacks
    def sendMessage(self, reports):
        """Post one commit-status per sourcestamp of the reported build."""
        # Only use OAuth if basic auth has not been specified
        if not self.auth:
            request = yield self.oauthhttp.post("", data=_GET_TOKEN_DATA)
            if request.code != 200:
                content = yield request.content()
                log.msg(f"{request.code}: unable to authenticate to Bitbucket {content}")
                return
            token = (yield request.json())['access_token']
            self._http.update_headers({'Authorization': f'Bearer {token}'})

        build = reports[0]['builds'][0]
        if build['complete']:
            status = BITBUCKET_SUCCESSFUL if build['results'] == SUCCESS else BITBUCKET_FAILED
        else:
            status = BITBUCKET_INPROGRESS

        props = Properties.fromDict(build['properties'])
        props.master = self.master

        def key_hash(key):
            # Bitbucket limits key length; a SHA1 digest keeps it bounded.
            sha_obj = hashlib.sha1()
            sha_obj.update(key.encode('utf-8'))
            return sha_obj.hexdigest()

        status_key = yield props.render(self.status_key)
        body = {
            'state': status,
            'key': key_hash(status_key),
            'name': (yield props.render(self.status_name)),
            'description': reports[0]['subject'],
            'url': build['url'],
        }

        for sourcestamp in build['buildset']['sourcestamps']:
            if not sourcestamp['repository']:
                log.msg(f"Empty repository URL for Bitbucket status {body}")
                continue
            owner, repo = self.get_owner_and_repo(sourcestamp['repository'])

            endpoint = (owner, repo, 'commit', sourcestamp['revision'], 'statuses', 'build')
            bitbucket_uri = f"/{'/'.join(endpoint)}"

            if self.debug:
                log.msg(f"Bitbucket status {bitbucket_uri} {body}")

            response = yield self._http.post(bitbucket_uri, json=body)
            if response.code not in (200, 201):
                content = yield response.content()
                log.msg(f"{response.code}: unable to upload Bitbucket status {content}")

    def get_owner_and_repo(self, repourl):
        """
        Takes a git repository URL from Bitbucket and tries to determine the owner and repository
        name
        :param repourl: Bitbucket git repo in the form of
            [email protected]:OWNER/REPONAME.git
            https://bitbucket.org/OWNER/REPONAME.git
            ssh://[email protected]/OWNER/REPONAME.git
            https://api.bitbucket.org/2.0/repositories/OWNER/REPONAME
        :return: owner, repo: The owner of the repository and the repository name
        """
        parsed = urlparse(repourl)
        base_parsed = urlparse(self.base_url)

        if parsed.path.startswith(base_parsed.path):
            # API-style URL: strip the configured base path prefix.
            path = parsed.path.replace(base_parsed.path, "")
        elif parsed.scheme:
            path = parsed.path
        else:
            # we assume git@host:owner/repo.git here
            path = parsed.path.split(':', 1)[-1]

        path = path.lstrip('/')
        if path.endswith('.git'):
            path = path[:-4]
        path = path.rstrip('/')

        parts = path.split('/')

        assert len(parts) == 2, 'OWNER/REPONAME is expected'

        return parts
| 7,302 | Python | .py | 170 | 33.847059 | 99 | 0.637363 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,843 | buildset.py | buildbot_buildbot/master/buildbot/reporters/generators/buildset.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from typing import ClassVar
from typing import Sequence
from twisted.internet import defer
from zope.interface import implementer
from buildbot import interfaces
from buildbot.process.results import statusToString
from buildbot.reporters import utils
from buildbot.reporters.message import MessageFormatter
from .utils import BuildStatusGeneratorMixin
@implementer(interfaces.IReportGenerator)
class BuildSetStatusGenerator(BuildStatusGeneratorMixin):
    """Generates one combined report when a buildset completes.

    Messages from all builds of the buildset that pass the configured
    filters (mode/tags/builders/schedulers/branches) are merged into a
    single report.
    """

    wanted_event_keys = [
        ('buildsets', None, 'complete'),
    ]

    compare_attrs: ClassVar[Sequence[str]] = ['formatter']

    def __init__(
        self,
        mode=("failing", "passing", "warnings"),
        tags=None,
        builders=None,
        schedulers=None,
        branches=None,
        subject=None,
        add_logs=None,
        add_patch=False,
        message_formatter=None,
    ):
        # Bug fix: the subject argument was previously overwritten
        # unconditionally, silently discarding any caller-supplied subject.
        # Only fall back to the default template when none was given.
        if subject is None:
            subject = "Buildbot %(result)s in %(title)s on %(builder)s"
        super().__init__(mode, tags, builders, schedulers, branches, subject, add_logs, add_patch)
        self.formatter = message_formatter
        if self.formatter is None:
            self.formatter = MessageFormatter()

    @defer.inlineCallbacks
    def generate(self, master, reporter, key, message):
        """IReportGenerator hook: build a report dict for the completed buildset,
        or return None when no build passes the filters."""
        bsid = message['bsid']
        res = yield utils.getDetailsForBuildset(
            master,
            bsid,
            want_properties=self.formatter.want_properties,
            want_steps=self.formatter.want_steps,
            want_previous_build=self._want_previous_build(),
            want_logs=self.formatter.want_logs,
            want_logs_content=self.formatter.want_logs_content,
        )

        builds = res['builds']
        buildset = res['buildset']

        # only include builds for which isMessageNeeded returns true
        builds = [
            build
            for build in builds
            if self.is_message_needed_by_props(build) and self.is_message_needed_by_results(build)
        ]
        if not builds:
            return None

        report = yield self.buildset_message(self.formatter, master, reporter, builds, buildset)
        return report

    @defer.inlineCallbacks
    def buildset_message(self, formatter, master, reporter, builds, buildset):
        """Merge per-build messages into one report for the buildset."""
        # The given builds must refer to builds from a single buildset
        patches = []
        logs = []
        body = None
        subject = None
        msgtype = None
        extra_info = None
        users = set()
        results = buildset["results"]

        for build in builds:
            patches.extend(self._get_patches_for_build(build))

            build_logs = yield self._get_logs_for_build(build)
            logs.extend(build_logs)

            blamelist = yield reporter.getResponsibleUsersForBuild(master, build['buildid'])
            users.update(set(blamelist))

            buildmsg = yield formatter.format_message_for_build(
                master, build, is_buildset=True, mode=self.mode, users=blamelist
            )

            # The _merge_* helpers skip builds whose message parts are
            # incompatible with what was accumulated so far.
            msgtype, ok = self._merge_msgtype(msgtype, buildmsg['type'])
            if not ok:
                continue

            subject = self._merge_subject(subject, buildmsg['subject'])

            body, ok = self._merge_body(body, buildmsg['body'])
            if not ok:
                continue

            extra_info, ok = self._merge_extra_info(extra_info, buildmsg["extra_info"])
            if not ok:
                continue

        if subject is None and self.subject is not None:
            subject = self.subject % {
                'result': statusToString(results),
                'projectName': master.config.title,
                'title': master.config.title,
                'builder': 'whole buildset',
            }

        return {
            'body': body,
            'subject': subject,
            'type': msgtype,
            'results': results,
            'builds': builds,
            "buildset": buildset,
            'users': list(users),
            'patches': patches,
            'logs': logs,
            "extra_info": extra_info,
        }

    def _want_previous_build(self):
        # The previous build is only needed by modes that compare results.
        return "change" in self.mode or "problem" in self.mode
@implementer(interfaces.IReportGenerator)
class BuildSetCombinedStatusGenerator:
    """Generates one unfiltered report per completed buildset, formatted
    with the buildset-level formatter API."""

    wanted_event_keys = [
        ("buildsets", None, "complete"),
    ]

    compare_attrs: ClassVar[Sequence[str]] = ["formatter"]

    def __init__(self, message_formatter):
        self.formatter = message_formatter

    def check(self):
        # No configuration to validate.
        pass

    @defer.inlineCallbacks
    def generate(self, master, reporter, key, message):
        """IReportGenerator hook: produce the report for buildset *message*."""
        fmt = self.formatter
        details = yield utils.getDetailsForBuildset(
            master,
            message["bsid"],
            want_properties=fmt.want_properties,
            want_steps=fmt.want_steps,
            want_logs=fmt.want_logs,
            want_logs_content=fmt.want_logs_content,
        )

        report = yield self.buildset_message(
            fmt, master, reporter, details['buildset'], details['builds']
        )
        return report

    @defer.inlineCallbacks
    def buildset_message(self, formatter, master, reporter, buildset, builds):
        """Format the whole buildset in one pass and wrap it as a report dict."""
        formatted = yield formatter.format_message_for_buildset(
            master, buildset, builds, is_buildset=True, mode=("passing",), users=[]
        )

        report = {
            "body": formatted["body"],
            "subject": formatted["subject"],
            "type": formatted["type"],
            "extra_info": formatted["extra_info"],
            "results": buildset["results"],
            "builds": builds,
            "buildset": buildset,
            "users": [],
            "patches": [],
            "logs": [],
        }
        return report
| 6,502 | Python | .py | 163 | 30.730061 | 98 | 0.624901 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,844 | worker.py | buildbot_buildbot/master/buildbot/reporters/generators/worker.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from typing import ClassVar
from typing import Sequence
from twisted.internet import defer
from zope.interface import implementer
from buildbot import config
from buildbot import interfaces
from buildbot import util
from buildbot.reporters.message import MessageFormatterMissingWorker
ENCODING = 'utf-8'
@implementer(interfaces.IReportGenerator)
class WorkerMissingGenerator(util.ComparableMixin):
    """Generates a report when a tracked worker goes missing."""

    compare_attrs: ClassVar[Sequence[str]] = ['workers', 'formatter']

    wanted_event_keys = [
        ('workers', None, 'missing'),
    ]

    def __init__(self, workers='all', message_formatter=None):
        self.workers = workers
        self.formatter = (
            message_formatter if message_formatter is not None else MessageFormatterMissingWorker()
        )

    def check(self):
        workers_ok = self.workers == 'all' or isinstance(self.workers, (list, tuple, set))
        if not workers_ok:
            config.error("workers must be 'all', or list of worker names")

    @defer.inlineCallbacks
    def generate(self, master, reporter, key, worker):
        """IReportGenerator hook: produce a report dict, or None when this
        worker is not tracked or has nobody to notify."""
        if not self._is_message_needed(worker):
            return None

        formatted = yield self.formatter.formatMessageForMissingWorker(master, worker)

        msg_type = formatted['type']
        assert msg_type in (
            'plain',
            'html',
        ), f"'{msg_type}' message type must be 'plain' or 'html'."

        subject = formatted['subject']
        if subject is None:
            subject = f"Buildbot worker {worker['name']} missing"

        return {
            'body': formatted['body'].encode(ENCODING),
            'subject': subject,
            'type': msg_type,
            'results': None,
            'builds': None,
            "buildset": None,
            'users': worker['notify'],
            'patches': None,
            'logs': None,
            'worker': worker['name'],
        }

    def generate_name(self):
        if self.workers is None:
            return self.__class__.__name__
        return self.__class__.__name__ + "_workers_" + "+".join(self.workers)

    def _is_message_needed(self, worker):
        # Note: deliberately returns worker['notify'] itself (truthiness is
        # what callers test), matching historical behavior.
        tracked = self.workers == 'all' or worker['name'] in self.workers
        return tracked and worker['notify']
| 2,878 | Python | .py | 69 | 34.942029 | 93 | 0.663327 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,845 | buildrequest.py | buildbot_buildbot/master/buildbot/reporters/generators/buildrequest.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from typing import ClassVar
from typing import Sequence
from twisted.internet import defer
from zope.interface import implementer
from buildbot import interfaces
from buildbot.process.build import Build
from buildbot.process.buildrequest import BuildRequest
from buildbot.process.properties import Properties
from buildbot.process.results import CANCELLED
from buildbot.reporters import utils
from buildbot.reporters.message import MessageFormatterRenderable
from .utils import BuildStatusGeneratorMixin
@implementer(interfaces.IReportGenerator)
class BuildRequestGenerator(BuildStatusGeneratorMixin):
    """Generates a report when a build request is created or cancelled,
    synthesizing a partial build dict since no build exists yet."""

    wanted_event_keys = [('buildrequests', None, 'new'), ('buildrequests', None, 'cancel')]

    compare_attrs: ClassVar[Sequence[str]] = ['formatter']

    def __init__(
        self,
        tags=None,
        builders=None,
        schedulers=None,
        branches=None,
        add_patch=False,
        formatter=None,
    ):
        # mode='all': a pending request has no result to filter on.
        super().__init__('all', tags, builders, schedulers, branches, None, None, add_patch)
        self.formatter = formatter
        if self.formatter is None:
            self.formatter = MessageFormatterRenderable('Build pending.')

    @defer.inlineCallbacks
    def partial_build_dict(self, master, buildrequest):
        """Build a build-like dict for a request that has not started yet,
        including the properties known before the build starts."""
        brdict = yield master.db.buildrequests.getBuildRequest(buildrequest['buildrequestid'])
        bdict = {}

        props = Properties()
        buildrequest = yield BuildRequest.fromBrdict(master, brdict)
        builder = yield master.botmaster.getBuilderById(brdict.builderid)

        yield Build.setup_properties_known_before_build_starts(props, [buildrequest], builder)
        Build.setupBuildProperties(props, [buildrequest])

        bdict['properties'] = props.asDict()
        yield utils.get_details_for_buildrequest(master, brdict, bdict)
        return bdict

    @defer.inlineCallbacks
    def generate(self, master, reporter, key, buildrequest):
        """IReportGenerator hook: report a new or cancelled build request,
        or return None when filtered out by properties."""
        build = yield self.partial_build_dict(master, buildrequest)

        # A cancelled request is reported as a completed, cancelled build.
        _, _, event = key
        if event == 'cancel':
            build['complete'] = True
            build['results'] = CANCELLED

        if not self.is_message_needed_by_props(build):
            return None

        report = yield self.buildrequest_message(master, build)
        return report

    @defer.inlineCallbacks
    def buildrequest_message(self, master, build):
        """Format the synthesized build dict into a report dict."""
        patches = self._get_patches_for_build(build)
        users = []
        buildmsg = yield self.formatter.format_message_for_build(
            master, build, is_buildset=True, mode=self.mode, users=users
        )

        return {
            'body': buildmsg['body'],
            'subject': buildmsg['subject'],
            'type': buildmsg['type'],
            'results': build['results'],
            'builds': [build],
            "buildset": build["buildset"],
            'users': list(users),
            'patches': patches,
            'logs': [],
            "extra_info": buildmsg["extra_info"],
        }
| 3,706 | Python | .py | 85 | 36.8 | 94 | 0.694784 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,846 | utils.py | buildbot_buildbot/master/buildbot/reporters/generators/utils.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from typing import ClassVar
from typing import Sequence
from twisted.internet import defer
from twisted.python import log
from buildbot import config
from buildbot import util
from buildbot.process.results import CANCELLED
from buildbot.process.results import EXCEPTION
from buildbot.process.results import FAILURE
from buildbot.process.results import SUCCESS
from buildbot.process.results import WARNINGS
from buildbot.process.results import statusToString
from buildbot.warnings import warn_deprecated
class BuildStatusGeneratorMixin(util.ComparableMixin):
possible_modes = (
"change",
"failing",
"passing",
"problem",
"warnings",
"exception",
"cancelled",
)
compare_attrs: ClassVar[Sequence[str]] = [
'mode',
'tags',
'builders',
'schedulers',
'branches',
'subject',
'add_logs',
'add_patch',
]
    def __init__(self, mode, tags, builders, schedulers, branches, subject, add_logs, add_patch):
        """Store the common filter/formatting configuration shared by the
        build status generators.

        mode is normalized through _compute_shortcut_modes; tags, builders,
        schedulers and branches are optional filter lists (None = no filter);
        subject is an optional %-template; add_logs is deprecated.
        """
        self.mode = self._compute_shortcut_modes(mode)

        if add_logs is not None:
            # Deprecated since 4.1.0 in favor of the formatter's
            # want_logs_content; still stored below for compatibility.
            warn_deprecated(
                '4.1.0',
                (
                    f'{self.__class__.__name__} argument add_logs have been deprecated. '
                    'Please use want_logs_content of the passed message formatter.'
                ),
            )

        self.tags = tags
        self.builders = builders
        self.schedulers = schedulers
        self.branches = branches
        self.subject = subject
        self.add_logs = add_logs
        self.add_patch = add_patch
def check(self):
self._verify_build_generator_mode(self.mode)
if self.subject is not None and '\n' in self.subject:
config.error('Newlines are not allowed in message subjects')
list_or_none_params = [
('tags', self.tags),
('builders', self.builders),
('schedulers', self.schedulers),
('branches', self.branches),
]
for name, param in list_or_none_params:
self._verify_list_or_none_param(name, param)
# you should either limit on builders or tags, not both
if self.builders is not None and self.tags is not None:
config.error("Please specify only builders or tags to include - not both.")
def generate_name(self):
name = self.__class__.__name__
if self.tags is not None:
name += "_tags_" + "+".join(self.tags)
if self.builders is not None:
name += "_builders_" + "+".join(self.builders)
if self.schedulers is not None:
name += "_schedulers_" + "+".join(self.schedulers)
if self.branches is not None:
name += "_branches_" + "+".join(self.branches)
name += "_".join(self.mode)
return name
def is_message_needed_by_props(self, build):
builder = build['builder']
scheduler = build['properties'].get('scheduler', [None])[0]
branch = build['properties'].get('branch', [None])[0]
if self.builders is not None and builder['name'] not in self.builders:
return False
if self.schedulers is not None and scheduler not in self.schedulers:
return False
if self.branches is not None and branch not in self.branches:
return False
if self.tags is not None and not self._matches_any_tag(builder['tags']):
return False
return True
def is_message_needed_by_results(self, build):
results = build['results']
if "change" in self.mode:
prev = build['prev_build']
if prev and prev['results'] != results:
return True
if "failing" in self.mode and results == FAILURE:
return True
if "passing" in self.mode and results == SUCCESS:
return True
if "problem" in self.mode and results == FAILURE:
prev = build['prev_build']
if prev and prev['results'] != FAILURE:
return True
if "warnings" in self.mode and results == WARNINGS:
return True
if "exception" in self.mode and results == EXCEPTION:
return True
if "cancelled" in self.mode and results == CANCELLED:
return True
return False
def _merge_msgtype(self, msgtype, new_msgtype):
if new_msgtype is None:
return msgtype, False
if msgtype is None:
return new_msgtype, True
if msgtype != new_msgtype:
log.msg(
f'{self}: Incompatible message types for multiple builds '
f'({msgtype} and {new_msgtype}). Ignoring'
)
return msgtype, False
return msgtype, True
def _merge_subject(self, subject, new_subject):
if subject is None and new_subject is not None:
return new_subject
return subject
def _merge_body(self, body, new_body):
if body is None:
return new_body, True
if new_body is None:
return body, True
if isinstance(body, str) and isinstance(new_body, str):
return body + new_body, True
if isinstance(body, list) and isinstance(new_body, list):
return body + new_body, True
log.msg(
f'{self}: Incompatible message body types for multiple builds '
f'({type(body)} and {type(new_body)}). Ignoring'
)
return body, False
def _merge_extra_info(self, info, new_info):
if info is None:
return new_info, True
if new_info is None:
return info, True
for key, new_value in new_info.items():
if key not in info:
info[key] = new_value
continue
value = info[key]
for vkey, vvalue in new_value.items():
if vkey not in value:
value[vkey] = vvalue
return info, True
def _get_patches_for_build(self, build):
if not self.add_patch:
return []
ss_list = build['buildset']['sourcestamps']
return [ss['patch'] for ss in ss_list if 'patch' in ss and ss['patch'] is not None]
@defer.inlineCallbacks
def build_message(self, formatter, master, reporter, build):
patches = self._get_patches_for_build(build)
logs = self._get_logs_for_build(build)
users = yield reporter.getResponsibleUsersForBuild(master, build['buildid'])
buildmsg = yield formatter.format_message_for_build(
master, build, is_buildset=False, mode=self.mode, users=users
)
results = build['results']
subject = buildmsg['subject']
if subject is None and self.subject is not None:
subject = self.subject % {
'result': statusToString(results),
'projectName': master.config.title,
'title': master.config.title,
'builder': build['builder']['name'],
}
return {
'body': buildmsg['body'],
'subject': subject,
'type': buildmsg['type'],
'results': results,
'builds': [build],
"buildset": build["buildset"],
'users': list(users),
'patches': patches,
'logs': logs,
"extra_info": buildmsg["extra_info"],
}
def _get_logs_for_build(self, build):
if 'steps' not in build:
return []
all_logs = []
for step in build['steps']:
if 'logs' not in step:
continue
for l in step['logs']:
if 'content' in l:
all_logs.append(l)
return all_logs
def _verify_build_generator_mode(self, mode):
for m in self._compute_shortcut_modes(mode):
if m not in self.possible_modes:
if m == "all":
config.error(
"mode 'all' is not valid in an iterator and must be "
"passed in as a separate string"
)
else:
config.error(f"mode {m} is not a valid mode")
def _verify_list_or_none_param(self, name, param):
if param is not None and not isinstance(param, list):
config.error(f"{name} must be a list or None")
def _compute_shortcut_modes(self, mode):
if isinstance(mode, str):
if mode == "all":
mode = ("failing", "passing", "warnings", "exception", "cancelled")
elif mode == "warnings":
mode = ("failing", "warnings")
else:
mode = (mode,)
return mode
def _matches_any_tag(self, tags):
return self.tags and any(tag for tag in self.tags if tag in tags)
| 9,654 | Python | .py | 238 | 30.382353 | 97 | 0.586704 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,847 | build.py | buildbot_buildbot/master/buildbot/reporters/generators/build.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from typing import ClassVar
from typing import Sequence
from twisted.internet import defer
from zope.interface import implementer
from buildbot import interfaces
from buildbot.reporters import utils
from buildbot.reporters.message import MessageFormatter
from buildbot.reporters.message import MessageFormatterRenderable
from .utils import BuildStatusGeneratorMixin
@implementer(interfaces.IReportGenerator)
class BuildStatusGenerator(BuildStatusGeneratorMixin):
    """Generate a report whenever a build finishes (and optionally when it starts).

    Filtering by mode/tags/builders/schedulers/branches is inherited from
    BuildStatusGeneratorMixin; the message itself is produced by the
    configured message formatter.
    """

    # Data-API event keys this generator subscribes to.
    wanted_event_keys = [
        ('builds', None, 'finished'),
    ]

    compare_attrs: ClassVar[Sequence[str]] = ['formatter']

    def __init__(
        self,
        mode=("failing", "passing", "warnings"),
        tags=None,
        builders=None,
        schedulers=None,
        branches=None,
        add_logs=None,
        add_patch=False,
        report_new=False,
        message_formatter=None,
    ):
        """Store filters and formatter; report_new also subscribes to 'new' events."""
        subject = "Buildbot %(result)s in %(title)s on %(builder)s"
        super().__init__(mode, tags, builders, schedulers, branches, subject, add_logs, add_patch)
        self.formatter = message_formatter
        if self.formatter is None:
            self.formatter = MessageFormatter()

        if report_new:
            # Shadow the class attribute on this instance: also report builds
            # when they start, not only when they finish.
            self.wanted_event_keys = [
                ('builds', None, 'finished'),
                ('builds', None, 'new'),
            ]

    @defer.inlineCallbacks
    def generate(self, master, reporter, key, build):
        """Return a report dict for ``build``, or None if it is filtered out."""
        _, _, event = key
        is_new = event == 'new'
        # Previous-build details are only needed for result comparison, which
        # is skipped for 'new' events (no results yet).
        want_previous_build = False if is_new else self._want_previous_build()
        # Fetch only the build details the formatter actually needs.
        yield utils.getDetailsForBuild(
            master,
            build,
            want_properties=self.formatter.want_properties,
            want_steps=self.formatter.want_steps,
            want_previous_build=want_previous_build,
            want_logs=self.formatter.want_logs,
            add_logs=self.add_logs,
            want_logs_content=self.formatter.want_logs_content,
        )

        if not self.is_message_needed_by_props(build):
            return None
        if not is_new and not self.is_message_needed_by_results(build):
            return None

        report = yield self.build_message(self.formatter, master, reporter, build)
        return report

    def _want_previous_build(self):
        # Only the 'change' and 'problem' modes compare against the previous build.
        return "change" in self.mode or "problem" in self.mode
@implementer(interfaces.IReportGenerator)
class BuildStartEndStatusGenerator(BuildStatusGeneratorMixin):
    """Generate a report both when a build starts and when it finishes.

    Separate message formatters are used for the start ('new') and end
    ('finished') events; result-based filtering is disabled (mode 'all').
    """

    # Data-API event keys this generator subscribes to.
    wanted_event_keys = [
        ('builds', None, 'new'),
        ('builds', None, 'finished'),
    ]

    compare_attrs: ClassVar[Sequence[str]] = ['start_formatter', 'end_formatter']

    def __init__(
        self,
        tags=None,
        builders=None,
        schedulers=None,
        branches=None,
        add_logs=None,
        add_patch=False,
        start_formatter=None,
        end_formatter=None,
    ):
        """Store filters and the two formatters, defaulting to simple renderables."""
        super().__init__('all', tags, builders, schedulers, branches, None, add_logs, add_patch)
        self.start_formatter = start_formatter
        if self.start_formatter is None:
            self.start_formatter = MessageFormatterRenderable('Build started.')
        self.end_formatter = end_formatter
        if self.end_formatter is None:
            self.end_formatter = MessageFormatterRenderable('Build done.')

    @defer.inlineCallbacks
    def generate(self, master, reporter, key, build):
        """Return a report dict for the build event, or None if filtered out."""
        _, _, event = key
        is_new = event == 'new'

        # Pick the formatter matching the event type.
        formatter = self.start_formatter if is_new else self.end_formatter

        # Fetch only the build details the chosen formatter actually needs.
        yield utils.getDetailsForBuild(
            master,
            build,
            want_properties=formatter.want_properties,
            want_steps=formatter.want_steps,
            want_logs=formatter.want_logs,
            add_logs=self.add_logs,
            want_logs_content=formatter.want_logs_content,
        )

        if not self.is_message_needed_by_props(build):
            return None

        report = yield self.build_message(formatter, master, reporter, build)
        return report
| 4,724 | Python | .py | 117 | 32.615385 | 98 | 0.663613 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,848 | servicechecks.py | buildbot_buildbot/master/buildbot/monkeypatches/servicechecks.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
def patch():
    """
    Patch startService and stopService so that they assert on the previous
    running state before delegating to the real implementations.

    (used for debugging only)
    """
    from twisted.application.service import Service

    original_start = Service.startService
    original_stop = Service.stopService

    def checked_start(self):
        assert not self.running, f"{self!r} already running"
        return original_start(self)

    def checked_stop(self):
        assert self.running, f"{self!r} already stopped"
        return original_stop(self)

    Service.startService = checked_start
    Service.stopService = checked_stop
| 1,323 | Python | .py | 31 | 38.870968 | 79 | 0.755642 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,849 | __init__.py | buildbot_buildbot/master/buildbot/monkeypatches/__init__.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import os
import unittest
from twisted.python import util
def onlyOnce(fn):
    "Set up FN to only run once within an interpreter instance"

    def wrapper(*args, **kwargs):
        # A 'called' attribute on the original function marks it as spent.
        if hasattr(fn, 'called'):
            return None
        fn.called = 1
        return fn(*args, **kwargs)

    util.mergeFunctionMetadata(fn, wrapper)
    return wrapper
# NOTE: all of these patches test for applicability *before* importing the
# patch module. This will help cut down on unnecessary imports where the
# patches are not needed, and also avoid problems with patches importing
# private things in external libraries that no longer exist.
@onlyOnce
def patch_testcase_timeout():
    """Give every TestCase a default timeout (longer for real-database runs)."""
    # any test that should take more than 5 second should be annotated so.
    unittest.TestCase.timeout = 5

    # but we know that the DB tests are very slow, so we increase a bit that value for
    # real database tests
    if os.environ.get("BUILDBOT_TEST_DB_URL", None) is not None:
        unittest.TestCase.timeout = 120
@onlyOnce
def patch_servicechecks():
    """Install the start/stopService running-state assertions (debugging aid)."""
    from buildbot.monkeypatches import servicechecks

    servicechecks.patch()
@onlyOnce
def patch_decorators():
    """Patch defer.inlineCallbacks so wrapped functions expose __wrapped__."""
    from buildbot.monkeypatches import decorators

    decorators.patch()
@onlyOnce
def patch_config_for_unit_tests():
    """Tell the config module it is running under unit tests."""
    from buildbot.config.master import set_is_in_unit_tests

    # by default, buildbot.config warns about not configured buildbotNetUsageData.
    # its important for users to not leak information, but unneeded and painful for tests
    set_is_in_unit_tests(True)
def patch_all():
    """Apply every monkeypatch; each underlying patch runs at most once."""
    patch_servicechecks()
    patch_testcase_timeout()
    patch_decorators()
    patch_config_for_unit_tests()
| 2,381 | Python | .py | 57 | 38.140351 | 89 | 0.758036 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,850 | decorators.py | buildbot_buildbot/master/buildbot/monkeypatches/decorators.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from twisted.internet import defer
from twisted.python import util
def patch_noargs_decorator(decorator):
    """Wrap a no-argument decorator so the original function stays reachable
    through the standard ``__wrapped__`` attribute."""

    def new_decorator(func):
        wrapped = decorator(func)
        wrapped.__wrapped__ = func
        return wrapped

    # Copy name/docstring from the original decorator onto the replacement.
    util.mergeFunctionMetadata(decorator, new_decorator)
    return new_decorator
def patch():
    """Replace defer.inlineCallbacks with a variant that exposes __wrapped__."""
    defer.inlineCallbacks = patch_noargs_decorator(defer.inlineCallbacks)
| 1,108 | Python | .py | 25 | 41.44 | 79 | 0.776952 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,851 | python.py | buildbot_buildbot/master/buildbot/steps/python.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import re
from twisted.internet import defer
from buildbot import config
from buildbot.process import buildstep
from buildbot.process import logobserver
from buildbot.process.results import FAILURE
from buildbot.process.results import SUCCESS
from buildbot.process.results import WARNINGS
from buildbot.process.results import statusToString
class BuildEPYDoc(buildstep.ShellMixin, buildstep.BuildStep):
    """Run ``make epydocs`` and count the import errors, warnings and errors
    reported in its output."""

    name = "epydoc"
    command = ["make", "epydocs"]
    description = "building epydocs"
    descriptionDone = "epydoc"

    def __init__(self, **kwargs):
        kwargs = self.setupShellMixin(kwargs)
        super().__init__(**kwargs)
        self.addLogObserver('stdio', logobserver.LineConsumerLogObserver(self._log_consumer))

    def _log_consumer(self):
        """Consume stdio lines, counting epydoc problem reports.

        The counters are created when the log observer primes this generator.
        """
        self.import_errors = 0
        self.warnings = 0
        self.errors = 0
        while True:
            _, line = yield
            if line.startswith("Error importing "):
                self.import_errors += 1
            if "Warning: " in line:
                self.warnings += 1
            if "Error: " in line:
                self.errors += 1

    def getResultSummary(self):
        """Summarize the counters for display."""
        # descriptionDone is joined as a list of words, as elsewhere in these
        # steps (the class-level str is presumably normalized by BuildStep).
        parts = [' '.join(self.descriptionDone)]
        if self.import_errors:
            parts.append(f"ierr={self.import_errors}")
        if self.warnings:
            parts.append(f"warn={self.warnings}")
        if self.errors:
            parts.append(f"err={self.errors}")
        if self.results != SUCCESS:
            parts.append(f'({statusToString(self.results)})')
        return {'step': ' '.join(parts)}

    @defer.inlineCallbacks
    def run(self):
        """Run the command and derive the step status from the counters."""
        cmd = yield self.makeRemoteShellCommand()
        yield self.runCommand(cmd)

        stdio_log = yield self.getLog('stdio')
        yield stdio_log.finish()

        if cmd.didFail():
            return FAILURE
        if self.warnings or self.errors:
            return WARNINGS
        return SUCCESS
class PyFlakes(buildstep.ShellMixin, buildstep.BuildStep):
    """Run pyflakes (via ``make pyflakes`` by default) and categorize its output.

    Output lines are sorted into the buckets listed in ``_MESSAGES``; issues
    in ``_flunkingIssues`` (or a syntax error) fail the build, any other
    issue only produces warnings.
    """

    name = "pyflakes"
    command = ["make", "pyflakes"]
    description = "running pyflakes"
    descriptionDone = "pyflakes"
    flunkOnFailure = False

    # any pyflakes lines like this cause FAILURE
    _flunkingIssues = ("undefined",)

    # all the categories a line may be sorted into
    _MESSAGES = ("unused", "undefined", "redefs", "import*", "misc")

    def __init__(self, *args, **kwargs):
        # PyFlakes return 1 for both warnings and errors. We
        # categorize this initially as WARNINGS so that
        # run() below can inspect the results more closely.
        kwargs['decodeRC'] = {0: SUCCESS, 1: WARNINGS}
        kwargs = self.setupShellMixin(kwargs)
        super().__init__(*args, **kwargs)
        self.addLogObserver('stdio', logobserver.LineConsumerLogObserver(self._log_consumer))

        # per-category line counts and the matching collected lines
        counts = self.counts = {}
        summaries = self.summaries = {}
        for m in self._MESSAGES:
            counts[m] = 0
            summaries[m] = []

        # we need a separate variable for syntax errors
        self._hasSyntaxError = False

    def _log_consumer(self):
        """Consume (stream, line) pairs from stdio and classify each line."""
        counts = self.counts
        summaries = self.summaries
        first = True
        while True:
            stream, line = yield
            if stream == 'h':
                continue

            # the first few lines might contain echoed commands from a 'make
            # pyflakes' step, so don't count these as warnings. Stop ignoring
            # the initial lines as soon as we see one with a colon.
            if first:
                if ':' in line:
                    # there's the colon, this is the first real line
                    first = False
                    # fall through and parse the line
                else:
                    # skip this line, keep skipping non-colon lines
                    continue

            if line.find("imported but unused") != -1:
                m = "unused"
            elif line.find("*' used; unable to detect undefined names") != -1:
                m = "import*"
            elif line.find("undefined name") != -1:
                m = "undefined"
            elif line.find("redefinition of unused") != -1:
                m = "redefs"
            elif line.find("invalid syntax") != -1:
                self._hasSyntaxError = True
                # we can do this, because if a syntax error occurs
                # the output will only contain the info about it, nothing else
                m = "misc"
            else:
                m = "misc"
            summaries[m].append(line)
            counts[m] += 1

    def getResultSummary(self):
        """Summarize the non-zero per-category counts for display."""
        summary = ' '.join(self.descriptionDone)
        for m in self._MESSAGES:
            if self.counts[m]:
                summary += f" {m}={self.counts[m]}"

        if self.results != SUCCESS:
            summary += f' ({statusToString(self.results)})'

        return {'step': summary}

    @defer.inlineCallbacks
    def run(self):
        """Run the command, publish per-category logs/properties, compute status."""
        cmd = yield self.makeRemoteShellCommand()
        yield self.runCommand(cmd)

        stdio_log = yield self.getLog('stdio')
        yield stdio_log.finish()

        # we log 'misc' as syntax-error
        if self._hasSyntaxError:
            yield self.addCompleteLog("syntax-error", "\n".join(self.summaries['misc']))
        else:
            for m in self._MESSAGES:
                if self.counts[m]:
                    yield self.addCompleteLog(m, "\n".join(self.summaries[m]))
                    self.setProperty(f"pyflakes-{m}", self.counts[m], "pyflakes")
            self.setProperty("pyflakes-total", sum(self.counts.values()), "pyflakes")

        if cmd.didFail() or self._hasSyntaxError:
            return FAILURE
        for m in self._flunkingIssues:
            if m in self.counts and self.counts[m] > 0:
                return FAILURE
        if sum(self.counts.values()) > 0:
            return WARNINGS
        return SUCCESS
class PyLint(buildstep.ShellMixin, buildstep.BuildStep):
    """A command that knows about pylint output.
    It is a good idea to add --output-format=parseable to your
    command, since it includes the filename in the message.
    """

    name = "pylint"
    description = "running pylint"
    descriptionDone = "pylint"

    # pylint's return codes (see pylint(1) for details)
    # 1 - 16 will be bit-ORed
    RC_OK = 0
    RC_FATAL = 1
    RC_ERROR = 2
    RC_WARNING = 4
    RC_REFACTOR = 8
    RC_CONVENTION = 16
    RC_USAGE = 32

    # Using the default text output, the message format is :
    # MESSAGE_TYPE: LINE_NUM:[OBJECT:] MESSAGE
    # with --output-format=parseable it is: (the outer brackets are literal)
    # FILE_NAME:LINE_NUM: [MESSAGE_TYPE[, OBJECT]] MESSAGE
    # message type consists of the type char and 4 digits
    # The message types:

    _MESSAGES = {
        'C': "convention",  # for programming standard violation
        'R': "refactor",  # for bad code smell
        'W': "warning",  # for python specific problems
        'E': "error",  # for much probably bugs in the code
        'F': "fatal",  # error prevented pylint from further processing.
        'I': "info",
    }

    _flunkingIssues = ("F", "E")  # msg categories that cause FAILURE

    # character class matching any known message-type letter, as a named group
    _msgtypes_re_str = f"(?P<errtype>[{''.join(list(_MESSAGES))}])"
    _default_line_re = re.compile(rf'^{_msgtypes_re_str}(\d+)?: *\d+(, *\d+)?:.+')
    _default_2_0_0_line_re = re.compile(
        rf'^(?P<path>[^:]+):(?P<line>\d+):\d+: *{_msgtypes_re_str}(\d+)?:.+'
    )
    _parseable_line_re = re.compile(
        rf'(?P<path>[^:]+):(?P<line>\d+): \[{_msgtypes_re_str}(\d+)?(\([a-z-]+\))?[,\]] .+'
    )

    def __init__(self, store_results=True, **kwargs):
        """store_results: also record issues with a known path as test results."""
        kwargs = self.setupShellMixin(kwargs)
        super().__init__(**kwargs)
        self._store_results = store_results
        # per-category line counts and the matching collected lines,
        # keyed by message-type letter
        self.counts = {}
        self.summaries = {}
        for m in self._MESSAGES:
            self.counts[m] = 0
            self.summaries[m] = []
        self.addLogObserver('stdio', logobserver.LineConsumerLogObserver(self._log_consumer))

    # returns (message type, path, line) tuple if line has been matched, or None otherwise
    def _match_line(self, line):
        """Try the three known pylint output formats in turn."""
        m = self._default_2_0_0_line_re.match(line)
        if m:
            try:
                line_int = int(m.group('line'))
            except ValueError:
                line_int = None
            return (m.group('errtype'), m.group('path'), line_int)

        m = self._parseable_line_re.match(line)
        if m:
            try:
                line_int = int(m.group('line'))
            except ValueError:
                line_int = None
            return (m.group('errtype'), m.group('path'), line_int)

        m = self._default_line_re.match(line)
        if m:
            # default text format carries no path/line information
            return (m.group('errtype'), None, None)

        return None

    def _log_consumer(self):
        """Consume (stream, line) pairs from stdio and tally matched issues."""
        while True:
            stream, line = yield
            if stream == 'h':
                continue

            ret = self._match_line(line)
            if not ret:
                continue

            msgtype, path, line_number = ret
            assert msgtype in self._MESSAGES
            self.summaries[msgtype].append(line)
            self.counts[msgtype] += 1

            if self._store_results and path is not None:
                self.addTestResult(
                    self._result_setid, line, test_name=None, test_code_path=path, line=line_number
                )

    def getResultSummary(self):
        """Summarize the non-zero per-category counts for display."""
        summary = ' '.join(self.descriptionDone)
        for msg, fullmsg in sorted(self._MESSAGES.items()):
            if self.counts[msg]:
                summary += f" {fullmsg}={self.counts[msg]}"

        if self.results != SUCCESS:
            summary += f' ({statusToString(self.results)})'

        return {'step': summary}

    @defer.inlineCallbacks
    def run(self):
        """Run pylint, publish per-category logs/properties, compute status."""
        cmd = yield self.makeRemoteShellCommand()
        yield self.runCommand(cmd)

        stdio_log = yield self.getLog('stdio')
        yield stdio_log.finish()

        for msg, fullmsg in sorted(self._MESSAGES.items()):
            if self.counts[msg]:
                yield self.addCompleteLog(fullmsg, "\n".join(self.summaries[msg]))
                self.setProperty(f"pylint-{fullmsg}", self.counts[msg], 'Pylint')
        self.setProperty("pylint-total", sum(self.counts.values()), 'Pylint')

        # pylint's exit status is a bitmask of the RC_* flags above
        if cmd.rc & (self.RC_FATAL | self.RC_ERROR | self.RC_USAGE):
            return FAILURE
        for msg in self._flunkingIssues:
            if msg in self.counts and self.counts[msg] > 0:
                return FAILURE
        if sum(self.counts.values()) > 0:
            return WARNINGS
        return SUCCESS

    @defer.inlineCallbacks
    def addTestResultSets(self):
        """Create the test result set that _log_consumer records issues into."""
        if not self._store_results:
            return
        self._result_setid = yield self.addTestResultSet('Pylint warnings', 'code_issue', 'message')
class Sphinx(buildstep.ShellMixin, buildstep.BuildStep):
    """A Step to build sphinx documentation.

    Builds the sphinx-build command line from the constructor arguments and
    scans the output for warnings; any warning makes the step WARNINGS, a
    missing 'build succeeded' line makes it FAILURE.
    """

    name = "sphinx"
    description = "running sphinx"
    descriptionDone = "sphinx"
    haltOnFailure = True

    def __init__(
        self,
        sphinx_sourcedir='.',
        sphinx_builddir=None,
        sphinx_builder=None,
        sphinx='sphinx-build',
        tags=None,
        defines=None,
        strict_warnings=False,
        mode='incremental',
        **kwargs,
    ):
        """
        @param sphinx_sourcedir: directory holding the documentation sources
        @param sphinx_builddir: output directory (required)
        @param sphinx_builder: value for sphinx-build's -b option, if any
        @param sphinx: the sphinx-build executable to run
        @param tags: values passed via -t
        @param defines: mapping passed via -D (None values pass the bare key,
            booleans are translated to 0/1)
        @param strict_warnings: pass -W (treat warnings as errors)
        @param mode: 'incremental' (default) or 'full' (-E, ignore saved env)
        """
        if tags is None:
            tags = []
        if defines is None:
            defines = {}
        if sphinx_builddir is None:
            # Who the heck is not interested in the built doc ?
            config.error("Sphinx argument sphinx_builddir is required")
        if mode not in ('incremental', 'full'):
            # Fixed: the original message was garbled by a misplaced string
            # concatenation ("...'incremental' or'full' is required").
            config.error("Sphinx argument mode has to be 'incremental' or 'full'")
        self.success = False
        kwargs = self.setupShellMixin(kwargs)
        super().__init__(**kwargs)

        # build the command
        command = [sphinx]
        if sphinx_builder is not None:
            command.extend(['-b', sphinx_builder])

        for tag in tags:
            command.extend(['-t', tag])

        for key in sorted(defines):
            value = defines[key]
            if value is None:
                command.extend(['-D', key])
            elif isinstance(value, bool):
                # sphinx-build expects 0/1 rather than False/True
                command.extend(['-D', f'{key}={1 if value else 0}'])
            else:
                command.extend(['-D', f'{key}={value}'])

        if mode == 'full':
            command.extend(['-E'])  # Don't use a saved environment
        if strict_warnings:
            command.extend(['-W'])  # Convert warnings to errors

        command.extend([sphinx_sourcedir, sphinx_builddir])
        self.command = command

        self.addLogObserver('stdio', logobserver.LineConsumerLogObserver(self._log_consumer))

    # message-severity markers that flag a line as a warning
    _msgs = ('WARNING', 'ERROR', 'SEVERE')

    def _log_consumer(self):
        """Consume stdio lines, recording success and collecting warnings."""
        self.warnings = []
        next_is_warning = False

        while True:
            _, line = yield
            if line.startswith(('build succeeded', 'no targets are out of date.')):
                self.success = True
            elif line.startswith('Warning, treated as error:'):
                # with -W the actual warning text is on the following line
                next_is_warning = True
            else:
                if next_is_warning:
                    self.warnings.append(line)
                    next_is_warning = False
                else:
                    # NOTE(review): a line containing several markers is
                    # appended once per marker, as in the original.
                    for msg in self._msgs:
                        if msg in line:
                            self.warnings.append(line)

    def getResultSummary(self):
        """Summarize the warning count for display."""
        summary = f'{self.name} {len(self.warnings)} warnings'

        if self.results != SUCCESS:
            summary += f' ({statusToString(self.results)})'

        return {'step': summary}

    @defer.inlineCallbacks
    def run(self):
        """Run sphinx-build and derive the step status from its output."""
        cmd = yield self.makeRemoteShellCommand()
        yield self.runCommand(cmd)

        stdio_log = yield self.getLog('stdio')
        yield stdio_log.finish()

        if self.warnings:
            yield self.addCompleteLog('warnings', "\n".join(self.warnings))

        self.setStatistic('warnings', len(self.warnings))

        if self.success:
            if not self.warnings:
                return SUCCESS
            return WARNINGS
        return FAILURE
| 14,995 | Python | .py | 362 | 31.433702 | 100 | 0.586812 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,852 | python_twisted.py | buildbot_buildbot/master/buildbot/steps/python_twisted.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
"""
BuildSteps that are specific to the Twisted source tree
"""
from __future__ import annotations
import re
from twisted.internet import defer
from twisted.internet.base import ReactorBase
from twisted.python import log
from buildbot import util
from buildbot.process import buildstep
from buildbot.process import logobserver
from buildbot.process.results import FAILURE
from buildbot.process.results import SKIPPED
from buildbot.process.results import SUCCESS
from buildbot.process.results import WARNINGS
from buildbot.steps import shell
class HLint(buildstep.ShellMixin, buildstep.BuildStep):
    """Run the Lore 'lint' checker over a set of .xhtml files.

    The file list is extracted from the parent Build's changes; any deviation
    from recommended style is flagged and put in the output log.
    """

    name = "hlint"
    description = "running hlint"
    descriptionDone = "hlint"
    warnOnWarnings = True
    warnOnFailure = True
    # TODO: track time, but not output
    warnings = 0

    def __init__(self, python=None, **kwargs):
        kwargs = self.setupShellMixin(kwargs, prohibitArgs=['command'])
        super().__init__(**kwargs)
        self.python = python
        self.warningLines = []
        self.addLogObserver('stdio', logobserver.LineConsumerLogObserver(self.logConsumer))

    @defer.inlineCallbacks
    def run(self):
        # Target list: Lore XHTML files from the changes, minus the sandbox,
        # de-duplicated and sorted for a stable command line.
        targets = sorted({
            f
            for f in self.build.allFiles()
            if f.endswith(".xhtml") and not f.startswith("sandbox/")
        })
        if not targets:
            return SKIPPED
        self.hlintFiles = targets

        # create the command
        command = []
        if self.python:
            command.append(self.python)
        command.extend(["bin/lore", "-p", "--output", "lint", *self.hlintFiles])

        cmd = yield self.makeRemoteShellCommand(command=command)
        yield self.runCommand(cmd)

        stdio_log = yield self.getLog('stdio')
        yield stdio_log.finish()

        yield self.addCompleteLog('warnings', '\n'.join(self.warningLines))
        yield self.addCompleteLog("files", "\n".join(self.hlintFiles) + "\n")

        # warnings are in stdout, rc is always 0, unless the tools break
        if cmd.didFail():
            return FAILURE

        plural = 't' if self.warnings == 1 else 'ts'
        self.descriptionDone = f"{self.warnings} hlin{plural}"
        if self.warnings:
            return WARNINGS
        return SUCCESS

    def logConsumer(self):
        """Count every output line containing a colon as one hlint warning."""
        while True:
            _, line = yield
            if ':' in line:
                self.warnings += 1
                self.warningLines.append(line)
class TrialTestCaseCounter(logobserver.LogLineObserver):
    """Log observer counting trial test cases and parsing the final totals.

    Feeds per-test progress to the step and accumulates the counters printed
    in trial's summary line (failures, errors, skips, ...).
    """

    # Matches per-test result lines from the bwverbose reporter, e.g.
    #   buildbot.test.test_runner.Create.testSlave ... [OK]
    #   Doctest: viff.field.GF ... [OK]
    _line_re = re.compile(r'^(?:Doctest: )?([\w\.]+) \.\.\. \[([^\]]+)\]$')

    def __init__(self):
        super().__init__()
        self.numTests = 0
        self.finished = False
        self.counts = {
            'total': None,
            'failures': 0,
            'errors': 0,
            'skips': 0,
            'expectedFailures': 0,
            'unexpectedSuccesses': 0,
        }

    def outLineReceived(self, line):
        # different versions of Twisted emit different per-test lines with
        # the bwverbose reporter.
        # 2.0.0: testSlave (buildbot.test.test_runner.Create) ... [OK]
        # 2.1.0: buildbot.test.test_runner.Create.testSlave ... [OK]
        # 2.4.0: buildbot.test.test_runner.Create.testSlave ... [OK]
        # Let's just handle the most recent version, since it's the easiest.
        # Note that doctests create lines line this:
        # Doctest: viff.field.GF ... [OK]

        if line.startswith("=" * 40):
            # The '====' separator marks the start of the summary section;
            # stop counting individual cases from here on.
            self.finished = True
        if not self.finished:
            # Fixed: the original called m.groups() here and discarded the
            # result (dead code); the match is only used as a boolean.
            if self._line_re.search(line.strip()):
                self.numTests += 1
                self.step.setProgress('tests', self.numTests)

        out = re.search(r'Ran (\d+) tests', line)
        if out:
            self.counts['total'] = int(out.group(1))

        if line.startswith("OK") or line.startswith("FAILED ") or line.startswith("PASSED"):
            # the extra space on FAILED_ is to distinguish the overall
            # status from an individual test which failed. The lack of a
            # space on the OK is because it may be printed without any
            # additional text (if there are no skips,etc)
            # 'successes=' is a Twisted-2.0 addition, and is not currently used.
            for key in (
                'failures',
                'errors',
                'skips',
                'expectedFailures',
                'unexpectedSuccesses',
                'successes',
            ):
                out = re.search(rf'{key}=(\d+)', line)
                if out:
                    self.counts[key] = int(out.group(1))
UNSPECIFIED = () # since None is a valid choice
class Trial(buildstep.ShellMixin, buildstep.BuildStep):
    """
    There are some class attributes which may be usefully overridden
    by subclasses. 'trialMode' and 'trialArgs' can influence the trial
    command line.
    """

    name = "trial"
    progressMetrics = ('output', 'tests', 'test.log')
    # note: the slash only works on unix workers, of course, but we have
    # no way to know what the worker uses as a separator.
    # TODO: figure out something clever.
    logfiles = {"test.log": "_trial_temp/test.log"}
    # we use test.log to track Progress at the end of __init__()

    renderables = ['tests', 'jobs']
    flunkOnFailure = True
    # interpreter used to launch trial (e.g. ['python3']); None runs 'trial' directly
    python: list[str] | str | None = None
    trial = "trial"
    trialMode = ["--reporter=bwverbose"]  # requires Twisted-2.1.0 or newer
    # for Twisted-2.0.0 or 1.3.0, use ["-o"] instead
    trialArgs: list[str] = []
    # number of parallel trial worker processes (--jobs); None runs single-process
    jobs: int | None = None
    testpath = UNSPECIFIED  # required (but can be None)
    testChanges = False  # TODO: needs better name
    recurse = False
    reactor: ReactorBase | None = None
    randomly = False
    tests: list[str] | None = None  # required
    description = 'testing'
    descriptionDone = 'tests'
def __init__(
self,
reactor=UNSPECIFIED,
python=None,
trial=None,
testpath=UNSPECIFIED,
tests=None,
testChanges=None,
recurse=None,
randomly=None,
trialMode=None,
trialArgs=None,
jobs=None,
**kwargs,
):
kwargs = self.setupShellMixin(kwargs, prohibitArgs=['command'])
super().__init__(**kwargs)
if python:
self.python = python
if self.python is not None:
if isinstance(self.python, str):
self.python = [self.python]
for s in self.python:
if " " in s:
# this is not strictly an error, but I suspect more
# people will accidentally try to use python="python2.3
# -Wall" than will use embedded spaces in a python flag
log.msg("python= component '%s' has spaces")
log.msg("To add -Wall, use python=['python', '-Wall']")
why = "python= value has spaces, probably an error"
raise ValueError(why)
if trial:
self.trial = trial
if " " in self.trial:
raise ValueError("trial= value has spaces")
if trialMode is not None:
self.trialMode = trialMode
if trialArgs is not None:
self.trialArgs = trialArgs
if jobs is not None:
self.jobs = jobs
if testpath is not UNSPECIFIED:
self.testpath = testpath
if self.testpath is UNSPECIFIED:
raise ValueError("You must specify testpath= (it can be None)")
assert isinstance(self.testpath, str) or self.testpath is None
if reactor is not UNSPECIFIED:
self.reactor = reactor
if tests is not None:
self.tests = tests
if isinstance(self.tests, str):
self.tests = [self.tests]
if testChanges is not None:
self.testChanges = testChanges
# self.recurse = True # not sure this is necessary
if not self.testChanges and self.tests is None:
raise ValueError("Must either set testChanges= or provide tests=")
if recurse is not None:
self.recurse = recurse
if randomly is not None:
self.randomly = randomly
if self.reactor:
self.description = f"testing ({self.reactor})"
# this counter will feed Progress along the 'test cases' metric
self.observer = TrialTestCaseCounter()
self.addLogObserver('stdio', self.observer)
# this observer consumes multiple lines in a go, so it can't be easily
# handled in TrialTestCaseCounter.
self.addLogObserver('stdio', logobserver.LineConsumerLogObserver(self.logConsumer))
self.problems = []
self.warnings = {}
# text used before commandComplete runs
self.text = 'running'
def setup_python_path(self):
if self.testpath is None:
return
# this bit produces a list, which can be used by buildbot_worker.runprocess.RunProcess
ppath = self.env.get('PYTHONPATH', self.testpath)
if isinstance(ppath, str):
ppath = [ppath]
if self.testpath not in ppath:
ppath.insert(0, self.testpath)
self.env['PYTHONPATH'] = ppath
    @defer.inlineCallbacks
    def run(self):
        """Assemble the trial command line, run it, collect the warnings and
        problems extracted by logConsumer(), and summarize the outcome via
        build_results()."""
        # choose progressMetrics and logfiles based on whether trial is being
        # run with multiple workers or not.
        output_observer = logobserver.OutputProgressObserver('test.log')
        # build up most of the command, then stash it until start()
        command = []
        if self.python:
            command.extend(self.python)
        command.append(self.trial)
        command.extend(self.trialMode)
        if self.recurse:
            command.append("--recurse")
        if self.reactor:
            command.append(f"--reactor={self.reactor}")
        if self.randomly:
            command.append("--random=0")
        command.extend(self.trialArgs)
        if self.jobs is not None:
            self.jobs = int(self.jobs)
            command.append(f"--jobs={self.jobs}")
            # using -j/--jobs flag produces more than one test log.
            # Register one set of per-job log files (test/err/out) and attach
            # the progress observer to each per-job test log.
            self.logfiles = {}
            for i in range(self.jobs):
                self.logfiles[f'test.{i}.log'] = f'_trial_temp/{i}/test.log'
                self.logfiles[f'err.{i}.log'] = f'_trial_temp/{i}/err.log'
                self.logfiles[f'out.{i}.log'] = f'_trial_temp/{i}/out.log'
                self.addLogObserver(f'test.{i}.log', output_observer)
        else:
            # this one just measures bytes of output in _trial_temp/test.log
            self.addLogObserver('test.log', output_observer)
        # now that self.build.allFiles() is nailed down, finish building the
        # command
        if self.testChanges:
            # run only the test modules touched by the changes in this build
            for f in self.build.allFiles():
                if f.endswith(".py"):
                    command.append(f"--testmodule={f}")
        else:
            command.extend(self.tests)
        self.setup_python_path()
        cmd = yield self.makeRemoteShellCommand(command=command)
        yield self.runCommand(cmd)
        stdio_log = yield self.getLog('stdio')
        yield stdio_log.finish()
        # figure out all status, then let the various hook functions return
        # different pieces of it
        # self.problems / self.warnings were filled in by logConsumer() while
        # the command was running.
        problems = '\n'.join(self.problems)
        warnings = self.warnings
        if problems:
            yield self.addCompleteLog("problems", problems)
        if warnings:
            # warnings is a dict keyed by warning text; emit each unique
            # warning once, sorted
            lines = sorted(warnings.keys())
            yield self.addCompleteLog("warnings", "".join(lines))
        return self.build_results(cmd)
def build_results(self, cmd):
counts = self.observer.counts
total = counts['total']
failures = counts['failures']
errors = counts['errors']
parsed = total is not None
desc_parts = []
if not cmd.didFail():
if parsed:
results = SUCCESS
if total:
desc_parts += [str(total), total == 1 and "test" or "tests", "passed"]
else:
desc_parts += ["no tests", "run"]
else:
results = FAILURE
desc_parts += ["testlog", "unparseable"]
else:
# something failed
results = FAILURE
if parsed:
desc_parts += ["tests"]
if failures:
desc_parts += [str(failures), failures == 1 and "failure" or "failures"]
if errors:
desc_parts += [str(errors), errors == 1 and "error" or "errors"]
else:
desc_parts += ["tests", "failed"]
if counts['skips']:
desc_parts += [str(counts['skips']), counts['skips'] == 1 and "skip" or "skips"]
if counts['expectedFailures']:
desc_parts += [
str(counts['expectedFailures']),
"todo" if counts['expectedFailures'] == 1 else "todos",
]
if self.reactor:
desc_parts.append(self.rtext('({})'))
self.descriptionDone = util.join_list(desc_parts)
return results
def rtext(self, fmt='{}'):
if self.reactor:
rtext = fmt.format(self.reactor)
return rtext.replace("reactor", "")
return ""
    def logConsumer(self):
        """Generator fed line-by-line by LineConsumerLogObserver; collects
        warnings into self.warnings and, once a trial separator line is
        seen, every remaining line into self.problems."""
        while True:
            _, line = yield
            if line.find(" exceptions.DeprecationWarning: ") != -1:
                # no source
                warning = line  # TODO: consider stripping basedir prefix here
                self.warnings[warning] = self.warnings.get(warning, 0) + 1
            elif line.find(" DeprecationWarning: ") != -1 or line.find(" UserWarning: ") != -1:
                # next line is the source
                warning = line + "\n" + (yield)[1] + "\n"
                self.warnings[warning] = self.warnings.get(warning, 0) + 1
            elif line.find("Warning: ") != -1:
                warning = line
                self.warnings[warning] = self.warnings.get(warning, 0) + 1
            # deliberately a plain 'if' (not elif): a separator line of 60
            # '='s or '-'s marks the start of trial's failure report
            if line.find("=" * 60) == 0 or line.find("-" * 60) == 0:
                # read to EOF
                # NOTE(review): from here on, every line (including the
                # separator) is treated as part of the problems report; the
                # inner loop only terminates when the log ends and the
                # generator is closed.
                while True:
                    self.problems.append(line)
                    _, line = yield
class RemovePYCs(shell.ShellCommand):
    """Shell step that deletes .pyc files under the build directory."""

    name = "remove_pyc"
    # NOTE(review): the glob is quoted *inside* the list element, so if the
    # command runs without a shell, 'find' receives the literal quotes and
    # matches files literally named '*.pyc' — confirm the quoting is intended.
    command = ['find', '.', '-name', "'*.pyc'", '-exec', 'rm', '{}', ';']
    description = "removing .pyc files"
    descriptionDone = "remove .pycs"
| 16,088 | Python | .py | 376 | 32.720745 | 95 | 0.591255 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,853 | mswin.py | buildbot_buildbot/master/buildbot/steps/mswin.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from twisted.internet import defer
from twisted.python import log
from buildbot.process.buildstep import BuildStep
from buildbot.process.buildstep import ShellMixin
from buildbot.process.results import EXCEPTION
from buildbot.process.results import FAILURE
from buildbot.process.results import SUCCESS
from buildbot.process.results import WARNINGS
class Robocopy(ShellMixin, BuildStep):
    """Robocopy build step.

    Thin wrapper around a shell invocation of Windows' robocopy: constructor
    options become command-line switches, and robocopy's bit-flag exit code
    is mapped onto a Buildbot result constant.
    """

    renderables = ['custom_opts', 'destination', 'exclude_dirs', 'exclude_files', 'files', 'source']

    # Robocopy exit flags (they are combined to make up the exit code)
    # See: http://ss64.com/nt/robocopy-exit.html
    return_flags = {FAILURE: [8, 16], WARNINGS: [2, 4], SUCCESS: [0, 1]}

    def __init__(self, source, destination, exclude=None, exclude_files=None, **kwargs):
        self.source = source
        self.destination = destination
        self.files = kwargs.pop('files', None)
        self.recursive = kwargs.pop('recursive', False)
        self.mirror = kwargs.pop('mirror', False)
        self.move = kwargs.pop('move', False)
        # 'exclude' is the legacy spelling; it only takes effect when the
        # newer 'exclude_files' argument was not supplied.
        if exclude and not exclude_files:
            self.exclude_files = exclude
        else:
            self.exclude_files = exclude_files
        self.exclude_dirs = kwargs.pop('exclude_dirs', None)
        self.custom_opts = kwargs.pop('custom_opts', None)
        self.verbose = kwargs.pop('verbose', False)
        super().__init__(**kwargs)

    @defer.inlineCallbacks
    def run(self):
        command = ['robocopy', self.source, self.destination]
        if self.files:
            command.extend(self.files)
        # simple boolean option -> switch translations
        for enabled, switch in (
            (self.recursive, '/E'),
            (self.mirror, '/MIR'),
            (self.move, '/MOVE'),
        ):
            if enabled:
                command.append(switch)
        if self.exclude_files:
            command.append('/XF')
            command.extend(self.exclude_files)
        if self.exclude_dirs:
            command.append('/XD')
            command.extend(self.exclude_dirs)
        if self.verbose:
            command.extend(['/V', '/TS', '/FP'])
        if self.custom_opts:
            command.extend(self.custom_opts)
        command.extend(['/TEE', '/NP'])
        cmd = yield self.makeRemoteShellCommand(command=command)
        yield self.runCommand(cmd)
        # rc 0 and 1 are unconditional success; otherwise decode the bit
        # flags, with errors taking precedence over warnings.
        if cmd.rc in (0, 1):
            return SUCCESS
        for result in (FAILURE, WARNINGS):
            if any((cmd.rc & flag) == flag for flag in self.return_flags[result]):
                return result
        log.msg(f"Unknown return code for Robocopy: {cmd.rc}")
        return EXCEPTION
| 3,523 | Python | .py | 79 | 37.341772 | 100 | 0.666375 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,854 | worker.py | buildbot_buildbot/master/buildbot/steps/worker.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import os
import stat
from twisted.internet import defer
from buildbot.process import buildstep
from buildbot.process import remotecommand
from buildbot.process import remotetransfer
from buildbot.process.results import FAILURE
from buildbot.process.results import SUCCESS
class WorkerBuildStep(buildstep.BuildStep):
    # Common base class for the worker-side utility steps defined below.
    pass
class SetPropertiesFromEnv(WorkerBuildStep):
    """
    Set build properties from environment variables on the worker.

    Note: the environment is captured when the worker first connects.
    """

    name = 'SetPropertiesFromEnv'
    description = ['Setting']
    descriptionDone = ['Set']

    def __init__(self, variables, source="WorkerEnvironment", **kwargs):
        super().__init__(**kwargs)
        self.variables = variables
        self.source = source

    @defer.inlineCallbacks
    def run(self):
        # On Windows, environment variables are case-insensitive, but
        # worker_environ is a case-sensitive dict folded to uppercase, so
        # fold the requested names the same way before looking them up.
        fold = self.worker.worker_system == 'win32'
        properties = self.build.getProperties()
        environ = self.worker.worker_environ
        names = self.variables
        if isinstance(names, str):
            names = [self.variables]
        log_lines = []
        for name in names:
            lookup = name.upper() if fold else name
            value = environ.get(lookup, None)
            if value:
                # note that the property name keeps the caller's casing
                properties.setProperty(name, value, self.source, runtime=True)
                log_lines.append(f"{name} = {value!r}")
        yield self.addCompleteLog("properties", "\n".join(log_lines))
        return SUCCESS
class FileExists(WorkerBuildStep):
    """
    Check that a regular file exists on the worker.
    """

    name = 'FileExists'
    renderables = ['file']
    haltOnFailure = True
    flunkOnFailure = True

    def __init__(self, file, **kwargs):
        super().__init__(**kwargs)
        self.file = file

    @defer.inlineCallbacks
    def run(self):
        self.checkWorkerHasCommand('stat')
        cmd = remotecommand.RemoteCommand('stat', {'file': self.file})
        yield self.runCommand(cmd)
        if cmd.didFail():
            self.descriptionDone = ["File not found."]
            return FAILURE
        # last 'stat' update is an os.stat-style tuple
        st = cmd.updates["stat"][-1]
        if not stat.S_ISREG(st[stat.ST_MODE]):
            self.descriptionDone = ["Not a file."]
            return FAILURE
        self.descriptionDone = ["File found."]
        return SUCCESS
class CopyDirectory(WorkerBuildStep):
    """
    Copy a directory tree on the worker.
    """

    name = 'CopyDirectory'
    description = ['Copying']
    descriptionDone = ['Copied']
    renderables = ['src', 'dest']
    haltOnFailure = True
    flunkOnFailure = True

    def __init__(self, src, dest, timeout=120, maxTime=None, **kwargs):
        super().__init__(**kwargs)
        self.src = src
        self.dest = dest
        self.timeout = timeout
        self.maxTime = maxTime

    @defer.inlineCallbacks
    def run(self):
        self.checkWorkerHasCommand('cpdir')
        args = {'fromdir': self.src, 'todir': self.dest, 'timeout': self.timeout}
        if self.maxTime:
            args['maxTime'] = self.maxTime
        cmd = remotecommand.RemoteCommand('cpdir', args)
        yield self.runCommand(cmd)
        if not cmd.didFail():
            self.descriptionDone = ["Copied", self.src, "to", self.dest]
            return SUCCESS
        self.descriptionDone = ["Copying", self.src, "to", self.dest, "failed."]
        return FAILURE
class RemoveDirectory(WorkerBuildStep):
    """
    Remove a directory tree on the worker.
    """

    name = 'RemoveDirectory'
    description = ['Deleting']
    descriptionDone = ['Deleted']
    renderables = ['dir']
    haltOnFailure = True
    flunkOnFailure = True

    def __init__(self, dir, **kwargs):
        super().__init__(**kwargs)
        self.dir = dir

    @defer.inlineCallbacks
    def run(self):
        self.checkWorkerHasCommand('rmdir')
        cmd = remotecommand.RemoteCommand('rmdir', {'dir': self.dir})
        yield self.runCommand(cmd)
        if not cmd.didFail():
            return SUCCESS
        self.descriptionDone = ["Delete failed."]
        return FAILURE
class MakeDirectory(WorkerBuildStep):
    """
    Create a directory (and any missing parents) on the worker.
    """

    name = 'MakeDirectory'
    description = ['Creating']
    descriptionDone = ['Created']
    renderables = ['dir']
    haltOnFailure = True
    flunkOnFailure = True

    def __init__(self, dir, **kwargs):
        super().__init__(**kwargs)
        self.dir = dir

    @defer.inlineCallbacks
    def run(self):
        self.checkWorkerHasCommand('mkdir')
        cmd = remotecommand.RemoteCommand('mkdir', {'dir': self.dir})
        yield self.runCommand(cmd)
        if not cmd.didFail():
            return SUCCESS
        self.descriptionDone = ["Create failed."]
        return FAILURE
class CompositeStepMixin:
    """Mixin providing one-shot remote commands (rmdir, mkdir, glob, small
    file transfers, ...) for steps composed of several worker operations."""

    def workerPathToMasterPath(self, path):
        # Split using the worker's path module (the worker may run a
        # different OS) and re-join with the master's path conventions.
        return os.path.join(*self.worker.path_module.split(path))

    @defer.inlineCallbacks
    def addLogForRemoteCommands(self, logname):
        """Create the log that subsequent remote commands will write to.

        This method must be called by user classes.  A composite step may
        create several logs; the mixin's commands always write to the most
        recently created one (self.rc_log).
        """
        self.rc_log = yield self.addLog(logname)
        return self.rc_log

    def runRemoteCommand(
        self, cmd, args, abandonOnFailure=True, evaluateCommand=lambda cmd: cmd.didFail()
    ):
        """Generic RemoteCommand boilerplate.

        Runs *cmd* with *args*; raises BuildStepFailed on failure when
        abandonOnFailure is set, otherwise returns evaluateCommand(cmd).
        """
        cmd = remotecommand.RemoteCommand(cmd, args)
        if hasattr(self, "rc_log"):
            cmd.useLog(self.rc_log, False)
        d = self.runCommand(cmd)

        def commandComplete(cmd):
            if abandonOnFailure and cmd.didFail():
                raise buildstep.BuildStepFailed()
            return evaluateCommand(cmd)

        d.addCallback(lambda res: commandComplete(cmd))
        return d

    def runRmdir(self, dir, timeout=None, **kwargs):
        """remove a directory from the worker"""
        cmd_args = {'dir': dir, 'logEnviron': self.logEnviron}
        if timeout:
            cmd_args['timeout'] = timeout
        return self.runRemoteCommand('rmdir', cmd_args, **kwargs)

    def runRmFile(self, path, timeout=None, **kwargs):
        """remove a file from the worker"""
        cmd_args = {'path': path, 'logEnviron': self.logEnviron}
        if timeout:
            cmd_args['timeout'] = timeout
        # old workers have no 'rmfile' command; fall back to 'rmdir', which
        # on those versions also removes plain files
        if self.workerVersionIsOlderThan('rmfile', '3.1'):
            cmd_args['dir'] = os.path.abspath(path)
            return self.runRemoteCommand('rmdir', cmd_args, **kwargs)
        return self.runRemoteCommand('rmfile', cmd_args, **kwargs)

    def pathExists(self, path):
        """test whether path exists on the worker, returning a bool"""

        def commandComplete(cmd):
            return not cmd.didFail()

        return self.runRemoteCommand(
            'stat',
            {
                'file': path,
                'logEnviron': self.logEnviron,
            },
            abandonOnFailure=False,
            evaluateCommand=commandComplete,
        )

    def runMkdir(self, _dir, **kwargs):
        """create a directory and its parents"""
        return self.runRemoteCommand(
            'mkdir',
            {
                'dir': _dir,
                'logEnviron': self.logEnviron,
            },
            **kwargs,
        )

    def runGlob(self, path, **kwargs):
        """find files matching a shell-style pattern; returns the match list"""

        def commandComplete(cmd):
            # last 'files' update carries the final list of matches
            return cmd.updates['files'][-1]

        return self.runRemoteCommand(
            'glob',
            {
                'path': path,
                'logEnviron': self.logEnviron,
            },
            evaluateCommand=commandComplete,
            **kwargs,
        )

    def getFileContentFromWorker(self, filename, abandonOnFailure=False):
        """Fetch a file's content from the worker; returns the content as a
        string, or None if the upload failed (and abandonOnFailure is off)."""
        self.checkWorkerHasCommand("uploadFile")
        fileWriter = remotetransfer.StringFileWriter()
        # default arguments
        args = {
            'workdir': self.workdir,
            'writer': fileWriter,
            'maxsize': None,
            'blocksize': 32 * 1024,
        }
        # pre-3.0 workers use the old 'slavesrc' argument name
        if self.workerVersionIsOlderThan('uploadFile', '3.0'):
            args['slavesrc'] = filename
        else:
            args['workersrc'] = filename

        def commandComplete(cmd):
            if cmd.didFail():
                return None
            return fileWriter.buffer

        return self.runRemoteCommand(
            'uploadFile', args, abandonOnFailure=abandonOnFailure, evaluateCommand=commandComplete
        )

    def downloadFileContentToWorker(
        self, workerdest, strfile, abandonOnFailure=False, mode=None, workdir=None
    ):
        """Write *strfile* to *workerdest* on the worker; returns the reader
        used, or None if the download failed (and abandonOnFailure is off)."""
        if workdir is None:
            workdir = self.workdir
        self.checkWorkerHasCommand("downloadFile")
        fileReader = remotetransfer.StringFileReader(strfile)
        # default arguments
        args = {
            'workdir': workdir,
            'maxsize': None,
            'mode': mode,
            'reader': fileReader,
            'blocksize': 32 * 1024,
        }
        # pre-3.0 workers use the old 'slavedest' argument name
        if self.workerVersionIsOlderThan('downloadFile', '3.0'):
            args['slavedest'] = workerdest
        else:
            args['workerdest'] = workerdest

        def commandComplete(cmd):
            if cmd.didFail():
                return None
            return fileReader

        return self.runRemoteCommand(
            'downloadFile', args, abandonOnFailure=abandonOnFailure, evaluateCommand=commandComplete
        )
| 10,638 | Python | .py | 283 | 28.90106 | 100 | 0.623504 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,855 | transfer.py | buildbot_buildbot/master/buildbot/steps/transfer.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import annotations
import json
import os
import stat
from twisted.internet import defer
from twisted.python import log
from buildbot import config
from buildbot.interfaces import WorkerSetupError
from buildbot.process import remotecommand
from buildbot.process import remotetransfer
from buildbot.process.buildstep import FAILURE
from buildbot.process.buildstep import SKIPPED
from buildbot.process.buildstep import SUCCESS
from buildbot.process.buildstep import BuildStep
from buildbot.steps.worker import CompositeStepMixin
from buildbot.util import flatten
def makeStatusRemoteCommand(step, remote_command, args) -> remotecommand.RemoteCommand:
    """Build a RemoteCommand wired to *step*'s stdio log.

    A return code of 0 — or no return code at all — decodes to SUCCESS.
    """
    cmd = remotecommand.RemoteCommand(remote_command, args, decodeRC={None: SUCCESS, 0: SUCCESS})
    cmd.useLog(step.stdio_log)
    return cmd
class _TransferBuildStep(BuildStep):
    """
    Base class for FileUpload and FileDownload to factor out common
    functionality.
    """

    renderables = ['workdir']

    haltOnFailure = True
    flunkOnFailure = True

    def __init__(self, workdir=None, **buildstep_kwargs):
        super().__init__(**buildstep_kwargs)
        self.workdir = workdir

    @defer.inlineCallbacks
    def runTransferCommand(
        self,
        cmd: remotecommand.RemoteCommand,
        writer: remotetransfer.FileWriter | None = None,
    ):
        """Run a transfer command; always cancel the writer afterwards, and
        purge any partially-written data when the command failed."""
        self.cmd = cmd
        try:
            yield self.runCommand(cmd)
        finally:
            if writer:
                writer.cancel()
        transfer_result = cmd.results()
        if transfer_result >= FAILURE and writer:
            writer.purge()
        return transfer_result

    @defer.inlineCallbacks
    def interrupt(self, reason):
        yield self.addCompleteLog('interrupt', str(reason))
        if self.cmd:
            # forward the interruption to the in-flight remote command
            yield self.cmd.interrupt(reason)
        return None
class FileUpload(_TransferBuildStep):
    """Transfer a single file from the worker to the buildmaster."""

    name = 'upload'

    renderables = [
        'masterdest',
        'url',
        'urlText',
        'workersrc',
    ]

    def __init__(
        self,
        workersrc=None,
        masterdest=None,
        workdir=None,
        maxsize=None,
        blocksize=256 * 1024,
        mode=None,
        keepstamp=False,
        url=None,
        urlText=None,
        **buildstep_kwargs,
    ):
        # Emulate that first two arguments are positional.
        if workersrc is None or masterdest is None:
            raise TypeError("__init__() takes at least 3 arguments")
        super().__init__(workdir=workdir, **buildstep_kwargs)
        self.workersrc = workersrc
        self.masterdest = masterdest
        self.maxsize = maxsize
        self.blocksize = blocksize
        if not isinstance(mode, (int, type(None))):
            config.error('mode must be an integer or None')
        self.mode = mode
        self.keepstamp = keepstamp
        self.url = url
        self.urlText = urlText

    @defer.inlineCallbacks
    def run(self):
        self.checkWorkerHasCommand("uploadFile")
        self.stdio_log = yield self.addLog("stdio")
        source = self.workersrc
        # we rely upon the fact that the buildmaster runs chdir'ed into its
        # basedir to make sure that relative paths in masterdest are expanded
        # properly. TODO: maybe pass the master's basedir all the way down
        # into the BuildStep so we can do this better.
        masterdest = os.path.expanduser(self.masterdest)
        log.msg(f"FileUpload started, from worker {source!r} to master {masterdest!r}")
        if self.description is None:
            self.description = [f'uploading {os.path.basename(source)}']
        if self.descriptionDone is None:
            self.descriptionDone = self.description
        if self.url is not None:
            link_text = self.urlText if self.urlText is not None else os.path.basename(masterdest)
            yield self.addURL(link_text, self.url)
        # we use maxsize to limit the amount of data on both sides
        file_writer = remotetransfer.FileWriter(masterdest, self.maxsize, self.mode)
        if self.keepstamp and self.workerVersionIsOlderThan("uploadFile", "2.13"):
            raise WorkerSetupError(
                f"This worker ({self.build.workername}) does not support preserving timestamps. "
                "Please upgrade the worker."
            )
        args = {
            'workdir': self.workdir,
            'writer': file_writer,
            'maxsize': self.maxsize,
            'blocksize': self.blocksize,
            'keepstamp': self.keepstamp,
        }
        # pre-3.0 workers use the old 'slavesrc' argument name
        src_key = 'slavesrc' if self.workerVersionIsOlderThan('uploadFile', '3.0') else 'workersrc'
        args[src_key] = source
        cmd = makeStatusRemoteCommand(self, 'uploadFile', args)
        res = yield self.runTransferCommand(cmd, file_writer)
        log.msg(f"File '{os.path.basename(self.workersrc)}' upload finished with results {res!s}")
        return res
class DirectoryUpload(_TransferBuildStep):
    """Transfer a directory tree from the worker to the buildmaster."""

    name = 'upload'

    renderables = ['workersrc', 'masterdest', 'url', 'urlText']

    def __init__(
        self,
        workersrc=None,
        masterdest=None,
        workdir=None,
        maxsize=None,
        blocksize=16 * 1024,
        compress=None,
        url=None,
        urlText=None,
        **buildstep_kwargs,
    ):
        # Emulate that first two arguments are positional.
        if workersrc is None or masterdest is None:
            raise TypeError("__init__() takes at least 3 arguments")
        super().__init__(workdir=workdir, **buildstep_kwargs)
        self.workersrc = workersrc
        self.masterdest = masterdest
        self.maxsize = maxsize
        self.blocksize = blocksize
        if compress not in (None, 'gz', 'bz2'):
            config.error("'compress' must be one of None, 'gz', or 'bz2'")
        self.compress = compress
        self.url = url
        self.urlText = urlText

    @defer.inlineCallbacks
    def run(self):
        self.checkWorkerHasCommand("uploadDirectory")
        self.stdio_log = yield self.addLog("stdio")
        source = self.workersrc
        masterdest = self.masterdest
        # we rely upon the fact that the buildmaster runs chdir'ed into its
        # basedir to make sure that relative paths in masterdest are expanded
        # properly. TODO: maybe pass the master's basedir all the way down
        # into the BuildStep so we can do this better.
        masterdest = os.path.expanduser(masterdest)
        log.msg(f"DirectoryUpload started, from worker {source!r} to master {masterdest!r}")
        self.descriptionDone = f"uploading {os.path.basename(source)}"
        if self.url is not None:
            urlText = self.urlText
            if urlText is None:
                urlText = os.path.basename(os.path.normpath(masterdest))
            yield self.addURL(urlText, self.url)
        # we use maxsize to limit the amount of data on both sides
        dirWriter = remotetransfer.DirectoryWriter(masterdest, self.maxsize, self.compress, 0o600)
        # default arguments
        args = {
            'workdir': self.workdir,
            'writer': dirWriter,
            'maxsize': self.maxsize,
            'blocksize': self.blocksize,
            'compress': self.compress,
        }
        # pre-3.0 workers use the old 'slavesrc' argument name
        if self.workerVersionIsOlderThan('uploadDirectory', '3.0'):
            args['slavesrc'] = source
        else:
            args['workersrc'] = source
        cmd = makeStatusRemoteCommand(self, 'uploadDirectory', args)
        res = yield self.runTransferCommand(cmd, dirWriter)
        return res
class MultipleFileUpload(_TransferBuildStep, CompositeStepMixin):
    """Upload several files and/or directories from the worker to the master.

    Sources may optionally be shell-style glob patterns (``glob=True``).
    Subclasses can override uploadDone()/allUploadsDone() to post-process
    individual uploads or the overall result.
    """

    name = 'upload'
    logEnviron = False

    renderables = ['workersrcs', 'masterdest', 'url', 'urlText']

    def __init__(
        self,
        workersrcs=None,
        masterdest=None,
        workdir=None,
        maxsize=None,
        blocksize=16 * 1024,
        glob=False,
        mode=None,
        compress=None,
        keepstamp=False,
        url=None,
        urlText=None,
        **buildstep_kwargs,
    ):
        # Emulate that first two arguments are positional.
        if workersrcs is None or masterdest is None:
            raise TypeError("__init__() takes at least 3 arguments")
        super().__init__(workdir=workdir, **buildstep_kwargs)
        self.workersrcs = workersrcs
        self.masterdest = masterdest
        self.maxsize = maxsize
        self.blocksize = blocksize
        if not isinstance(mode, (int, type(None))):
            config.error('mode must be an integer or None')
        self.mode = mode
        if compress not in (None, 'gz', 'bz2'):
            config.error("'compress' must be one of None, 'gz', or 'bz2'")
        self.compress = compress
        self.glob = glob
        self.keepstamp = keepstamp
        self.url = url
        self.urlText = urlText

    def uploadFile(self, source, masterdest):
        # Upload a single regular file; mirrors FileUpload.run().
        fileWriter = remotetransfer.FileWriter(masterdest, self.maxsize, self.mode)
        args = {
            'workdir': self.workdir,
            'writer': fileWriter,
            'maxsize': self.maxsize,
            'blocksize': self.blocksize,
            'keepstamp': self.keepstamp,
        }
        # pre-3.0 workers use the old 'slavesrc' argument name
        if self.workerVersionIsOlderThan('uploadFile', '3.0'):
            args['slavesrc'] = source
        else:
            args['workersrc'] = source
        cmd = makeStatusRemoteCommand(self, 'uploadFile', args)
        return self.runTransferCommand(cmd, fileWriter)

    def uploadDirectory(self, source, masterdest):
        # Upload a directory tree; mirrors DirectoryUpload.run().
        dirWriter = remotetransfer.DirectoryWriter(masterdest, self.maxsize, self.compress, 0o600)
        args = {
            'workdir': self.workdir,
            'writer': dirWriter,
            'maxsize': self.maxsize,
            'blocksize': self.blocksize,
            'compress': self.compress,
        }
        if self.workerVersionIsOlderThan('uploadDirectory', '3.0'):
            args['slavesrc'] = source
        else:
            args['workersrc'] = source
        cmd = makeStatusRemoteCommand(self, 'uploadDirectory', args)
        return self.runTransferCommand(cmd, dirWriter)

    @defer.inlineCallbacks
    def startUpload(self, source, destdir):
        """'stat' the source on the worker, then dispatch to uploadFile or
        uploadDirectory accordingly; calls the uploadDone() hook at the end."""
        masterdest = os.path.join(destdir, os.path.basename(source))
        args = {'file': source, 'workdir': self.workdir}
        cmd = makeStatusRemoteCommand(self, 'stat', args)
        yield self.runCommand(cmd)
        if cmd.rc != 0:
            msg = f'File {self.workdir}/{source} not available at worker'
            yield self.addCompleteLog('stderr', msg)
            return FAILURE
        # last 'stat' update is an os.stat-style tuple
        s = cmd.updates['stat'][-1]
        if stat.S_ISDIR(s[stat.ST_MODE]):
            result = yield self.uploadDirectory(source, masterdest)
        elif stat.S_ISREG(s[stat.ST_MODE]):
            result = yield self.uploadFile(source, masterdest)
        else:
            msg = f'{source} is neither a regular file, nor a directory'
            yield self.addCompleteLog('stderr', msg)
            return FAILURE
        yield self.uploadDone(result, source, masterdest)
        return result

    def uploadDone(self, result, source, masterdest):
        # Hook for subclasses: called after each individual upload.
        pass

    @defer.inlineCallbacks
    def allUploadsDone(self, result, sources, masterdest):
        # Hook for subclasses: called once after all uploads have finished.
        if self.url is not None:
            urlText = self.urlText
            if urlText is None:
                urlText = os.path.basename(os.path.normpath(masterdest))
            yield self.addURL(urlText, self.url)

    @defer.inlineCallbacks
    def run(self):
        self.checkWorkerHasCommand("uploadDirectory")
        self.checkWorkerHasCommand("uploadFile")
        self.checkWorkerHasCommand("stat")
        self.stdio_log = yield self.addLog("stdio")
        masterdest = os.path.expanduser(self.masterdest)
        sources = self.workersrcs if isinstance(self.workersrcs, list) else [self.workersrcs]
        if self.keepstamp and self.workerVersionIsOlderThan("uploadFile", "2.13"):
            m = (
                f"This worker ({self.build.workername}) does not support preserving timestamps. "
                "Please upgrade the worker."
            )
            raise WorkerSetupError(m)
        if not sources:
            return SKIPPED
        if self.glob:
            # expand each source pattern on the worker; a failed glob simply
            # contributes no files (abandonOnFailure=False)
            results = yield defer.gatherResults([
                self.runGlob(os.path.join(self.workdir, source), abandonOnFailure=False)
                for source in sources
            ])
            sources = [self.workerPathToMasterPath(p) for p in flatten(results)]
        log.msg(f"MultipleFileUpload started, from worker {sources!r} to master {masterdest!r}")
        self.descriptionDone = [
            'uploading',
            str(len(sources)),
            'file' if len(sources) == 1 else 'files',
        ]
        if not sources:
            result = SKIPPED
        else:
            # stop at the first failed upload
            result = SUCCESS
            for source in sources:
                result_single = yield self.startUpload(source, masterdest)
                if result_single == FAILURE:
                    result = FAILURE
                    break
        yield self.allUploadsDone(result, sources, masterdest)
        return result
class FileDownload(_TransferBuildStep):
    """Transfer a single file from the buildmaster to the worker."""

    name = 'download'

    renderables = ['mastersrc', 'workerdest']

    def __init__(
        self,
        mastersrc,
        workerdest=None,
        workdir=None,
        maxsize=None,
        blocksize=16 * 1024,
        mode=None,
        **buildstep_kwargs,
    ):
        # Emulate that first two arguments are positional.
        if workerdest is None:
            raise TypeError("__init__() takes at least 3 arguments")
        super().__init__(workdir=workdir, **buildstep_kwargs)
        self.mastersrc = mastersrc
        self.workerdest = workerdest
        self.maxsize = maxsize
        self.blocksize = blocksize
        if not isinstance(mode, (int, type(None))):
            config.error('mode must be an integer or None')
        self.mode = mode

    @defer.inlineCallbacks
    def run(self):
        self.checkWorkerHasCommand("downloadFile")
        self.stdio_log = yield self.addLog("stdio")
        # we are currently in the buildmaster's basedir, so any non-absolute
        # paths will be interpreted relative to that
        source = os.path.expanduser(self.mastersrc)
        workerdest = self.workerdest
        log.msg(f"FileDownload started, from master {source!r} to worker {workerdest!r}")
        self.descriptionDone = ["downloading to", os.path.basename(workerdest)]
        # setup structures for reading the file
        try:
            # NOTE(review): the open file object is handed to FileReader,
            # which presumably closes it when the transfer completes —
            # confirm against remotetransfer.FileReader.
            fp = open(source, 'rb')
        except OSError:
            # if file does not exist, bail out with an error
            yield self.addCompleteLog('stderr', f'File {source!r} not available at master')
            return FAILURE
        fileReader = remotetransfer.FileReader(fp)
        # default arguments
        args = {
            'maxsize': self.maxsize,
            'reader': fileReader,
            'blocksize': self.blocksize,
            'workdir': self.workdir,
            'mode': self.mode,
        }
        # pre-3.0 workers use the old 'slavedest' argument name
        if self.workerVersionIsOlderThan('downloadFile', '3.0'):
            args['slavedest'] = workerdest
        else:
            args['workerdest'] = workerdest
        cmd = makeStatusRemoteCommand(self, 'downloadFile', args)
        res = yield self.runTransferCommand(cmd)
        return res
class StringDownload(_TransferBuildStep):
    """Deliver an in-memory string to a file on the worker."""

    name = 'string_download'

    renderables = ['workerdest', 's']

    def __init__(
        self,
        s,
        workerdest=None,
        workdir=None,
        maxsize=None,
        blocksize=16 * 1024,
        mode=None,
        **buildstep_kwargs,
    ):
        # Emulate that first two arguments are positional.
        if workerdest is None:
            raise TypeError("__init__() takes at least 3 arguments")
        super().__init__(workdir=workdir, **buildstep_kwargs)
        self.s = s
        self.workerdest = workerdest
        self.maxsize = maxsize
        self.blocksize = blocksize
        if not isinstance(mode, (int, type(None))):
            config.error(f"StringDownload step's mode must be an integer or None, got '{mode}'")
        self.mode = mode

    @defer.inlineCallbacks
    def run(self):
        # the transfer is implemented with the worker-side 'downloadFile'
        self.checkWorkerHasCommand("downloadFile")
        self.stdio_log = yield self.addLog("stdio")
        # we are currently in the buildmaster's basedir, so any non-absolute
        # paths will be interpreted relative to that
        dest = self.workerdest
        log.msg(f"StringDownload started, from master to worker {dest!r}")
        self.descriptionDone = ["downloading to", os.path.basename(dest)]
        reader = remotetransfer.StringFileReader(self.s)
        args = {
            'maxsize': self.maxsize,
            'reader': reader,
            'blocksize': self.blocksize,
            'workdir': self.workdir,
            'mode': self.mode,
        }
        # pre-3.0 workers use the old 'slavedest' argument name
        dest_key = (
            'slavedest' if self.workerVersionIsOlderThan('downloadFile', '3.0') else 'workerdest'
        )
        args[dest_key] = dest
        cmd = makeStatusRemoteCommand(self, 'downloadFile', args)
        return (yield self.runTransferCommand(cmd))
class JSONStringDownload(StringDownload):
    """Encode a Python object as JSON and download it to a file on the worker.

    :param o: the object to serialize (must be JSON-serializable)
    :param workerdest: destination path on the worker (required)
    """

    name = "json_download"

    def __init__(self, o, workerdest=None, **buildstep_kwargs):
        # Emulate that first two arguments are positional.
        if workerdest is None:
            raise TypeError("__init__() takes at least 3 arguments")
        if 's' in buildstep_kwargs:
            del buildstep_kwargs['s']
        super().__init__(s=o, workerdest=workerdest, **buildstep_kwargs)

    @defer.inlineCallbacks
    def run(self):
        # Encode only for the duration of the transfer.  The previous code
        # overwrote self.s with its JSON encoding permanently, so executing
        # the step a second time would double-encode the payload.
        original = self.s
        self.s = json.dumps(original)
        try:
            res = yield super().run()
        finally:
            self.s = original
        return res
class JSONPropertiesDownload(StringDownload):
    """Serialize the build's properties and sourcestamps to JSON and
    download the result to a file on the worker."""

    name = "json_properties_download"

    def __init__(self, workerdest=None, **buildstep_kwargs):
        # Emulate that first two arguments are positional.
        if workerdest is None:
            raise TypeError("__init__() takes at least 2 arguments")
        # 's' is computed at run time, never taken from the caller
        buildstep_kwargs.pop('s', None)
        super().__init__(s=None, workerdest=workerdest, **buildstep_kwargs)

    @defer.inlineCallbacks
    def run(self):
        properties = self.build.getProperties()
        payload = {
            "properties": {key: value for key, value, _ in properties.asList()},
            "sourcestamps": [ss.asDict() for ss in self.build.getAllSourceStamps()],
        }
        self.s = json.dumps(payload)
        res = yield super().run()
        return res
| 19,876 | Python | .py | 494 | 31.012146 | 98 | 0.627751 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,856 | gitdiffinfo.py | buildbot_buildbot/master/buildbot/steps/gitdiffinfo.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import json
from twisted.internet import defer
from buildbot import config
from buildbot.process import buildstep
from buildbot.process import logobserver
from buildbot.process import results
class GitDiffInfo(buildstep.ShellMixin, buildstep.BuildStep):
    """Collect information about the diff between HEAD and a reference branch.

    The merge base between HEAD and ``compareToRef`` is exposed as the
    ``diffinfo-merge-base-commit`` property, and a compact JSON description
    of the diff hunks is stored as build data under ``dataName``.
    """

    name = 'GitDiffInfo'
    description = 'running GitDiffInfo'
    descriptionDone = 'GitDiffInfo'

    def __init__(self, compareToRef='master', dataName='diffinfo-master', **kwargs):
        try:
            from unidiff import PatchSet

            _ = PatchSet  # silence pylint
        except ImportError:
            config.error('unidiff package must be installed in order to use GitDiffInfo')

        kwargs = self.setupShellMixin(kwargs, prohibitArgs=['command'])
        super().__init__(**kwargs)

        self._compare_to_ref = compareToRef
        self._data_name = dataName
        self._observer = logobserver.BufferLogObserver()

    def _convert_hunk(self, hunk):
        # TODO: build an intermediate class that would handle serialization. We want to output
        # as few data as possible, even if the json is not human-readable
        converted = {}
        converted['ss'] = hunk.source_start
        converted['sl'] = hunk.source_length
        converted['ts'] = hunk.target_start
        converted['tl'] = hunk.target_length
        return converted

    def _convert_file(self, changed_file):
        hunks = []
        for hunk in changed_file:
            hunks.append(self._convert_hunk(hunk))
        return {
            'source_file': changed_file.source_file,
            'target_file': changed_file.target_file,
            'is_binary': changed_file.is_binary_file,
            'is_rename': changed_file.is_rename,
            'hunks': hunks,
        }

    def _convert_patchset(self, patchset):
        return [self._convert_file(changed_file) for changed_file in patchset]

    @defer.inlineCallbacks
    def run(self):
        # First, determine the merge base between HEAD and the reference.
        cmd = yield self.makeRemoteShellCommand(
            command=['git', 'merge-base', 'HEAD', self._compare_to_ref],
            stdioLogName='stdio-merge-base',
            collectStdout=True,
        )
        yield self.runCommand(cmd)

        log = yield self.getLog("stdio-merge-base")
        yield log.finish()

        if cmd.results() != results.SUCCESS:
            return cmd.results()

        commit = cmd.stdout.strip()
        self.setProperty('diffinfo-merge-base-commit', commit, 'GitDiffInfo')

        # Then diff HEAD against that commit, capturing the output.
        self.addLogObserver('stdio-diff', self._observer)

        cmd = yield self.makeRemoteShellCommand(
            command=['git', 'diff', '--no-prefix', '-U0', commit, 'HEAD'],
            stdioLogName='stdio-diff',
        )
        yield self.runCommand(cmd)

        if cmd.results() != results.SUCCESS:
            return cmd.results()

        # Finally, parse the diff and persist it as compact JSON build data.
        from unidiff import PatchSet

        patchset = PatchSet(self._observer.getStdout(), metadata_only=True)

        data = json.dumps(self._convert_patchset(patchset)).encode('utf-8')
        yield self.setBuildData(self._data_name, data, 'GitDiffInfo')

        return cmd.results()
| 3,604 | Python | .py | 78 | 38.74359 | 94 | 0.674757 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,857 | trigger.py | buildbot_buildbot/master/buildbot/steps/trigger.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from twisted.internet import defer
from twisted.python import log
from buildbot import config
from buildbot.interfaces import IRenderable
from buildbot.interfaces import ITriggerableScheduler
from buildbot.process.buildstep import CANCELLED
from buildbot.process.buildstep import EXCEPTION
from buildbot.process.buildstep import SUCCESS
from buildbot.process.buildstep import BuildStep
from buildbot.process.properties import Properties
from buildbot.process.properties import Property
from buildbot.process.results import ALL_RESULTS
from buildbot.process.results import statusToString
from buildbot.process.results import worst_status
from buildbot.reporters.utils import getURLForBuild
from buildbot.reporters.utils import getURLForBuildrequest
class Trigger(BuildStep):
    """Trigger other schedulers by name and, optionally, wait for the
    resulting builds to complete before finishing this step."""

    name = "trigger"

    renderables = [
        'alwaysUseLatest',
        'parent_relationship',
        'schedulerNames',
        'set_properties',
        'sourceStamps',
        'updateSourceStamp',
        'waitForFinish',
    ]

    flunkOnFailure = True

    def __init__(
        self,
        schedulerNames=None,
        sourceStamp=None,
        sourceStamps=None,
        updateSourceStamp=None,
        alwaysUseLatest=False,
        waitForFinish=False,
        set_properties=None,
        copy_properties=None,
        parent_relationship="Triggered from",
        unimportantSchedulerNames=None,
        **kwargs,
    ):
        if schedulerNames is None:
            schedulerNames = []
        if unimportantSchedulerNames is None:
            unimportantSchedulerNames = []
        if not schedulerNames:
            config.error("You must specify a scheduler to trigger")
        # The three sourcestamp-selection mechanisms are mutually exclusive.
        if (sourceStamp or sourceStamps) and (updateSourceStamp is not None):
            config.error("You can't specify both sourceStamps and updateSourceStamp")
        if (sourceStamp or sourceStamps) and alwaysUseLatest:
            config.error("You can't specify both sourceStamps and alwaysUseLatest")
        if alwaysUseLatest and (updateSourceStamp is not None):
            config.error("You can't specify both alwaysUseLatest and updateSourceStamp")

        def hasRenderable(l):
            # renderable entries cannot be validated until build time
            for s in l:
                if IRenderable.providedBy(s):
                    return True
            return False

        # only validate the subset relationship when both lists are concrete
        if not hasRenderable(schedulerNames) and not hasRenderable(unimportantSchedulerNames):
            if not set(schedulerNames).issuperset(set(unimportantSchedulerNames)):
                config.error("unimportantSchedulerNames must be a subset of schedulerNames")

        self.schedulerNames = schedulerNames
        self.unimportantSchedulerNames = unimportantSchedulerNames
        self.sourceStamps = sourceStamps or []
        if sourceStamp:
            self.sourceStamps.append(sourceStamp)
        # default: update revisions from got_revision unless explicit
        # sourcestamps or alwaysUseLatest were requested
        if updateSourceStamp is not None:
            self.updateSourceStamp = updateSourceStamp
        else:
            self.updateSourceStamp = not (alwaysUseLatest or self.sourceStamps)
        self.alwaysUseLatest = alwaysUseLatest
        self.waitForFinish = waitForFinish
        if set_properties is None:
            set_properties = {}
        if copy_properties is None:
            copy_properties = []
        # merge copy_properties into set_properties as Property() references
        properties = {}
        properties.update(set_properties)
        for i in copy_properties:
            properties[i] = Property(i)
        self.set_properties = properties
        self.parent_relationship = parent_relationship
        self.running = False
        self.ended = False
        self.brids = []
        self.triggeredNames = None
        self.waitForFinishDeferred = None
        self._result_list = []
        super().__init__(**kwargs)

    def interrupt(self, reason):
        """Cancel all triggered buildrequests and mark this step as ended."""
        # We cancel the buildrequests, as the data api handles
        # both cases:
        # - build started: stop is sent,
        # - build not created yet: related buildrequests are set to CANCELLED.
        # Note that there is an identified race condition though (more details
        # are available at buildbot.data.buildrequests).
        for brid in self.brids:
            self.master.data.control(
                "cancel", {'reason': 'parent build was interrupted'}, ("buildrequests", brid)
            )
        if self.running and not self.ended:
            self.ended = True
            # if we are interrupted because of a connection lost, we interrupt synchronously
            if self.build.conn is None and self.waitForFinishDeferred is not None:
                self.waitForFinishDeferred.cancel()

    # Create the properties that are used for the trigger
    def createTriggerProperties(self, properties):
        # make a new properties object from a dict rendered by the old
        # properties object
        trigger_properties = Properties()
        trigger_properties.update(properties, "Trigger")
        return trigger_properties

    def getSchedulerByName(self, name):
        """Return the ITriggerableScheduler named *name*; raise ValueError
        if it is unknown or not triggerable."""
        # we use the fact that scheduler_manager is a multiservice, with schedulers as childs
        # this allow to quickly find schedulers instance by name
        schedulers = self.master.scheduler_manager.namedServices
        if name not in schedulers:
            raise ValueError(f"unknown triggered scheduler: {name!r}")
        sch = schedulers[name]
        if not ITriggerableScheduler.providedBy(sch):
            raise ValueError(f"triggered scheduler is not ITriggerableScheduler: {name!r}")
        return sch

    # This customization endpoint allows users to dynamically select which
    # scheduler and properties to trigger
    def getSchedulersAndProperties(self):
        return [
            {
                'sched_name': sched,
                'props_to_set': self.set_properties,
                'unimportant': sched in self.unimportantSchedulerNames,
            }
            for sched in self.schedulerNames
        ]

    def prepareSourcestampListForTrigger(self):
        """Build the sourcestamp list passed to the triggered schedulers,
        one entry per codebase, sorted by codebase."""
        if self.sourceStamps:
            ss_for_trigger = {}
            for ss in self.sourceStamps:
                codebase = ss.get('codebase', '')
                assert codebase not in ss_for_trigger, "codebase specified multiple times"
                ss_for_trigger[codebase] = ss
            trigger_values = [ss_for_trigger[k] for k in sorted(ss_for_trigger.keys())]
            return trigger_values

        if self.alwaysUseLatest:
            # empty list lets the triggered scheduler pick the latest
            return []

        # start with the sourcestamps from current build
        ss_for_trigger = {}
        objs_from_build = self.build.getAllSourceStamps()
        for ss in objs_from_build:
            ss_for_trigger[ss.codebase] = ss.asDict()

        # overrule revision in sourcestamps with got revision
        if self.updateSourceStamp:
            got = self.getAllGotRevisions()
            for codebase, ss in ss_for_trigger.items():
                if codebase in got:
                    ss['revision'] = got[codebase]

        trigger_values = [ss_for_trigger[k] for k in sorted(ss_for_trigger.keys())]
        return trigger_values

    def getAllGotRevisions(self):
        """Return got_revision as a dict keyed by codebase."""
        all_got_revisions = self.getProperty('got_revision', {})
        # For backwards compatibility all_got_revisions is a string if codebases
        # are not used. Convert to the default internal type (dict)
        if not isinstance(all_got_revisions, dict):
            all_got_revisions = {'': all_got_revisions}
        return all_got_revisions

    @defer.inlineCallbacks
    def worstStatus(self, overall_results, rclist, unimportant_brids):
        """Fold the triggered schedulers' results into *overall_results*,
        skipping results that come only from unimportant schedulers."""
        for was_cb, results in rclist:
            if isinstance(results, tuple):
                results, brids_dict = results

                # brids_dict.values() represents the list of brids kicked by a certain scheduler.
                # We want to ignore the result of ANY brid that was kicked off
                # by an UNimportant scheduler.
                if set(unimportant_brids).issuperset(set(brids_dict.values())):
                    continue
            if not was_cb:
                yield self.addLogWithFailure(results)
                results = EXCEPTION
            overall_results = worst_status(overall_results, results)
        return overall_results

    @defer.inlineCallbacks
    def addBuildUrls(self, rclist):
        """Add a status URL for every build created by the triggered
        buildrequests."""
        brids = {}
        for was_cb, results in rclist:
            if isinstance(results, tuple):
                results, brids = results
            builderNames = {}
            if was_cb:  # errors were already logged in worstStatus
                for builderid, br in brids.items():
                    builds = yield self.master.db.builds.getBuilds(buildrequestid=br)
                    for build in builds:
                        builderid = build.builderid
                        # When virtual builders are used, the builderid used for triggering
                        # is not the same as the one that the build actually got
                        if builderid not in builderNames:
                            builderDict = yield self.master.data.get(("builders", builderid))
                            builderNames[builderid] = builderDict["name"]
                        num = build.number
                        url = getURLForBuild(self.master, builderid, num)
                        yield self.addURL(
                            f'{statusToString(build.results)}: '
                            f'{builderNames[builderid]} #{num}',
                            url,
                        )

    @defer.inlineCallbacks
    def _add_results(self, brid):
        """Wait for buildrequest *brid* to complete, then record its builds'
        results and refresh the step summary."""

        @defer.inlineCallbacks
        def _is_buildrequest_complete(brid):
            buildrequest = yield self.master.db.buildrequests.getBuildRequest(brid)
            return buildrequest.complete

        event = ('buildrequests', str(brid), 'complete')
        yield self.master.mq.waitUntilEvent(event, lambda: _is_buildrequest_complete(brid))
        builds = yield self.master.db.builds.getBuilds(buildrequestid=brid)
        for build in builds:
            self._result_list.append(build.results)
        self.updateSummary()

    @defer.inlineCallbacks
    def run(self):
        """Trigger all configured schedulers; when waitForFinish is set, wait
        for their builds and compute the worst status."""
        schedulers_and_props = yield self.getSchedulersAndProperties()

        schedulers_and_props_list = []

        # To be back compatible we need to differ between old and new style
        # schedulers_and_props can either consist of 2 elements tuple or
        # dictionary
        for element in schedulers_and_props:
            if isinstance(element, dict):
                schedulers_and_props_list = schedulers_and_props
                break

            # Old-style back compatibility: Convert tuple to dict and make
            # it important
            d = {'sched_name': element[0], 'props_to_set': element[1], 'unimportant': False}
            schedulers_and_props_list.append(d)

        # post process the schedulernames, and raw properties
        # we do this out of the loop, as this can result in errors
        schedulers_and_props = [
            (
                self.getSchedulerByName(entry_dict['sched_name']),
                self.createTriggerProperties(entry_dict['props_to_set']),
                entry_dict['unimportant'],
            )
            for entry_dict in schedulers_and_props_list
        ]

        ss_for_trigger = self.prepareSourcestampListForTrigger()

        dl = []
        triggeredNames = []
        results = SUCCESS
        self.running = True

        unimportant_brids = []

        for sch, props_to_set, unimportant in schedulers_and_props:
            idsDeferred, resultsDeferred = sch.trigger(
                waited_for=self.waitForFinish,
                sourcestamps=ss_for_trigger,
                set_props=props_to_set,
                parent_buildid=self.build.buildid,
                parent_relationship=self.parent_relationship,
            )
            # we are not in a hurry of starting all in parallel and managing
            # the deferred lists, just let the db writes be serial.
            brids = {}
            try:
                _, brids = yield idsDeferred
            except Exception as e:
                yield self.addLogWithException(e)
                results = EXCEPTION

            if unimportant:
                unimportant_brids.extend(brids.values())
            self.brids.extend(brids.values())
            for brid in brids.values():
                # put the url to the brids, so that we can have the status from
                # the beginning
                url = getURLForBuildrequest(self.master, brid)
                yield self.addURL(f"{sch.name} #{brid}", url)
                # No yield since we let this happen as the builds complete
                self._add_results(brid)
            dl.append(resultsDeferred)
            triggeredNames.append(sch.name)
            if self.ended:
                return CANCELLED
        self.triggeredNames = triggeredNames

        if self.waitForFinish:
            self.waitForFinishDeferred = defer.DeferredList(dl, consumeErrors=1)
            try:
                rclist = yield self.waitForFinishDeferred
            except defer.CancelledError:
                pass
            # we were interrupted, don't bother update status
            if self.ended:
                return CANCELLED
            yield self.addBuildUrls(rclist)
            results = yield self.worstStatus(results, rclist, unimportant_brids)
        else:
            # do something to handle errors
            for d in dl:
                d.addErrback(log.err, '(ignored) while invoking Triggerable schedulers:')

        return results

    def getResultSummary(self):
        if self.ended:
            return {'step': 'interrupted'}
        return {'step': self.getCurrentSummary()['step']} if self.triggeredNames else {}

    def getCurrentSummary(self):
        """Summarize the triggered schedulers and a per-status count of the
        builds finished so far."""
        if not self.triggeredNames:
            return {'step': 'running'}
        summary = ""
        if self._result_list:
            for status in ALL_RESULTS:
                count = self._result_list.count(status)
                if count:
                    summary = summary + (
                        f", {self._result_list.count(status)} {statusToString(status, count)}"
                    )
        return {'step': f"triggered {', '.join(self.triggeredNames)}{summary}"}
| 15,004 | Python | .py | 324 | 35.141975 | 97 | 0.631165 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,858 | cmake.py | buildbot_buildbot/master/buildbot/steps/cmake.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from twisted.internet import defer
from buildbot import config
from buildbot.interfaces import IRenderable
from buildbot.process.buildstep import BuildStep
from buildbot.process.buildstep import ShellMixin
class CMake(ShellMixin, BuildStep):
    """Run the ``cmake`` configuration tool on the worker."""

    DEFAULT_CMAKE = 'cmake'

    name = 'cmake'
    description = ['running', 'cmake']
    descriptionDone = ['cmake']

    renderables = ('cmake', 'definitions', 'generator', 'options', 'path')

    haltOnFailure = True

    def __init__(
        self,
        path=None,
        generator=None,
        definitions=None,
        options=None,
        cmake=DEFAULT_CMAKE,
        **kwargs,
    ):
        self.path = path
        self.generator = generator

        definitions_ok = (
            definitions is None
            or isinstance(definitions, dict)
            or IRenderable.providedBy(definitions)
        )
        if not definitions_ok:
            config.error('definitions must be a dictionary or implement IRenderable')
        self.definitions = definitions

        options_ok = (
            options is None
            or isinstance(options, (list, tuple))
            or IRenderable.providedBy(options)
        )
        if not options_ok:
            config.error('options must be a list, a tuple or implement IRenderable')
        self.options = options

        self.cmake = cmake

        kwargs = self.setupShellMixin(kwargs, prohibitArgs=['command'])
        super().__init__(**kwargs)

    @defer.inlineCallbacks
    def run(self):
        """Assemble the cmake command line and execute it remotely."""
        argv = [self.cmake]

        if self.generator:
            argv += ['-G', self.generator]
        if self.definitions is not None:
            for define, value in self.definitions.items():
                argv.append(f'-D{define}={value}')
        if self.options is not None:
            argv.extend(self.options)
        if self.path:
            argv.append(self.path)

        cmd = yield self.makeRemoteShellCommand(command=argv)
        yield self.runCommand(cmd)
        return cmd.results()
| 2,693 | Python | .py | 70 | 31.371429 | 100 | 0.668587 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,859 | subunit.py | buildbot_buildbot/master/buildbot/steps/subunit.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import io
from unittest import TestResult
from twisted.internet import defer
from buildbot.process import buildstep
from buildbot.process import logobserver
from buildbot.process.results import FAILURE
from buildbot.process.results import SUCCESS
from buildbot.process.results import statusToString
class SubunitLogObserver(logobserver.LogLineObserver, TestResult):
    """Observe a log that may contain subunit output.

    This class extends TestResult to receive the callbacks from the subunit
    parser in the most direct fashion.
    """

    def __init__(self):
        super().__init__()
        # subunit is imported lazily so that buildbot itself does not depend
        # on it unless this observer is actually used
        try:
            from subunit import PROGRESS_CUR
            from subunit import PROGRESS_POP
            from subunit import PROGRESS_PUSH
            from subunit import PROGRESS_SET
            from subunit import TestProtocolServer
        except ImportError as e:
            raise ImportError(
                "subunit is not importable, but is required for SubunitLogObserver support."
            ) from e
        self.PROGRESS_CUR = PROGRESS_CUR
        self.PROGRESS_SET = PROGRESS_SET
        self.PROGRESS_PUSH = PROGRESS_PUSH
        self.PROGRESS_POP = PROGRESS_POP
        # forwarded-output buffer handed to the protocol server
        self.warningio = io.BytesIO()
        self.protocol = TestProtocolServer(self, self.warningio)
        # fallback storage for skips when TestResult lacks addSkip()
        self.skips = []
        self.seen_tags = set()  # don't yet know what tags does in subunit

    def outLineReceived(self, line):
        # Impedance mismatch: subunit wants lines, observers get lines-no\n
        # Note that observers get already decoded lines whereas protocol wants bytes
        self.protocol.lineReceived(line.encode('utf-8') + b'\n')

    def errLineReceived(self, line):
        # Same note as in outLineReceived applies
        self.protocol.lineReceived(line.encode('utf-8') + b'\n')

    def stopTest(self, test):
        super().stopTest(test)
        # keep the step's 'tests' progress metric in sync
        self.step.setProgress('tests', self.testsRun)

    def addSkip(self, test, detail):
        # delegate to TestResult when it supports skips, else record locally
        if hasattr(TestResult, 'addSkip'):
            super().addSkip(test, detail)
        else:
            self.skips.append((test, detail))

    def addError(self, test, err):
        super().addError(test, err)
        self.issue(test, err)

    def addFailure(self, test, err):
        super().addFailure(test, err)
        self.issue(test, err)

    def issue(self, test, err):
        """An issue - failing, erroring etc test."""
        self.step.setProgress('tests failed', len(self.failures) + len(self.errors))

    def tags(self, new_tags, gone_tags):
        """Accumulate the seen tags."""
        self.seen_tags.update(new_tags)
class SubunitShellCommand(buildstep.ShellMixin, buildstep.BuildStep):
    """A ShellCommand that sniffs subunit output.

    Test counts and failures reported on the subunit stream are parsed by a
    SubunitLogObserver and reflected in the step's progress metrics, logs
    and result summary.

    Note: the original code placed this docstring *after* ``name = 'shell'``,
    making it a no-op string statement (``__doc__`` was None); it is now a
    real class docstring.
    """

    name = 'shell'

    def __init__(self, failureOnNoTests=False, *args, **kwargs):
        # failureOnNoTests: when True, a run that executes zero tests fails
        # even if the command itself exits successfully.
        kwargs = self.setupShellMixin(kwargs)
        super().__init__(*args, **kwargs)
        self.failureOnNoTests = failureOnNoTests

        self._observer = SubunitLogObserver()
        self.addLogObserver('stdio', self._observer)
        self.progressMetrics = (*self.progressMetrics, "tests", "tests failed")

    @defer.inlineCallbacks
    def run(self):
        """Run the command and derive the result from the subunit stream."""
        cmd = yield self.makeRemoteShellCommand()
        yield self.runCommand(cmd)

        stdio_log = yield self.getLog('stdio')
        yield stdio_log.finish()

        problems = ""
        for test, err in self._observer.errors + self._observer.failures:
            problems += f"{test.id()}\n{err}"
        if problems:
            yield self.addCompleteLog("problems", problems)

        warnings = self._observer.warningio.getvalue()
        if warnings:
            yield self.addCompleteLog("warnings", warnings)

        failures = len(self._observer.failures)
        errors = len(self._observer.errors)
        total = self._observer.testsRun

        if cmd.didFail():
            return FAILURE
        if failures + errors > 0:
            return FAILURE
        if not total and self.failureOnNoTests:
            return FAILURE
        return SUCCESS

    def getResultSummary(self):
        """Build a one-line summary from the observed test counts."""
        failures = len(self._observer.failures)
        errors = len(self._observer.errors)
        skips = len(self._observer.skips)
        total = self._observer.testsRun

        count = failures + errors
        summary = self.name

        if not count:
            if total:
                # conditional expressions replace the old `x and a or b` idiom;
                # the produced strings are unchanged
                summary += f' {total} {"test" if total == 1 else "tests"} passed'
            else:
                summary += " no tests run"
        else:
            summary += f" Total {total} test(s)"
            if failures:
                summary += f' {failures} {"failure" if failures == 1 else "failures"}'
            if errors:
                summary += f' {errors} {"error" if errors == 1 else "errors"}'
            if skips:
                summary += f' {skips} {"skip" if skips == 1 else "skips"}'

        # TODO: expectedFailures/unexpectedSuccesses

        if self.results != SUCCESS:
            summary += f' ({statusToString(self.results)})'
        if self.timed_out:
            summary += " (timed out)"

        return {'step': summary}
| 5,851 | Python | .py | 135 | 35.037037 | 92 | 0.650581 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,860 | maxq.py | buildbot_buildbot/master/buildbot/steps/maxq.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from twisted.internet import defer
from buildbot import config
from buildbot.process import buildstep
from buildbot.process import logobserver
from buildbot.process.results import FAILURE
from buildbot.process.results import SUCCESS
class MaxQObserver(logobserver.LogLineObserver):
    """Count MaxQ test failures reported on stdout."""

    def __init__(self):
        super().__init__()
        # number of 'TEST FAILURE:' lines seen so far
        self.failures = 0

    def outLineReceived(self, line):
        if line.startswith('TEST FAILURE:'):
            self.failures = self.failures + 1
class MaxQ(buildstep.ShellMixin, buildstep.BuildStep):
    """Run the MaxQ test harness and fail when any test fails."""

    flunkOnFailure = True
    name = "maxq"
    binary = 'run_maxq.py'
    failures = 0

    def __init__(self, testdir=None, **kwargs):
        if not testdir:
            config.error("please pass testdir")
        self.testdir = testdir

        kwargs = self.setupShellMixin(kwargs)
        super().__init__(**kwargs)

        self.observer = MaxQObserver()
        self.addLogObserver('stdio', self.observer)

    @defer.inlineCallbacks
    def run(self):
        cmd = yield self.makeRemoteShellCommand(command=[self.binary, self.testdir])
        yield self.runCommand(cmd)

        stdio_log = yield self.getLog('stdio')
        yield stdio_log.finish()

        self.failures = self.observer.failures
        # treat a nonzero exit status as a failure, if no other failures are
        # detected
        if not self.failures and cmd.didFail():
            self.failures = 1

        return FAILURE if self.failures else SUCCESS

    def getResultSummary(self):
        if self.failures:
            return {'step': f"{self.failures} maxq failures"}
        return {'step': 'success'}
| 2,407 | Python | .py | 60 | 34.1 | 79 | 0.699142 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,861 | configurable.py | buildbot_buildbot/master/buildbot/steps/configurable.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import re
import traceback
import warnings
import yaml
from evalidate import Expr
from evalidate import base_eval_model
from twisted.internet import defer
from buildbot.db.model import Model
from buildbot.plugins import util
from buildbot.plugins.db import get_plugins
from buildbot.process import buildstep
from buildbot.process.properties import Properties
from buildbot.process.properties import renderer
from buildbot.process.results import SUCCESS
from buildbot.steps.shell import ShellCommand
from buildbot.steps.trigger import Trigger
from buildbot.steps.worker import CompositeStepMixin
class BuildbotCiYmlInvalid(Exception):
    """Raised when the CI YAML configuration cannot be parsed or is malformed."""

    pass
# KEY= prefix of an assignment (leading whitespace allowed)
_env_string_key_re = re.compile(r'^\s*(\w+)=')
# value: double-quoted (with escapes), single-quoted, or a bare word
_env_string_value_re = re.compile(r'''(?:"((?:\\.|[^"])*?)"|'([^']*?)'|(\S*))''')


def parse_env_string(env_str, parent_env=None):
    """Parse a shell-like string of KEY=VALUE assignments into a dict.

    Values may be bare words, single-quoted or double-quoted. Entries from
    parent_env (if given) seed the result and may be overridden by parsed
    assignments. Raises ValueError when the string cannot be split into
    assignments.
    """
    remaining = env_str.strip()
    original = remaining

    result = {}
    if parent_env:
        result.update(parent_env)

    if not remaining:
        return result

    while remaining:
        key_match = _env_string_key_re.match(remaining)
        if key_match is None:
            raise ValueError(f'Could not parse \'{original}\': splitting \'{remaining}\' failed')
        key = key_match.group(1)
        remaining = remaining[key_match.end() :]

        value_match = _env_string_value_re.match(remaining)
        if value_match is None:
            raise ValueError(f'Could not parse \'{original}\': splitting \'{remaining}\' failed')
        remaining = remaining[value_match.end() :]

        # exactly one alternative matched; empty bare word yields ''
        value = value_match.group(1) or value_match.group(2) or value_match.group(3) or ''
        result[key] = value

    return result
def interpolate_constructor(loader, node):
    """YAML constructor that turns a scalar node into util.Interpolate."""
    scalar = loader.construct_scalar(node)
    return util.Interpolate(scalar)
class BuildbotCiLoader(yaml.SafeLoader):
    """YAML loader extended with Buildbot-specific constructors.

    Registers ``!Interpolate``/``!i`` tags plus one ``!StepName`` tag per
    buildstep plugin, so steps can be instantiated directly from the YAML
    configuration.
    """

    # Class-level guard so the plugin scan and constructor registration
    # happen only once per process.
    constructors_loaded = False

    @classmethod
    def ensure_constructors_loaded(cls):
        """Register all constructors on first use; later calls are no-ops."""
        if cls.constructors_loaded:
            return
        cls.load_constructors()

    @classmethod
    def load_constructors(cls):
        cls.add_constructor('!Interpolate', interpolate_constructor)
        cls.add_constructor('!i', interpolate_constructor)

        steps = get_plugins('steps', None, load_now=True)
        for step_name in steps.names:
            # Accessing a step from the plugin DB may raise warnings (e.g. deprecation).
            # We don't want them logged until the step is actually used.
            with warnings.catch_warnings(record=True) as all_warnings:
                warnings.simplefilter("always")
                step_class = steps.get(step_name)
                step_warnings = list(all_warnings)
            cls.register_step_class(step_name, step_class, step_warnings)

        # Fix: the guard flag was never set, so ensure_constructors_loaded()
        # redid the plugin scan and re-registered every constructor on each
        # call. Mark registration as done once it has completed.
        cls.constructors_loaded = True

    @classmethod
    def register_step_class(cls, name, step_class, step_warnings):
        def step_constructor(loader, node):
            try:
                if isinstance(node, yaml.ScalarNode):
                    args = [loader.construct_scalar(node)]
                    kwargs = {}
                elif isinstance(node, yaml.SequenceNode):
                    args = loader.construct_sequence(node)
                    kwargs = {}
                elif isinstance(node, yaml.MappingNode):
                    args = []
                    kwargs = loader.construct_mapping(node)
                else:
                    raise Exception('Unsupported node type')
            except Exception as e:
                raise Exception(f"Could not parse steps arguments: {e}") from e

            # Re-raise all warnings that occurred when accessing step class. We only want them to
            # be logged if the configuration actually uses the step class.
            for w in step_warnings:
                warnings.warn_explicit(w.message, w.category, w.filename, w.lineno)
            return step_class(*args, **kwargs)

        cls.add_constructor('!' + name, step_constructor)
class BuildbotCiYml:
    """Parsed representation of a .buildbot-ci.yml / .bbtravis.yml file."""

    SCRIPTS = (
        "before_install",
        "install",
        "after_install",
        "before_script",
        "script",
        "after_script",
    )

    steps_loaded = False

    def __init__(self):
        # NOTE(review): load_from_dict_internal() stores the raw dict into
        # self.config, leaving config_dict apparently unused — confirm.
        self.config_dict = None
        self.label_mapping = {}
        self.global_env = {}
        self.script_commands = {script: [] for script in self.SCRIPTS}
        self.matrix = []

    @classmethod
    def load_from_str(cls, config_input):
        BuildbotCiLoader.ensure_constructors_loaded()
        try:
            parsed = yaml.load(config_input, Loader=BuildbotCiLoader)
        except Exception as e:
            raise BuildbotCiYmlInvalid(f"Invalid YAML data\n{e}") from e
        return cls.load_from_dict(parsed)

    @classmethod
    def load_from_dict(cls, config):
        yml = cls()
        yml.load_from_dict_internal(config)
        return yml

    def load_from_dict_internal(self, config):
        self.config = config
        self.label_mapping = self.config.get('label_mapping', {})
        self.global_env = BuildbotCiYml.load_global_env(config)
        self.script_commands = BuildbotCiYml.load_scripts(config)
        self.matrix = BuildbotCiYml.load_matrix(config, self.global_env)

    @classmethod
    def load_global_env(cls, config):
        """Extract the env.global settings as a dict; absent or list-valued
        top-level 'env' contributes nothing."""
        env = config.get("env", None)
        if env is None:
            return {}
        if isinstance(env, list):
            # a plain list at top level contributes no global environment
            return {}
        if isinstance(env, dict):
            env = env.get('global')
            if isinstance(env, str):
                return parse_env_string(env)
            if isinstance(env, list):
                merged = {}
                for entry in env:
                    merged.update(parse_env_string(entry))
                return merged
            raise BuildbotCiYmlInvalid("'env.global' configuration parameter is invalid")
        raise BuildbotCiYmlInvalid("'env' parameter is invalid")

    @classmethod
    def load_scripts(cls, config):
        """Return a dict mapping each phase in SCRIPTS to its command list."""
        scripts = {}
        for phase in cls.SCRIPTS:
            value = config.get(phase, [])
            if isinstance(value, str):
                value = [value]
            elif not isinstance(value, list):
                raise BuildbotCiYmlInvalid(f"'{phase}' parameter is invalid")
            scripts[phase] = value
        return scripts

    @staticmethod
    def adjust_matrix_config(c, global_env):
        # shallow-copy the entry and expand its env string on top of the
        # global environment
        adjusted = dict(c)
        adjusted['env'] = parse_env_string(adjusted.get('env', ''), global_env)
        return adjusted

    @classmethod
    def load_matrix(cls, config, global_env):
        entries = config.get('matrix', {}).get('include') or []
        return [cls.adjust_matrix_config(entry, global_env) for entry in entries]
class BuildbotTestCiReadConfigMixin:
    """Mixin that locates and parses the CI configuration file on the worker."""

    config_filenames = ['.bbtravis.yml', '.buildbot-ci.yml']
    config = None

    @defer.inlineCallbacks
    def get_config_yml_from_worker(self):
        """Try each candidate filename in turn.

        Returns (filename, yaml_text) on success, or (None, [exceptions])
        when none of the candidates could be read.
        """
        exceptions = []
        for filename in self.config_filenames:
            try:
                config_yml = yield self.getFileContentFromWorker(filename, abandonOnFailure=True)
                return filename, config_yml
            except buildstep.BuildStepFailed as e:
                exceptions.append(e)

        return None, exceptions

    @defer.inlineCallbacks
    def get_ci_config(self):
        """Read and parse the CI configuration, failing the step on error."""
        filename, result = yield self.get_config_yml_from_worker()
        if not filename:
            exceptions = result
            # Fix: the old ' '.join(str(exceptions)) joined the *characters*
            # of the list's repr with spaces; join the per-exception messages
            # instead.
            msg = ' '.join(str(e) for e in exceptions)
            self.descriptionDone = "failed to read configuration"
            # yield the log write for consistency with the other steps in
            # this module (e.g. SubunitShellCommand.run)
            yield self.addCompleteLog(
                'error',
                f'Failed to read configuration from files {self.config_filenames}: got {msg}',
            )
            raise buildstep.BuildStepFailed("failed to read configuration")

        config_yml = result
        yield self.addCompleteLog(filename, config_yml)

        try:
            config = BuildbotCiYml.load_from_str(config_yml)
        except BuildbotCiYmlInvalid as e:
            # plain strings: the old code used f-strings with no placeholders
            self.descriptionDone = 'bad configuration file (unknown)'
            yield self.addCompleteLog('error', f'Bad configuration file:\n{e}')
            raise buildstep.BuildStepFailed('bad configuration file (unknown)') from e

        return config
class BuildbotTestCiTrigger(BuildbotTestCiReadConfigMixin, CompositeStepMixin, Trigger):
    """Trigger step that spawns one build per entry of the CI build matrix.

    The CI config is read from the worker, then one (scheduler, properties)
    pair is generated for each matrix environment.
    """

    def __init__(self, scheduler, **kwargs):
        super().__init__(
            name='buildbot-test-ci trigger',
            waitForFinish=True,
            schedulerNames=[scheduler],
            haltOnFailure=True,
            flunkOnFailure=True,
            sourceStamps=[],
            alwaysUseLatest=False,
            updateSourceStamp=False,
            **kwargs,
        )

    @defer.inlineCallbacks
    def run(self):
        # load the CI config before Trigger.run asks for schedulers/properties
        self.config = yield self.get_ci_config()
        rv = yield super().run()
        return rv

    def _replace_label(self, v):
        # map a raw value through the config's label_mapping (identity default)
        return str(self.config.label_mapping.get(v, v))

    def build_scheduler_for_env(self, scheduler_name, env):
        """Build the (scheduler, Properties) pair for one matrix env entry."""
        new_build_props = Properties()
        new_build_props.setProperty(
            "BUILDBOT_PULL_REQUEST", self.getProperty("BUILDBOT_PULL_REQUEST"), "inherit"
        )
        for k, v in env.items():
            if k == "env":
                # v is dictionary
                new_build_props.update(v, "BuildbotTestCiTrigger")
            else:
                new_build_props.setProperty(k, v, "BuildbotTestCiTrigger")
        # tags derived from matrix-specific properties only (globals and the
        # PR marker are excluded); asDict yields (value, source) tuples
        tags_from_props = sorted(
            f'{self._replace_label(k)}:{self._replace_label(v)}'
            for k, (v, _) in new_build_props.asDict().items()
            if k not in self.config.global_env.keys() and k != 'BUILDBOT_PULL_REQUEST'
        )
        tags = [t for t in self.build.builder.config.tags if t not in ('try', 'spawner')]
        new_build_props.setProperty(
            "virtual_builder_name", " ".join(tags + tags_from_props), "BuildbotTestCiTrigger"
        )
        new_build_props.setProperty(
            "virtual_builder_tags", tags + tags_from_props, "BuildbotTestCiTrigger"
        )
        new_build_props.setProperty(
            "matrix_label", "/".join(tags_from_props), "BuildbotTestCiTrigger"
        )
        return (scheduler_name, new_build_props)

    def createTriggerProperties(self, props):
        # properties are already fully built in build_scheduler_for_env
        return props

    def getSchedulersAndProperties(self):
        # one triggered build per matrix entry, all on the same scheduler
        scheduler_name = self.schedulerNames[0]
        return [self.build_scheduler_for_env(scheduler_name, env) for env in self.config.matrix]
# Extend the default restricted-evaluation model with the extra AST node
# types needed by matrix 'condition' expressions.
eval_model = base_eval_model.clone()
eval_model.nodes.append('Mul')
eval_model.nodes.append('Slice')
eval_model.nodes.append('Tuple')
def evaluate_condition(condition, local_dict):
    """Evaluate *condition* under the restricted expression model and
    coerce the result to a bool."""
    return bool(Expr(condition, eval_model).eval(local_dict))
class BuildbotCiSetupSteps(BuildbotTestCiReadConfigMixin, CompositeStepMixin, buildstep.BuildStep):
    """Read the CI config from the worker and append its script commands
    as shell steps to the current build."""

    name = "setup-steps"
    haltOnFailure = True
    flunkOnFailure = True
    MAX_NAME_LENGTH = 47
    disable = False

    def _add_step(self, command):
        """Append one step for *command* (a string, dict or BuildStep).

        Dict entries may carry 'title', 'condition', 'step' and 'cmd' keys;
        a falsy/failed 'condition' skips the step entirely.
        """
        name = None
        condition = None
        step = None
        original_command = command
        if isinstance(command, dict):
            name = command.get("title")
            condition = command.get("condition")
            step = command.get("step")
            command = command.get("cmd")
        if isinstance(command, buildstep.BuildStep):
            step = command
        if condition is not None:
            try:
                # expose current property values (without sources) to the
                # restricted condition evaluator
                local_dict = {k: v for k, (v, s) in self.build.getProperties().properties.items()}
                if not evaluate_condition(condition, local_dict):
                    return
            except Exception:
                self.descriptionDone = "Problem parsing condition"
                self.addCompleteLog("condition error", traceback.format_exc())
                return
        if step is None:
            if command is None:
                self.addCompleteLog(
                    "BuildbotCiSetupSteps error",
                    f"Neither step nor cmd is defined: {original_command}",
                )
                return
            if name is None:
                name = self._name_from_command(command)

            @renderer
            def render_env(props):
                # stringify all current properties into the step environment
                return {str(k): str(v[0]) for k, v in props.properties.items()}

            step = ShellCommand(
                name=name,
                description=command,
                command=command,
                doStepIf=not self.disable,
                env=render_env,
            )
        self.build.addStepsAfterLastStep([step])

    def _name_from_command(self, name):
        # derive a step name from the command text, truncated to fit the
        # database column for step names
        name = name.lstrip("#").lstrip(" ").split("\n")[0]
        max_length = Model.steps.c.name.type.length
        if len(name) > max_length:
            name = name[: max_length - 3] + "..."
        return name

    @defer.inlineCallbacks
    def run(self):
        config = yield self.get_ci_config()
        for k in BuildbotCiYml.SCRIPTS:
            for command in config.script_commands[k]:
                self._add_step(command=command)
        return SUCCESS
| 13,781 | Python | .py | 332 | 31.85241 | 100 | 0.618763 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,862 | cppcheck.py | buildbot_buildbot/master/buildbot/steps/cppcheck.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import re
from twisted.internet import defer
from buildbot.process import logobserver
from buildbot.process.buildstep import BuildStep
from buildbot.process.buildstep import ShellMixin
from buildbot.process.results import FAILURE
from buildbot.process.results import SUCCESS
from buildbot.process.results import WARNINGS
class Cppcheck(ShellMixin, BuildStep):
    """Run the cppcheck static analyzer and count its messages by severity.

    Per-severity counts are published as 'cppcheck-<severity>' properties
    plus a 'cppcheck-total'; any 'error' message fails the step, any other
    message turns it into WARNINGS.
    """

    # Highly inspired from the Pylint step.
    name = "cppcheck"
    description = ["running", "cppcheck"]
    descriptionDone = ["cppcheck"]
    flunkingIssues = ('error',)
    MESSAGES = ('error', 'warning', 'style', 'performance', 'portability', 'information')
    renderables = ('binary', 'source', 'extra_args')

    def __init__(self, *args, **kwargs):
        # pop our own arguments before the remainder goes to ShellMixin
        for name, default in [
            ('binary', 'cppcheck'),
            ('source', ['.']),
            ('enable', []),
            ('inconclusive', False),
            ('extra_args', []),
        ]:
            setattr(self, name, kwargs.pop(name, default))
        kwargs = self.setupShellMixin(kwargs, prohibitArgs=['command'])
        super().__init__(*args, **kwargs)
        self.addLogObserver('stdio', logobserver.LineConsumerLogObserver(self._log_consumer))
        self.counts = {}
        summaries = self.summaries = {}
        for m in self.MESSAGES:
            self.counts[m] = 0
            summaries[m] = []

    def _log_consumer(self):
        # generator consuming stdio lines; counts lines whose '(severity)'
        # marker matches one of MESSAGES
        line_re = re.compile(rf"(?:\[.+\]: )?\((?P<severity>{'|'.join(self.MESSAGES)})\) .+")
        while True:
            _, line = yield
            m = line_re.match(line)
            if m is not None:
                msgsev = m.group('severity')
                self.summaries[msgsev].append(line)
                self.counts[msgsev] += 1

    @defer.inlineCallbacks
    def run(self):
        command = [self.binary]
        command.extend(self.source)
        if self.enable:
            command.append(f"--enable={','.join(self.enable)}")
        if self.inconclusive:
            command.append('--inconclusive')
        command.extend(self.extra_args)
        cmd = yield self.makeRemoteShellCommand(command=command)
        yield self.runCommand(cmd)
        stdio_log = yield self.getLog('stdio')
        yield stdio_log.finish()
        # copy the class-level list before appending per-build counts
        self.descriptionDone = self.descriptionDone[:]
        for msg in self.MESSAGES:
            self.setProperty(f'cppcheck-{msg}', self.counts[msg], 'Cppcheck')
            if not self.counts[msg]:
                continue
            self.descriptionDone.append(f"{msg}={self.counts[msg]}")
            yield self.addCompleteLog(msg, '\n'.join(self.summaries[msg]))
        self.setProperty('cppcheck-total', sum(self.counts.values()), 'Cppcheck')
        yield self.updateSummary()
        if cmd.results() != SUCCESS:
            return cmd.results()
        for msg in self.flunkingIssues:
            if self.counts[msg] != 0:
                return FAILURE
        if sum(self.counts.values()) > 0:
            return WARNINGS
        return SUCCESS
| 3,710 | Python | .py | 86 | 35.151163 | 93 | 0.639745 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,863 | shell.py | buildbot_buildbot/master/buildbot/steps/shell.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import annotations
import re
from twisted.internet import defer
from buildbot import config
from buildbot.process import buildstep
from buildbot.process import logobserver
# for existing configurations that import WithProperties from here. We like
# to move this class around just to keep our readers guessing.
from buildbot.process.properties import WithProperties
from buildbot.process.results import FAILURE
from buildbot.process.results import SUCCESS
from buildbot.process.results import WARNINGS
from buildbot.process.results import statusToString
from buildbot.process.results import worst_status
from buildbot.steps.worker import CompositeStepMixin
from buildbot.util import join_list
# Reference the re-exported WithProperties once so linters do not flag the
# import above as unused; the temporary list is deleted immediately.
_hush_pyflakes = [
    WithProperties,
]
del _hush_pyflakes
class TreeSize(buildstep.ShellMixin, buildstep.BuildStep):
    """Measure the size of the checkout with ``du -s -k .``.

    The measured size in KiB is stored in the 'tree-size-KiB' property;
    unparsable output yields WARNINGS.
    """

    name = "treesize"
    command = ["du", "-s", "-k", "."]
    description = ["measuring", "tree", "size"]

    def __init__(self, **kwargs):
        kwargs = self.setupShellMixin(kwargs)
        super().__init__(**kwargs)
        self.observer = logobserver.BufferLogObserver(wantStdout=True, wantStderr=True)
        self.addLogObserver('stdio', self.observer)

    @defer.inlineCallbacks
    def run(self):
        cmd = yield self.makeRemoteShellCommand()
        yield self.runCommand(cmd)
        stdio_log = yield self.getLog('stdio')
        yield stdio_log.finish()
        size_match = re.search(r'^(\d+)', self.observer.getStdout())
        kib = int(size_match.group(1)) if size_match else None
        if kib is not None:
            self.setProperty("tree-size-KiB", kib, "treesize")
            self.descriptionDone = f"treesize {kib} KiB"
        else:
            self.descriptionDone = "treesize unknown"
        if cmd.didFail():
            return FAILURE
        if kib is None:
            return WARNINGS  # not sure how 'du' could fail, but whatever
        return SUCCESS
class SetPropertyFromCommand(buildstep.ShellMixin, buildstep.BuildStep):
    """Run a command and set build properties from its output.

    Exactly one of 'property' (stores stripped stdout under that name) or
    'extract_fn' (maps rc/stdout/stderr to a dict of properties) must be
    given.
    """

    name = "setproperty"
    renderables = ['property']

    def __init__(
        self,
        property=None,
        extract_fn=None,
        strip=True,
        includeStdout=True,
        includeStderr=False,
        **kwargs,
    ):
        kwargs = self.setupShellMixin(kwargs)
        self.property = property
        self.extract_fn = extract_fn
        self.strip = strip
        self.includeStdout = includeStdout
        self.includeStderr = includeStderr
        # XOR: exactly one of the two modes must be selected
        if not (property is not None) ^ (extract_fn is not None):
            config.error("Exactly one of property and extract_fn must be set")
        super().__init__(**kwargs)
        if self.extract_fn:
            # extract_fn receives stderr too, so always capture it
            self.includeStderr = True
        self.observer = logobserver.BufferLogObserver(
            wantStdout=self.includeStdout, wantStderr=self.includeStderr
        )
        self.addLogObserver('stdio', self.observer)

    @defer.inlineCallbacks
    def run(self):
        cmd = yield self.makeRemoteShellCommand()
        yield self.runCommand(cmd)
        stdio_log = yield self.getLog('stdio')
        yield stdio_log.finish()
        property_changes = {}
        if self.property:
            # single-property mode: a failed command sets nothing
            if cmd.didFail():
                return FAILURE
            result = self.observer.getStdout()
            if self.strip:
                result = result.strip()
            propname = self.property
            self.setProperty(propname, result, "SetPropertyFromCommand Step")
            property_changes[propname] = result
        else:
            # extract_fn mode: runs even on failure, with the return code
            new_props = self.extract_fn(
                cmd.rc, self.observer.getStdout(), self.observer.getStderr()
            )
            for k, v in new_props.items():
                self.setProperty(k, v, "SetPropertyFromCommand Step")
            property_changes = new_props
        props_set = [f"{k}: {v!r}" for k, v in sorted(property_changes.items())]
        yield self.addCompleteLog('property changes', "\n".join(props_set))
        if len(property_changes) > 1:
            self.descriptionDone = f'{len(property_changes)} properties set'
        elif len(property_changes) == 1:
            self.descriptionDone = f'property \'{next(iter(property_changes))}\' set'
        if cmd.didFail():
            return FAILURE
        return SUCCESS
class ShellCommand(buildstep.ShellMixin, buildstep.BuildStep):
    """Run an arbitrary shell command on the worker.

    When instantiated directly (not subclassed), the constructor validates
    that 'command' is given and that all keyword arguments are ones
    understood by RemoteShellCommand or BuildStep.
    """

    name = 'shell'

    def __init__(self, **kwargs):
        if self.is_exact_step_class(ShellCommand):
            if 'command' not in kwargs:
                config.error("ShellCommand's `command' argument is not specified")
            # check validity of arguments being passed to RemoteShellCommand
            valid_rsc_args = [
                "command",
                "env",
                "want_stdout",
                "want_stderr",
                "timeout",
                "maxTime",
                "max_lines",
                "sigtermTime",
                "logfiles",
                "lazylogfiles",
                "usePTY",
                "logEnviron",
                "collectStdout",
                "collectStderr",
                "interruptSignal",
                "initialStdin",
                "decodeRC",
                "stdioLogName",
                "workdir",
                *buildstep.BuildStep._params_names,
            ]
            invalid_args = []
            for arg in kwargs:
                if arg not in valid_rsc_args:
                    invalid_args.append(arg)
            if invalid_args:
                config.error(
                    "Invalid argument(s) passed to ShellCommand: " + ', '.join(invalid_args)
                )
        kwargs = self.setupShellMixin(kwargs)
        super().__init__(**kwargs)

    @defer.inlineCallbacks
    def run(self):
        cmd = yield self.makeRemoteShellCommand()
        yield self.runCommand(cmd)
        return cmd.results()
class Configure(ShellCommand):
    """Run the conventional './configure' script; failure halts the build."""

    name = "configure"
    haltOnFailure = True
    flunkOnFailure = True
    description = "configuring"
    descriptionDone = "configure"
    command = ["./configure"]
class WarningCountingShellCommand(buildstep.ShellMixin, CompositeStepMixin, buildstep.BuildStep):
    """Shell command step that scans its output for (compiler) warnings.

    Lines matching ``warningPattern`` are counted, collected into a
    'warnings' log and accumulated in the 'warnings-count' build property.
    Suppression rules (an inline list and/or a file fetched from the
    worker) can mask expected warnings; exceeding ``maxWarnCount`` turns
    the result into FAILURE, any remaining warning into WARNINGS.
    """

    renderables = [
        'suppressionFile',
        'suppressionList',
        'warningPattern',
        'directoryEnterPattern',
        'directoryLeavePattern',
        'maxWarnCount',
    ]
    warnCount = 0
    warningPattern = '(?i).*warning[: ].*'
    # The defaults work for GNU Make.
    directoryEnterPattern = "make.*: Entering directory [\u2019\"`'](.*)[\u2019'`\"]"
    directoryLeavePattern = "make.*: Leaving directory"
    suppressionFile: str | None = None
    # suppression-file syntax: blank/comment lines, or FILE-RE:WARN-RE[:START[-END]]
    commentEmptyLineRe = re.compile(r"^\s*(#.*)?$")
    suppressionLineRe = re.compile(r"^\s*(.+?)\s*:\s*(.+?)\s*(?:[:]\s*([0-9]+)(?:-([0-9]+))?\s*)?$")

    class Sentinel:
        # marker distinguishing "argument not given" from an explicit None
        pass

    _sentinel = Sentinel()

    def __init__(
        self,
        warningPattern=_sentinel,
        warningExtractor=None,
        maxWarnCount=None,
        directoryEnterPattern=None,
        directoryLeavePattern=None,
        suppressionFile=None,
        suppressionList=None,
        **kwargs,
    ):
        # See if we've been given a regular expression to use to match
        # warnings. If not, use a default that assumes any line with "warning"
        # present is a warning. This may lead to false positives in some cases.
        if not isinstance(warningPattern, self.Sentinel):
            self.warningPattern = warningPattern
        if directoryEnterPattern:
            self.directoryEnterPattern = directoryEnterPattern
        if directoryLeavePattern:
            self.directoryLeavePattern = directoryLeavePattern
        if suppressionFile:
            self.suppressionFile = suppressionFile
        # self.suppressions is already taken, so use something else
        self.suppressionList = suppressionList
        if warningExtractor:
            self.warningExtractor = warningExtractor
        else:
            self.warningExtractor = WarningCountingShellCommand.warnExtractWholeLine
        self.maxWarnCount = maxWarnCount
        if self.is_exact_step_class(WarningCountingShellCommand) and not kwargs.get('command'):
            # WarningCountingShellCommand class is directly instantiated.
            # Explicitly check that command is set to prevent runtime error
            # later.
            config.error("WarningCountingShellCommand's 'command' argument is not specified")
        kwargs = self.setupShellMixin(kwargs)
        super().__init__(**kwargs)
        self.suppressions = []
        self.directoryStack = []
        self.warnCount = 0
        self.loggedWarnings = []
        if self.warningPattern is not None:
            self.addLogObserver(
                'stdio', logobserver.LineConsumerLogObserver(self.warningLogConsumer)
            )

    def addSuppression(self, suppressionList):
        """
        This method can be used to add patters of warnings that should
        not be counted.

        It takes a single argument, a list of patterns.

        Each pattern is a 4-tuple (FILE-RE, WARN-RE, START, END).

        FILE-RE is a regular expression (string or compiled regexp), or None.
        If None, the pattern matches all files, else only files matching the
        regexp. If directoryEnterPattern is specified in the class constructor,
        matching is against the full path name, eg. src/main.c.

        WARN-RE is similarly a regular expression matched against the
        text of the warning, or None to match all warnings.

        START and END form an inclusive line number range to match against. If
        START is None, there is no lower bound, similarly if END is none there
        is no upper bound."""
        for fileRe, warnRe, start, end in suppressionList:
            if fileRe is not None and isinstance(fileRe, str):
                fileRe = re.compile(fileRe)
            if warnRe is not None and isinstance(warnRe, str):
                warnRe = re.compile(warnRe)
            self.suppressions.append((fileRe, warnRe, start, end))

    def warnExtractWholeLine(self, line, match):
        """
        Extract warning text as the whole line.
        No file names or line numbers."""
        return (None, None, line)

    def warnExtractFromRegexpGroups(self, line, match):
        """
        Extract file name, line number, and warning text as groups (1,2,3)
        of warningPattern match."""
        file = match.group(1)
        lineNo = match.group(2)
        if lineNo is not None:
            lineNo = int(lineNo)
        text = match.group(3)
        return (file, lineNo, text)

    def warningLogConsumer(self):
        # Now compile a regular expression from whichever warning pattern we're
        # using
        wre = self.warningPattern
        if isinstance(wre, str):
            wre = re.compile(wre)
        directoryEnterRe = self.directoryEnterPattern
        if directoryEnterRe is not None and isinstance(directoryEnterRe, str):
            directoryEnterRe = re.compile(directoryEnterRe)
        directoryLeaveRe = self.directoryLeavePattern
        if directoryLeaveRe is not None and isinstance(directoryLeaveRe, str):
            directoryLeaveRe = re.compile(directoryLeaveRe)
        # Check if each line in the output from this command matched our
        # warnings regular expressions. If did, bump the warnings count and
        # add the line to the collection of lines with warnings
        self.loggedWarnings = []
        while True:
            _, line = yield
            if directoryEnterRe:
                match = directoryEnterRe.search(line)
                if match:
                    self.directoryStack.append(match.group(1))
                    continue
            if directoryLeaveRe and self.directoryStack and directoryLeaveRe.search(line):
                self.directoryStack.pop()
                continue
            match = wre.match(line)
            if match:
                self.maybeAddWarning(self.loggedWarnings, line, match)

    def maybeAddWarning(self, warnings, line, match):
        # count the warning unless one of the suppression rules matches it
        if self.suppressions:
            (file, lineNo, text) = self.warningExtractor(self, line, match)
            lineNo = lineNo and int(lineNo)
            if file is not None and file != "" and self.directoryStack:
                currentDirectory = '/'.join(self.directoryStack)
                if currentDirectory is not None and currentDirectory != "":
                    file = f"{currentDirectory}/{file}"
            # Skip adding the warning if any suppression matches.
            for fileRe, warnRe, start, end in self.suppressions:
                if not (file is None or fileRe is None or fileRe.match(file)):
                    continue
                if not (warnRe is None or warnRe.search(text)):
                    continue
                if (start is not None and end is not None) and not (
                    lineNo is not None and start <= lineNo <= end
                ):
                    continue
                return
        warnings.append(line)
        self.warnCount += 1

    @defer.inlineCallbacks
    def setup_suppression(self):
        # merge the inline suppression list and the (parsed) suppression file
        if self.suppressionList is not None:
            self.addSuppression(self.suppressionList)
        if self.suppressionFile is not None:
            data = yield self.getFileContentFromWorker(self.suppressionFile, abandonOnFailure=True)
            lines = data.split("\n")
            list = []
            for line in lines:
                if self.commentEmptyLineRe.match(line):
                    continue
                match = self.suppressionLineRe.match(line)
                if match:
                    file, test, start, end = match.groups()
                    if end is not None:
                        end = int(end)
                    if start is not None:
                        start = int(start)
                        if end is None:
                            end = start
                    list.append((file, test, start, end))
            self.addSuppression(list)

    @defer.inlineCallbacks
    def run(self):
        yield self.setup_suppression()
        cmd = yield self.makeRemoteShellCommand()
        yield self.runCommand(cmd)
        yield self.finish_logs()
        yield self.createSummary()
        return self.evaluateCommand(cmd)

    @defer.inlineCallbacks
    def finish_logs(self):
        stdio_log = yield self.getLog('stdio')
        yield stdio_log.finish()

    @defer.inlineCallbacks
    def createSummary(self):
        """
        Match log lines against warningPattern.

        Warnings are collected into another log for this step, and the
        build-wide 'warnings-count' is updated."""
        # If there were any warnings, make the log if lines with warnings
        # available
        if self.warnCount:
            yield self.addCompleteLog(
                f"warnings ({self.warnCount})", "\n".join(self.loggedWarnings) + "\n"
            )
        warnings_stat = self.getStatistic('warnings', 0)
        self.setStatistic('warnings', warnings_stat + self.warnCount)
        old_count = self.getProperty("warnings-count", 0)
        self.setProperty(
            "warnings-count", old_count + self.warnCount, "WarningCountingShellCommand"
        )

    def evaluateCommand(self, cmd):
        # escalate to FAILURE past maxWarnCount, else WARNINGS if any seen
        result = cmd.results()
        if self.maxWarnCount is not None and self.warnCount > self.maxWarnCount:
            result = worst_status(result, FAILURE)
        elif self.warnCount:
            result = worst_status(result, WARNINGS)
        return result
class Compile(WarningCountingShellCommand):
    """Run 'make all' (by default) while counting compiler warnings."""

    name = "compile"
    haltOnFailure = True
    flunkOnFailure = True
    description = ["compiling"]
    descriptionDone = ["compile"]
    command = ["make", "all"]
class Test(WarningCountingShellCommand):
    """Run the test suite ('make test' by default) and report statistics.

    Subclasses call setTestResults() to accumulate totals, which are then
    rendered into the step summary.
    """

    name = "test"
    warnOnFailure = True
    description = ["testing"]
    descriptionDone = ["test"]
    command = ["make", "test"]

    def setTestResults(self, total=0, failed=0, passed=0, warnings=0):
        """
        Called by subclasses to set the relevant statistics; this actually
        adds to any statistics already present
        """
        total += self.getStatistic('tests-total', 0)
        self.setStatistic('tests-total', total)
        failed += self.getStatistic('tests-failed', 0)
        self.setStatistic('tests-failed', failed)
        warnings += self.getStatistic('tests-warnings', 0)
        self.setStatistic('tests-warnings', warnings)
        passed += self.getStatistic('tests-passed', 0)
        self.setStatistic('tests-passed', passed)

    def getResultSummary(self):
        description = []
        if self.hasStatistic('tests-total'):
            total = self.getStatistic("tests-total", 0)
            failed = self.getStatistic("tests-failed", 0)
            passed = self.getStatistic("tests-passed", 0)
            warnings = self.getStatistic("tests-warnings", 0)
            if not total:
                # fall back to the sum when no explicit total was recorded
                total = failed + passed + warnings
            if total:
                description += [str(total), 'tests']
            if passed:
                description += [str(passed), 'passed']
            if warnings:
                description += [str(warnings), 'warnings']
            if failed:
                description += [str(failed), 'failed']
            if description:
                summary = join_list(description)
                if self.results != SUCCESS:
                    summary += f' ({statusToString(self.results)})'
                if self.timed_out:
                    summary += " (timed out)"
                return {'step': summary}
        return super().getResultSummary()
class PerlModuleTestObserver(logobserver.LogLineObserver):
    """Parse 'prove' output, supporting both old and new Test::Harness
    summary formats, and track totals/failures/warnings."""

    def __init__(self, warningPattern):
        super().__init__()
        if warningPattern:
            self.warningPattern = re.compile(warningPattern)
        else:
            # no pattern configured: warning counting is disabled
            self.warningPattern = None
        self.rc = SUCCESS
        self.total = 0
        self.failed = 0
        self.warnings = 0
        self.newStyle = False
        self.complete = False

    failedRe = re.compile(r"Tests: \d+ Failed: (\d+)\)")
    testsRe = re.compile(r"Files=\d+, Tests=(\d+)")
    oldFailureCountsRe = re.compile(r"(\d+)/(\d+) subtests failed")
    oldSuccessCountsRe = re.compile(r"Files=\d+, Tests=(\d+),")

    def outLineReceived(self, line):
        # guard: warningPattern is None when no pattern was configured;
        # dereferencing it unconditionally raised AttributeError
        if self.warningPattern is not None and self.warningPattern.match(line):
            self.warnings += 1
        if self.newStyle:
            if line.startswith('Result: FAIL'):
                self.rc = FAILURE
            mo = self.failedRe.search(line)
            if mo:
                self.failed += int(mo.group(1))
                if self.failed:
                    self.rc = FAILURE
            mo = self.testsRe.search(line)
            if mo:
                self.total = int(mo.group(1))
        else:
            if line.startswith('Test Summary Report'):
                # switch to the new-style summary parser
                self.newStyle = True
            mo = self.oldFailureCountsRe.search(line)
            if mo:
                self.failed = int(mo.group(1))
                self.total = int(mo.group(2))
                self.rc = FAILURE
            mo = self.oldSuccessCountsRe.search(line)
            if mo:
                self.total = int(mo.group(1))
class PerlModuleTest(Test):
    """Run Perl tests with 'prove' and derive results from its output."""

    command = ["prove", "--lib", "lib", "-r", "t"]
    total = 0

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # parse prove's summary lines instead of generic warning counting
        self.observer = PerlModuleTestObserver(warningPattern=self.warningPattern)
        self.addLogObserver('stdio', self.observer)

    def evaluateCommand(self, cmd):
        if self.observer.total:
            passed = self.observer.total - self.observer.failed
            self.setTestResults(
                total=self.observer.total,
                failed=self.observer.failed,
                passed=passed,
                warnings=self.observer.warnings,
            )
        rc = self.observer.rc
        if rc == SUCCESS and self.observer.warnings:
            rc = WARNINGS
        return rc
| 20,873 | Python | .py | 495 | 31.779798 | 100 | 0.611344 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,864 | shellsequence.py | buildbot_buildbot/master/buildbot/steps/shellsequence.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import copy
from twisted.internet import defer
from twisted.python import log
from buildbot import config
from buildbot.process import buildstep
from buildbot.process import results
class ShellArg(results.ResultComputingConfigMixin):
    """One command of a ShellSequence, with its log name and result flags.

    Accepts the result-computing flags from ResultComputingConfigMixin
    (e.g. haltOnFailure, flunkOnFailure) as keyword arguments.
    """

    publicAttributes = [*results.ResultComputingConfigMixin.resultConfig, "command", "logname"]

    def __init__(self, command=None, logname=None, **kwargs):
        name = self.__class__.__name__
        if command is None:
            config.error(f"the 'command' parameter of {name} must not be None")
        self.command = command
        self.logname = logname
        for k, v in kwargs.items():
            if k not in self.resultConfig:
                config.error(f"the parameter '{k}' is not handled by ShellArg")
            setattr(self, k, v)
        # we don't validate anything yet as we can have renderables.

    def validateAttributes(self):
        """Validate command/flag types after renderables were resolved."""
        # only make the check if we have a list
        if not isinstance(self.command, (str, list)):
            config.error(f"{self.command} is an invalid command, it must be a string or a list")
        if isinstance(self.command, list):
            if not all(isinstance(x, str) for x in self.command):
                config.error(f"{self.command} must only have strings in it")
        runConfParams = [(p_attr, getattr(self, p_attr)) for p_attr in self.resultConfig]
        not_bool = [
            (p_attr, p_val) for (p_attr, p_val) in runConfParams if not isinstance(p_val, bool)
        ]
        if not_bool:
            config.error(f"{not_bool!r} must be booleans")

    @defer.inlineCallbacks
    def getRenderingFor(self, build):
        # render each public attribute against the build and return a copy
        rv = copy.copy(self)
        for p_attr in self.publicAttributes:
            res = yield build.render(getattr(self, p_attr))
            setattr(rv, p_attr, res)
        return rv
class ShellSequence(buildstep.ShellMixin, buildstep.BuildStep):
    """Run a sequence of ShellArg commands, combining their results.

    Each entry's result flags decide whether the sequence terminates early
    and how the overall result is computed.
    """

    last_command = None
    renderables = ['commands']

    def __init__(self, commands=None, **kwargs):
        self.commands = commands
        kwargs = self.setupShellMixin(kwargs, prohibitArgs=['command'])
        super().__init__(**kwargs)

    def shouldRunTheCommand(self, cmd):
        # hook for subclasses: skip falsy commands by default
        return bool(cmd)

    def getFinalState(self):
        return self.describe(True)

    @defer.inlineCallbacks
    def runShellSequence(self, commands):
        terminate = False
        if commands is None:
            log.msg("After rendering, ShellSequence `commands` is None")
            return results.EXCEPTION
        overall_result = results.SUCCESS
        for arg in commands:
            if not isinstance(arg, ShellArg):
                log.msg(
                    "After rendering, ShellSequence `commands` list "
                    "contains something that is not a ShellArg"
                )
                return results.EXCEPTION
            try:
                arg.validateAttributes()
            except config.ConfigErrors as e:
                log.msg(f"After rendering, ShellSequence `commands` is invalid: {e}")
                return results.EXCEPTION
            # handle the command from the arg
            command = arg.command
            if not self.shouldRunTheCommand(command):
                continue
            # keep the command around so we can describe it
            self.last_command = command
            cmd = yield self.makeRemoteShellCommand(command=command, stdioLogName=arg.logname)
            yield self.runCommand(cmd)
            overall_result, terminate = results.computeResultAndTermination(
                arg, cmd.results(), overall_result
            )
            if terminate:
                break
        return overall_result

    def run(self):
        return self.runShellSequence(self.commands)
| 4,475 | Python | .py | 99 | 36.20202 | 96 | 0.653122 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,865 | vstudio.py | buildbot_buildbot/master/buildbot/steps/vstudio.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
# Visual studio steps
from __future__ import annotations
import re
from twisted.internet import defer
from buildbot import config
from buildbot.process import buildstep
from buildbot.process import results
from buildbot.process.logobserver import LogLineObserver
class MSLogLineObserver(LogLineObserver):
    """Watch MSVC/msbuild stdout, counting projects, files, warnings and
    errors, and routing warning/error lines into dedicated logs."""

    stdoutDelimiter = "\r\n"
    stderrDelimiter = "\r\n"

    _re_delimiter = re.compile(r'^(\d+>)?-{5}.+-{5}$')
    _re_file = re.compile(r'^(\d+>)?[^ ]+\.(cpp|c)$')
    _re_warning = re.compile(r' ?: warning [A-Z]+[0-9]+:')
    _re_error = re.compile(r' ?error ([A-Z]+[0-9]+)?\s?: ')

    nbFiles = 0
    nbProjects = 0
    nbWarnings = 0
    nbErrors = 0

    logwarnings = None
    logerrors = None

    def __init__(self, logwarnings, logerrors, **kwargs):
        super().__init__(**kwargs)
        self.logwarnings = logwarnings
        self.logerrors = logerrors

    def outLineReceived(self, line):
        """Classify one output line and update counters, logs and progress."""
        terminated = f"{line}\n"
        if self._re_delimiter.search(line):
            # project banner: record it in both auxiliary logs
            self.nbProjects += 1
            self.logwarnings.addStdout(terminated)
            self.logerrors.addStdout(terminated)
            self.step.setProgress('projects', self.nbProjects)
            return
        if self._re_file.search(line):
            self.nbFiles += 1
            self.step.setProgress('files', self.nbFiles)
            return
        if self._re_warning.search(line):
            self.nbWarnings += 1
            self.logwarnings.addStdout(terminated)
            self.step.setProgress('warnings', self.nbWarnings)
            return
        if self._re_error.search(terminated):
            # error has no progress indication
            self.nbErrors += 1
            self.logerrors.addStderr(terminated)
class VisualStudio(buildstep.ShellMixin, buildstep.BuildStep):
    # an *abstract* base class, which will not itself work as a buildstep
    name = "compile"
    description = "compiling"
    descriptionDone = "compile"
    progressMetrics = (*buildstep.BuildStep.progressMetrics, "projects", "files", "warnings")
    # MSLogLineObserver instance, attached by setup_log_files()
    logobserver = None
    # where Visual Studio is installed on the worker; subclasses provide
    # default_installdir when a version-specific default exists
    installdir: str | None = None
    default_installdir: str | None = None
    # One of build, clean or rebuild
    mode = "rebuild"
    projectfile = None
    config = None
    # when True, PATH/INCLUDE/LIB are injected into the command environment
    useenv = False
    project = None
    PATH: list[str] = []
    INCLUDE: list[str] = []
    LIB: list[str] = []
    renderables = ['projectfile', 'config', 'project', 'mode']
def __init__(
self,
installdir=None,
mode="rebuild",
projectfile=None,
config='release',
useenv=False,
project=None,
INCLUDE=None,
LIB=None,
PATH=None,
**kwargs,
):
if INCLUDE is None:
INCLUDE = []
if LIB is None:
LIB = []
if PATH is None:
PATH = []
self.installdir = installdir
self.mode = mode
self.projectfile = projectfile
self.config = config
self.useenv = useenv
self.project = project
if INCLUDE:
self.INCLUDE = INCLUDE
self.useenv = True
if LIB:
self.LIB = LIB
self.useenv = True
if PATH:
self.PATH = PATH
kwargs = self.setupShellMixin(kwargs, prohibitArgs=['command'])
super().__init__(**kwargs)
def add_env_path(self, name, value):
"""concat a path for this name"""
try:
oldval = self.env[name]
if not oldval.endswith(';'):
oldval = oldval + ';'
except KeyError:
oldval = ""
if not value.endswith(';'):
value = value + ';'
self.env[name] = oldval + value
    @defer.inlineCallbacks
    def setup_log_files(self):
        # Create the 'warnings' and 'errors' logs and attach the MSVC line
        # observer that routes matching stdio lines into them.
        logwarnings = yield self.addLog("warnings")
        logerrors = yield self.addLog("errors")
        self.logobserver = MSLogLineObserver(logwarnings, logerrors)
        yield self.addLogObserver('stdio', self.logobserver)
def setupEnvironment(self):
if self.env is None:
self.env = {}
# setup the custom one, those one goes first
for path in self.PATH:
self.add_env_path("PATH", path)
for path in self.INCLUDE:
self.add_env_path("INCLUDE", path)
for path in self.LIB:
self.add_env_path("LIB", path)
if not self.installdir:
self.installdir = self.default_installdir
def evaluate_result(self, cmd):
self.setStatistic('projects', self.logobserver.nbProjects)
self.setStatistic('files', self.logobserver.nbFiles)
self.setStatistic('warnings', self.logobserver.nbWarnings)
self.setStatistic('errors', self.logobserver.nbErrors)
if cmd.didFail():
return results.FAILURE
if self.logobserver.nbErrors > 0:
return results.FAILURE
if self.logobserver.nbWarnings > 0:
return results.WARNINGS
return results.SUCCESS
@defer.inlineCallbacks
def run(self):
self.setupEnvironment()
yield self.setup_log_files()
cmd = yield self.makeRemoteShellCommand()
yield self.runCommand(cmd)
yield self.finish_logs()
self.results = self.evaluate_result(cmd)
return self.results
def getResultSummary(self):
if self.logobserver is None:
# step was skipped or log observer was not created due to another reason
return {"step": results.statusToString(self.results)}
description = (
f'compile {self.logobserver.nbProjects} projects {self.logobserver.nbFiles} ' 'files'
)
if self.logobserver.nbWarnings > 0:
description += f' {self.logobserver.nbWarnings} warnings'
if self.logobserver.nbErrors > 0:
description += f' {self.logobserver.nbErrors} errors'
if self.results != results.SUCCESS:
description += f' ({results.statusToString(self.results)})'
if self.timed_out:
description += " (timed out)"
return {'step': description}
@defer.inlineCallbacks
def finish_logs(self):
log = yield self.getLog("warnings")
yield log.finish()
log = yield self.getLog("errors")
yield log.finish()
class VC6(VisualStudio):
    """Build with Visual C++ 6 using the ``msdev`` command-line driver."""

    default_installdir = 'C:\\Program Files\\Microsoft Visual Studio'

    def setupEnvironment(self):
        super().setupEnvironment()
        # Layout of a VC6 installation: shared IDE files plus the VC98 tree.
        common_dir = self.installdir + '\\Common'
        msvc_dir = self.installdir + '\\VC98'
        msdev_dir = common_dir + '\\msdev98'
        for entry in (
            msdev_dir + '\\BIN',
            msvc_dir + '\\BIN',
            common_dir + '\\TOOLS\\WINNT',
            common_dir + '\\TOOLS',
        ):
            self.add_env_path("PATH", entry)
        for entry in (
            msvc_dir + '\\INCLUDE',
            msvc_dir + '\\ATL\\INCLUDE',
            msvc_dir + '\\MFC\\INCLUDE',
        ):
            self.add_env_path("INCLUDE", entry)
        for entry in (msvc_dir + '\\LIB', msvc_dir + '\\MFC\\LIB'):
            self.add_env_path("LIB", entry)

    @defer.inlineCallbacks
    def run(self):
        # msdev expects its /MAKE argument as "<project> - <config>".
        target = self.project if self.project is not None else "ALL"
        command = ["msdev", self.projectfile, "/MAKE", target + " - " + self.config]
        mode_flags = {"rebuild": "/REBUILD", "clean": "/CLEAN"}
        command.append(mode_flags.get(self.mode, "/BUILD"))
        if self.useenv:
            command.append("/USEENV")
        self.command = command
        res = yield super().run()
        return res
class VC7(VisualStudio):
    """Build with Visual Studio .NET 2003 using ``devenv.com``."""

    default_installdir = 'C:\\Program Files\\Microsoft Visual Studio .NET 2003'

    def setupEnvironment(self):
        super().setupEnvironment()
        ide_dir = self.installdir + '\\Common7\\IDE'
        vc_root = self.installdir
        msvc_dir = self.installdir + '\\VC7'
        for entry in (
            ide_dir,
            msvc_dir + '\\BIN',
            vc_root + '\\Common7\\Tools',
            vc_root + '\\Common7\\Tools\\bin',
        ):
            self.add_env_path("PATH", entry)
        for entry in (
            msvc_dir + '\\INCLUDE',
            msvc_dir + '\\ATLMFC\\INCLUDE',
            msvc_dir + '\\PlatformSDK\\include',
            vc_root + '\\SDK\\v1.1\\include',
        ):
            self.add_env_path("INCLUDE", entry)
        for entry in (
            msvc_dir + '\\LIB',
            msvc_dir + '\\ATLMFC\\LIB',
            msvc_dir + '\\PlatformSDK\\lib',
            vc_root + '\\SDK\\v1.1\\lib',
        ):
            self.add_env_path("LIB", entry)

    @defer.inlineCallbacks
    def run(self):
        command = ["devenv.com", self.projectfile]
        mode_switches = {"rebuild": "/Rebuild", "clean": "/Clean"}
        command.append(mode_switches.get(self.mode, "/Build"))
        command.append(self.config)
        if self.useenv:
            command.append("/UseEnv")
        if self.project is not None:
            command.extend(["/Project", self.project])
        self.command = command
        res = yield super().run()
        return res


# alias VC7 as VS2003
VS2003 = VC7
class VC8(VC7):
    """Build with Visual Studio 2005; adds target-architecture (x86/x64) support."""

    # Our ones
    arch = None
    default_installdir = 'C:\\Program Files\\Microsoft Visual Studio 8'
    renderables = ['arch']

    def __init__(self, arch="x86", **kwargs):
        self.arch = arch
        # always upcall !
        super().__init__(**kwargs)

    def setupEnvironment(self):
        # Do not use super() here. We want to override VC7.setupEnvironment().
        VisualStudio.setupEnvironment(self)
        vs_root = self.installdir
        vc_root = self.installdir + '\\VC'

        path_entries = [vs_root + '\\Common7\\IDE']
        if self.arch == "x64":
            # the x86-hosted amd64 cross tools must come before the plain BIN dir
            path_entries.append(vc_root + '\\BIN\\x86_amd64')
        path_entries += [
            vc_root + '\\BIN',
            vs_root + '\\Common7\\Tools',
            vs_root + '\\Common7\\Tools\\bin',
            vc_root + '\\PlatformSDK\\bin',
            vs_root + '\\SDK\\v2.0\\bin',
            vc_root + '\\VCPackages',
            r'${PATH}',
        ]
        for entry in path_entries:
            self.add_env_path("PATH", entry)

        for entry in (
            vc_root + '\\INCLUDE',
            vc_root + '\\ATLMFC\\include',
            vc_root + '\\PlatformSDK\\include',
        ):
            self.add_env_path("INCLUDE", entry)

        arch_suffix = '\\amd64' if self.arch == "x64" else ''
        for entry in (
            vc_root + '\\LIB' + arch_suffix,
            vc_root + '\\ATLMFC\\LIB' + arch_suffix,
            vc_root + '\\PlatformSDK\\lib' + arch_suffix,
            vs_root + '\\SDK\\v2.0\\lib' + arch_suffix,
        ):
            self.add_env_path("LIB", entry)


# alias VC8 as VS2005
VS2005 = VC8
class VCExpress9(VC8):
    """Build with Visual C++ 2008 Express, which ships ``vcexpress``
    instead of ``devenv.com``."""

    @defer.inlineCallbacks
    def run(self):
        command = ["vcexpress", self.projectfile]
        mode_switches = {"rebuild": "/Rebuild", "clean": "/Clean"}
        command.append(mode_switches.get(self.mode, "/Build"))
        command.append(self.config)
        if self.useenv:
            command.append("/UseEnv")
        if self.project is not None:
            command.extend(["/Project", self.project])
        self.command = command
        # Do not use super() here. We want to override VC7.start().
        res = yield VisualStudio.run(self)
        return res
# Add first support for VC9 (Same as VC8, with a different installdir)
class VC9(VC8):
    default_installdir = 'C:\\Program Files\\Microsoft Visual Studio 9.0'


VS2008 = VC9


# VC10 doesn't look like it needs extra stuff.
class VC10(VC9):
    default_installdir = 'C:\\Program Files\\Microsoft Visual Studio 10.0'


VS2010 = VC10


# VC11 doesn't look like it needs extra stuff.
class VC11(VC10):
    default_installdir = 'C:\\Program Files\\Microsoft Visual Studio 11.0'


VS2012 = VC11


# VC12 doesn't look like it needs extra stuff.
class VC12(VC11):
    default_installdir = 'C:\\Program Files\\Microsoft Visual Studio 12.0'


VS2013 = VC12


# VC14 doesn't look like it needs extra stuff.
class VC14(VC12):
    default_installdir = 'C:\\Program Files (x86)\\Microsoft Visual Studio 14.0'


VS2015 = VC14


# NOTE: these are raw strings, so '\\' would stay as two literal backslashes.
# The previous values used r"...\\..." and therefore contained doubled
# separators (inconsistent with the single-backslash paths above); Windows
# tolerates doubled separators, but the single-backslash form is correct.
class VC141(VC14):
    default_installdir = r"C:\Program Files (x86)\Microsoft Visual Studio\2017\Community"


VS2017 = VC141


class VS2019(VS2017):
    default_installdir = r"C:\Program Files (x86)\Microsoft Visual Studio\2019\Community"


class VS2022(VS2017):
    default_installdir = r"C:\Program Files (x86)\Microsoft Visual Studio\2022\Community"
def _msbuild_format_defines_parameter(defines):
if defines is None or len(defines) == 0:
return ""
return f' /p:DefineConstants="{";".join(defines)}"'
def _msbuild_format_target_parameter(mode, project):
modestring = None
if mode == "clean":
modestring = 'Clean'
elif mode == "build":
modestring = 'Build'
elif mode == "rebuild":
modestring = 'Rebuild'
parameter = ""
if project is not None:
if modestring == "Rebuild" or modestring is None:
parameter = f' /t:"{project}"'
else:
parameter = f' /t:"{project}:{modestring}"'
elif modestring is not None:
parameter = f' /t:{modestring}'
return parameter
class MsBuild4(VisualStudio):
    """Build a solution/project with MSBuild 4, sourcing the VS environment
    via the ``%VS110COMNTOOLS%`` vcvarsall batch file."""
    platform = None
    defines = None
    vcenv_bat = r"${VS110COMNTOOLS}..\..\VC\vcvarsall.bat"
    renderables = ['platform']
    description = 'building'
    def __init__(self, platform, defines=None, **kwargs):
        """*platform* (e.g. "Win32") is mandatory; *defines* is an optional
        list of preprocessor constants."""
        self.platform = platform
        self.defines = defines
        super().__init__(**kwargs)
    def setupEnvironment(self):
        super().setupEnvironment()
        # the command line below expands %VCENV_BAT% to locate vcvarsall
        self.env['VCENV_BAT'] = self.vcenv_bat
    def describe_project(self, done=False):
        """Return a human-readable "<project> for <config>|<platform>" string.

        NOTE(review): the *done* parameter is currently unused.
        """
        project = self.project
        if project is None:
            project = 'solution'
        return f'{project} for {self.config}|{self.platform}'
    def getCurrentSummary(self):
        return {'step': 'building ' + self.describe_project()}
    def getResultSummary(self):
        return {'step': 'built ' + self.describe_project()}
    @defer.inlineCallbacks
    def run(self):
        if self.platform is None:
            config.error('platform is mandatory. Please specify a string such as "Win32"')
        self.updateSummary()
        # source the VS environment, then invoke msbuild in the same shell
        command = (
            f'"%VCENV_BAT%" x86 && msbuild "{self.projectfile}" '
            f'/p:Configuration="{self.config}" /p:Platform="{self.platform}" /maxcpucount'
        )
        command += _msbuild_format_target_parameter(self.mode, self.project)
        command += _msbuild_format_defines_parameter(self.defines)
        self.command = command
        res = yield super().run()
        return res
# backwards-compatible alias
MsBuild = MsBuild4
class MsBuild12(MsBuild4):
    # same as MsBuild4, but sources the VS 2013 (%VS120COMNTOOLS%) environment
    vcenv_bat = r"${VS120COMNTOOLS}..\..\VC\vcvarsall.bat"
class MsBuild14(MsBuild4):
    # same as MsBuild4, but sources the VS 2015 (%VS140COMNTOOLS%) environment
    vcenv_bat = r"${VS140COMNTOOLS}..\..\VC\vcvarsall.bat"
class MsBuild141(VisualStudio):
    """Build with MSBuild from VS 2017+; the install is located at run time
    with ``vswhere.exe`` instead of a COMNTOOLS environment variable."""
    platform = None
    defines = None
    # relative to the installationPath reported by vswhere
    vcenv_bat = r"\VC\Auxiliary\Build\vcvarsall.bat"
    renderables = ['platform']
    # vswhere -version range; subclasses narrow this to newer VS releases
    version_range = "[15.0,16.0)"
    def __init__(self, platform, defines=None, **kwargs):
        """*platform* (e.g. "Win32") is mandatory; *defines* is an optional
        list of preprocessor constants."""
        self.platform = platform
        self.defines = defines
        super().__init__(**kwargs)
    def setupEnvironment(self):
        super().setupEnvironment()
        self.env['VCENV_BAT'] = self.vcenv_bat
        # vswhere.exe lives in the VS Installer directory (x86 or x64 layout)
        self.add_env_path("PATH", 'C:\\Program Files (x86)\\Microsoft Visual Studio\\Installer\\')
        self.add_env_path("PATH", 'C:\\Program Files\\Microsoft Visual Studio\\Installer\\')
        self.add_env_path("PATH", r'${PATH}')
    def describe_project(self, done=False):
        """Return a human-readable "<project> for <config>|<platform>" string.

        NOTE(review): the *done* parameter is currently unused.
        """
        project = self.project
        if project is None:
            project = 'solution'
        return f'{project} for {self.config}|{self.platform}'
    @defer.inlineCallbacks
    def run(self):
        if self.platform is None:
            config.error('platform is mandatory. Please specify a string such as "Win32"')
        self.description = 'building ' + self.describe_project()
        self.descriptionDone = 'built ' + self.describe_project()
        self.updateSummary()
        # cmd.exe FOR /F loop: ask vswhere for the installation path, source
        # vcvarsall from it, then run msbuild in the same shell
        command = (
            'FOR /F "tokens=*" %%I in '
            f'(\'vswhere.exe -version "{self.version_range}" -products * '
            '-property installationPath\') '
            f' do "%%I\\%VCENV_BAT%" x86 && msbuild "{self.projectfile}" '
            f'/p:Configuration="{self.config}" /p:Platform="{self.platform}" /maxcpucount'
        )
        command += _msbuild_format_target_parameter(self.mode, self.project)
        command += _msbuild_format_defines_parameter(self.defines)
        self.command = command
        res = yield super().run()
        return res
# backwards-compatible alias
MsBuild15 = MsBuild141
class MsBuild16(MsBuild141):
    # vswhere range selecting a 16.x (Visual Studio 2019) installation
    version_range = "[16.0,17.0)"
class MsBuild17(MsBuild141):
    # vswhere range selecting a 17.x (Visual Studio 2022) installation
    version_range = "[17.0,18.0)"
| 18,240 | Python | .py | 446 | 32.919283 | 98 | 0.623059 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,866 | download_secret_to_worker.py | buildbot_buildbot/master/buildbot/steps/download_secret_to_worker.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import stat
from twisted.internet import defer
from buildbot.process.buildstep import FAILURE
from buildbot.process.buildstep import SUCCESS
from buildbot.process.buildstep import BuildStep
from buildbot.process.results import worst_status
from buildbot.steps.worker import CompositeStepMixin
class DownloadSecretsToWorker(BuildStep, CompositeStepMixin):
    """Write a list of ``(path, secret_value)`` pairs to files on the worker."""

    renderables = ['secret_to_be_populated']

    def __init__(self, populated_secret_list, **kwargs):
        super().__init__(**kwargs)
        self.secret_to_be_populated = populated_secret_list

    @defer.inlineCallbacks
    def runPopulateSecrets(self):
        """Download every secret to its target path; return the worst status."""
        overall = SUCCESS
        for path, secretvalue in self.secret_to_be_populated:
            if not isinstance(path, str):
                raise ValueError(f"Secret path {path} is not a string")
            self.secret_to_be_interpolated = secretvalue
            # secrets are written readable/writable by the worker user only
            status = yield self.downloadFileContentToWorker(
                path, self.secret_to_be_interpolated, mode=stat.S_IRUSR | stat.S_IWUSR
            )
            overall = worst_status(overall, status)
        return overall

    @defer.inlineCallbacks
    def run(self):
        populate_result = yield self.runPopulateSecrets()
        return populate_result
class RemoveWorkerFileSecret(BuildStep, CompositeStepMixin):
    """Delete secret files previously written to the worker."""

    renderables = ['secret_to_be_populated']

    def __init__(self, populated_secret_list, logEnviron=False, **kwargs):
        super().__init__(**kwargs)
        self.logEnviron = logEnviron
        self.secret_to_be_populated = populated_secret_list

    @defer.inlineCallbacks
    def runRemoveWorkerFileSecret(self):
        """Remove each secret file; FAILURE if any single removal failed."""
        statuses = []
        for path, _ in self.secret_to_be_populated:
            removal_status = yield self.runRmFile(path, abandonOnFailure=False)
            statuses.append(removal_status)
        return FAILURE if FAILURE in statuses else SUCCESS

    @defer.inlineCallbacks
    def run(self):
        removal_result = yield self.runRemoveWorkerFileSecret()
        return removal_result
| 2,738 | Python | .py | 63 | 37.111111 | 86 | 0.711495 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,867 | http.py | buildbot_buildbot/master/buildbot/steps/http.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from twisted.internet import defer
from twisted.internet import reactor
from buildbot import config
from buildbot.process.buildstep import FAILURE
from buildbot.process.buildstep import SUCCESS
from buildbot.process.buildstep import BuildStep
# use the 'requests' lib: https://requests.readthedocs.io/en/master/
try:
import requests
import txrequests
except ImportError:
txrequests = None
# This step uses a global Session object, which encapsulates a thread pool as
# well as state such as cookies and authentication. This state may pose
# problems for users, where one step may get a cookie that is subsequently used
# by another step in a different build.
_session = None
def getSession():
    """Return the process-wide txrequests Session, creating it on first use.

    The session is shared by all HTTP steps (see the module comment about
    shared cookies/auth state) and is closed when the reactor shuts down.
    """
    global _session
    if _session is None:
        _session = txrequests.Session()
        # make sure the session's thread pool is torn down cleanly
        reactor.addSystemEventTrigger("before", "shutdown", closeSession)
    return _session
def setSession(session):
    """Replace the process-wide session object (e.g. with a stub)."""
    global _session
    _session = session
def closeSession():
    """Close and discard the process-wide session, if one exists."""
    global _session
    if _session is not None:
        _session.close()
        _session = None
def _headerSet(headers):
return frozenset(map(lambda x: x.casefold(), headers))
class HTTPStep(BuildStep):
    """Perform an HTTP request from the master via txrequests.

    The step succeeds for any response status below 400.  Request/response
    details are written to a 'log' log; the response body also goes to a
    separate 'content' log.  Header names listed in hide_request_headers /
    hide_response_headers are replaced with '<HIDDEN>' in the log.
    """
    name = 'HTTPStep'
    description = 'Requesting'
    descriptionDone = 'Requested'
    # kwargs forwarded verbatim to requests.Session.request()
    requestsParams = [
        "params",
        "data",
        "json",
        "headers",
        "cookies",
        "files",
        "auth",
        "timeout",
        "allow_redirects",
        "proxies",
        "hooks",
        "stream",
        "verify",
        "cert",
    ]
    renderables = [*requestsParams, "method", "url"]
    session = None
    def __init__(
        self, url, method, hide_request_headers=None, hide_response_headers=None, **kwargs
    ):
        """Validate *method* and stash all requests parameters as attributes
        (so they can be rendered); remaining kwargs go to BuildStep."""
        if txrequests is None:
            config.error("Need to install txrequest to use this step:\n\n pip install txrequests")
        if method not in ('POST', 'GET', 'PUT', 'DELETE', 'HEAD', 'OPTIONS'):
            config.error(f"Wrong method given: '{method}' is not known")
        self.method = method
        self.url = url
        self.hide_request_headers = _headerSet(hide_request_headers or [])
        self.hide_response_headers = _headerSet(hide_response_headers or [])
        for param in self.requestsParams:
            setattr(self, param, kwargs.pop(param, None))
        super().__init__(**kwargs)
    @defer.inlineCallbacks
    def run(self):
        # create a new session if it doesn't exist
        self.session = getSession()
        # collect only the parameters that were actually configured
        requestkwargs = {'method': self.method, 'url': self.url}
        for param in self.requestsParams:
            value = getattr(self, param, None)
            if value is not None:
                requestkwargs[param] = value
        log = yield self.addLog('log')
        # known methods already tested in __init__
        yield log.addHeader(f'Performing {self.method} request to {self.url}\n')
        if self.params:
            yield log.addHeader('Parameters:\n')
            # sort for a deterministic log (and pass the sorted pairs on)
            params = sorted(self.params.items(), key=lambda x: x[0])
            requestkwargs['params'] = params
            for k, v in params:
                yield log.addHeader(f'\t{k}: {v}\n')
        data = requestkwargs.get("data", None)
        if data:
            yield log.addHeader('Data:\n')
            if isinstance(data, dict):
                for k, v in data.items():
                    yield log.addHeader(f'\t{k}: {v}\n')
            else:
                yield log.addHeader(f'\t{data}\n')
        try:
            r = yield self.session.request(**requestkwargs)
        except requests.exceptions.ConnectionError as e:
            yield log.addStderr(f'An exception occurred while performing the request: {e}')
            return FAILURE
        # log each intermediate response of a redirect chain before the final one
        if r.history:
            yield log.addStdout(f'\nRedirected {len(r.history)} times:\n\n')
            for rr in r.history:
                yield self.log_response(log, rr)
            yield log.addStdout('=' * 60 + '\n')
        yield self.log_response(log, r)
        yield log.finish()
        self.descriptionDone = [f"Status code: {r.status_code}"]
        if r.status_code < 400:
            return SUCCESS
        else:
            return FAILURE
    @defer.inlineCallbacks
    def log_response(self, log, response):
        """Write one request/response pair to *log*, masking hidden headers."""
        yield log.addHeader('Request Headers:\n')
        for k, v in response.request.headers.items():
            if k.casefold() in self.hide_request_headers:
                v = '<HIDDEN>'
            yield log.addHeader(f'\t{k}: {v}\n')
        yield log.addStdout(f'URL: {response.url}\n')
        # non-200 statuses go to stderr so they stand out in the log viewer
        if response.status_code == requests.codes.ok:
            yield log.addStdout(f'Status: {response.status_code}\n')
        else:
            yield log.addStderr(f'Status: {response.status_code}\n')
        yield log.addHeader('Response Headers:\n')
        for k, v in response.headers.items():
            if k.casefold() in self.hide_response_headers:
                v = '<HIDDEN>'
            yield log.addHeader(f'\t{k}: {v}\n')
        yield log.addStdout(f' ------ Content ------\n{response.text}')
        # the body is duplicated into its own log for easier inspection
        content_log = yield self.addLog('content')
        yield content_log.addStdout(response.text)
# Thin convenience subclasses: each pins the HTTP method and forwards all
# other arguments to HTTPStep.
class POST(HTTPStep):
    def __init__(self, url, **kwargs):
        super().__init__(url, method='POST', **kwargs)
class GET(HTTPStep):
    def __init__(self, url, **kwargs):
        super().__init__(url, method='GET', **kwargs)
class PUT(HTTPStep):
    def __init__(self, url, **kwargs):
        super().__init__(url, method='PUT', **kwargs)
class DELETE(HTTPStep):
    def __init__(self, url, **kwargs):
        super().__init__(url, method='DELETE', **kwargs)
class HEAD(HTTPStep):
    def __init__(self, url, **kwargs):
        super().__init__(url, method='HEAD', **kwargs)
class OPTIONS(HTTPStep):
    def __init__(self, url, **kwargs):
        super().__init__(url, method='OPTIONS', **kwargs)
| 6,638 | Python | .py | 164 | 32.731707 | 98 | 0.628892 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,868 | master.py | buildbot_buildbot/master/buildbot/steps/master.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import os
import pprint
import re
from twisted.internet import defer
from twisted.internet import reactor
from twisted.python import runtime
from buildbot.process.buildstep import CANCELLED
from buildbot.process.buildstep import FAILURE
from buildbot.process.buildstep import SUCCESS
from buildbot.process.buildstep import BuildStep
from buildbot.util import deferwaiter
from buildbot.util import runprocess
class MasterShellCommand(BuildStep):
    """
    Run a shell command locally - on the buildmaster. The shell command
    COMMAND is specified just as for a RemoteShellCommand. Note that extra
    logfiles are not supported.
    """

    name = 'MasterShellCommand'
    description = 'Running'
    descriptionDone = 'Ran'
    descriptionSuffix = None
    renderables = ['command', 'env']
    haltOnFailure = True
    flunkOnFailure = True

    def __init__(self, command, **kwargs):
        self.env = kwargs.pop('env', None)
        self.usePTY = kwargs.pop('usePTY', 0)
        self.interruptSignal = kwargs.pop('interruptSignal', 'KILL')
        self.logEnviron = kwargs.pop('logEnviron', True)
        super().__init__(**kwargs)
        self.command = command
        self.process = None
        self.masterWorkdir = self.workdir
        self._deferwaiter = deferwaiter.DeferWaiter()

    def _build_argv(self, command):
        """Turn *command* (string or list) into an argv list for this platform."""
        if isinstance(command, (str, bytes)):
            if runtime.platformType == 'win32':
                # allow %COMSPEC% to have args
                argv = os.environ['COMSPEC'].split()
                if '/c' not in argv:
                    argv += ['/c']
                argv += [command]
            else:
                # for posix, use /bin/sh. for other non-posix, well, doesn't
                # hurt to try
                argv = ['/bin/sh', '-c', command]
        else:
            if runtime.platformType == 'win32':
                # allow %COMSPEC% to have args
                argv = os.environ['COMSPEC'].split()
                if '/c' not in argv:
                    argv += ['/c']
                argv += list(command)
            else:
                argv = command
        return argv

    def _build_env(self):
        """Compute the environment dict to pass to the subprocess.

        When ``env`` was configured: list values are joined with os.pathsep,
        ``${name}`` references are substituted from the master's environment,
        and keys present in os.environ but absent from the result are mapped
        to None so the subprocess does not inherit them.
        """
        os_env = os.environ
        if self.env is None:
            env = os_env
        else:
            assert isinstance(self.env, dict)
            env = self.env
            for key, v in self.env.items():
                if isinstance(v, list):
                    # Need to do os.pathsep translation. We could either do that
                    # by replacing all incoming ':'s with os.pathsep, or by
                    # accepting lists. I like lists better.
                    # If it's not a string, treat it as a sequence to be
                    # turned in to a string.
                    self.env[key] = os.pathsep.join(self.env[key])

        # do substitution on variable values matching pattern: ${name}
        p = re.compile(r'\${([0-9a-zA-Z_]*)}')

        def subst(match):
            return os.environ.get(match.group(1), "")

        newenv = {}
        for key, v in env.items():
            if v is not None:
                if not isinstance(v, (str, bytes)):
                    raise RuntimeError(
                        f"'env' values must be strings or lists; key '{key}' is incorrect"
                    )
                newenv[key] = p.sub(subst, env[key])

        # RunProcess will take environment values from os.environ in cases of env not having
        # the keys that are in os.environ. Prevent this by putting None into those keys.
        # Bugfix: the Nones must go into the dict that is actually passed to the
        # process (newenv); the previous code wrote them into a dict that was
        # discarded immediately afterwards (and mutated the user's env dict).
        for key in os_env:
            if key not in newenv:
                newenv[key] = None

        return newenv

    @defer.inlineCallbacks
    def run(self):
        # self.command has already been rendered by the renderables machinery
        command = self.command
        argv = self._build_argv(command)

        self.stdio_log = yield self.addLog("stdio")

        if isinstance(command, (str, bytes)):
            yield self.stdio_log.addHeader(command.strip() + "\n\n")
        else:
            yield self.stdio_log.addHeader(" ".join(command) + "\n\n")
        yield self.stdio_log.addHeader("** RUNNING ON BUILDMASTER **\n")
        yield self.stdio_log.addHeader(f" in dir {os.getcwd()}\n")
        yield self.stdio_log.addHeader(f" argv: {argv}\n")

        env = self._build_env()
        if self.logEnviron:
            yield self.stdio_log.addHeader(f" env: {env!r}\n")

        if self.stopped:
            return CANCELLED

        # route process output into the stdio log as it arrives
        on_stdout = lambda data: self._deferwaiter.add(self.stdio_log.addStdout(data))
        on_stderr = lambda data: self._deferwaiter.add(self.stdio_log.addStderr(data))

        # TODO add a timeout?
        self.process = runprocess.create_process(
            reactor,
            argv,
            workdir=self.masterWorkdir,
            use_pty=self.usePTY,
            env=env,
            collect_stdout=on_stdout,
            collect_stderr=on_stderr,
        )
        yield self.process.start()
        yield self._deferwaiter.wait()

        if self.process.result_signal is not None:
            yield self.stdio_log.addHeader(f"signal {self.process.result_signal}\n")
            self.descriptionDone = [f"killed ({self.process.result_signal})"]
            return FAILURE
        elif self.process.result_rc != 0:
            # Bugfix: log the exit status (result_rc); the previous code logged
            # result_signal, which is always None on this branch.
            yield self.stdio_log.addHeader(f"exit status {self.process.result_rc}\n")
            self.descriptionDone = [f"failed ({self.process.result_rc})"]
            return FAILURE
        else:
            return SUCCESS

    @defer.inlineCallbacks
    def interrupt(self, reason):
        yield super().interrupt(reason)
        if self.process is not None:
            self.process.send_signal(self.interruptSignal)
class SetProperty(BuildStep):
    """Set a single build property to a (renderable) value."""

    name = 'SetProperty'
    description = ['Setting']
    descriptionDone = ['Set']
    renderables = ['property', 'value']

    def __init__(self, property, value, **kwargs):
        super().__init__(**kwargs)
        self.property = property
        self.value = value

    def run(self):
        build_properties = self.build.getProperties()
        build_properties.setProperty(self.property, self.value, self.name, runtime=True)
        return defer.succeed(SUCCESS)
class SetProperties(BuildStep):
    """Set several build properties at once from a (renderable) mapping."""

    name = 'SetProperties'
    description = ['Setting Properties..']
    descriptionDone = ['Properties Set']
    renderables = ['properties']

    def __init__(self, properties=None, **kwargs):
        super().__init__(**kwargs)
        self.properties = properties

    def run(self):
        # a missing mapping is simply a no-op success
        if self.properties is not None:
            for prop_name, prop_value in self.properties.items():
                self.setProperty(prop_name, prop_value, self.name, runtime=True)
        return defer.succeed(SUCCESS)
class Assert(BuildStep):
    """Succeed when the (renderable) ``check`` value is truthy, fail otherwise."""

    name = 'Assert'
    description = ['Checking..']
    descriptionDone = ["checked"]
    renderables = ['check']

    def __init__(self, check, **kwargs):
        super().__init__(**kwargs)
        self.check = check
        # note: this shows the unrendered form; it is refined once rendered
        self.descriptionDone = [f"checked {self.check!r}"]

    def run(self):
        outcome = SUCCESS if self.check else FAILURE
        return defer.succeed(outcome)
class LogRenderable(BuildStep):
    """Pretty-print an arbitrary (renderable) value into a step log."""

    name = 'LogRenderable'
    description = ['Logging']
    descriptionDone = ['Logged']
    renderables = ['content']

    def __init__(self, content, **kwargs):
        super().__init__(**kwargs)
        self.content = content

    @defer.inlineCallbacks
    def run(self):
        formatted = pprint.pformat(self.content)
        yield self.addCompleteLog(name='Output', text=formatted)
        return SUCCESS
| 8,276 | Python | .py | 200 | 31.915 | 97 | 0.607965 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,869 | util.py | buildbot_buildbot/master/buildbot/steps/package/util.py | # This program is free software; you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Portions Copyright Buildbot Team Members
# Portions Copyright Marius Rieder <[email protected]>
from buildbot.process import logobserver
class WEObserver(logobserver.LogLineObserver):
    """Collect 'W: ' and 'E: ' prefixed stdout lines into separate buckets."""

    def __init__(self):
        super().__init__()
        self.warnings = []
        self.errors = []

    def outLineReceived(self, line):
        # a line can match at most one prefix, so stop at the first hit
        for prefix, bucket in (('W: ', self.warnings), ('E: ', self.errors)):
            if line.startswith(prefix):
                bucket.append(line)
                break
| 1,133 | Python | .py | 26 | 39.615385 | 79 | 0.73412 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,870 | __init__.py | buildbot_buildbot/master/buildbot/steps/package/__init__.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Portions Copyright Buildbot Team Members
# Portions Copyright Steve 'Ashcrow' Milner <[email protected]>
"""
Steps specific to package formats.
"""
| 831 | Python | .py | 18 | 45.166667 | 79 | 0.788438 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,871 | lintian.py | buildbot_buildbot/master/buildbot/steps/package/deb/lintian.py | # This program is free software; you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Portions Copyright Buildbot Team Members
# Portions Copyright Marius Rieder <[email protected]>
"""
Steps and objects related to lintian
"""
from __future__ import annotations
from twisted.internet import defer
from buildbot import config
from buildbot.process import buildstep
from buildbot.process import logobserver
from buildbot.process.results import FAILURE
from buildbot.process.results import SUCCESS
from buildbot.process.results import WARNINGS
from buildbot.steps.package import util as pkgutil
class MaxQObserver(logobserver.LogLineObserver):
    """Count the 'TEST FAILURE:' lines seen on stdout."""

    def __init__(self):
        super().__init__()
        self.failures = 0

    def outLineReceived(self, line):
        # tally each reported failure
        if line.startswith('TEST FAILURE:'):
            self.failures = self.failures + 1
class DebLintian(buildstep.ShellMixin, buildstep.BuildStep):
    """Run ``lintian`` over a built package and summarize its W:/E: output."""

    name = "lintian"
    description = "Lintian running"
    descriptionDone = "Lintian"

    fileloc = None
    suppressTags: list[str] = []

    # lintian complaints should warn the build, not sink it
    flunkOnFailure = False
    warnOnFailure = True

    def __init__(self, fileloc=None, suppressTags=None, **kwargs):
        kwargs = self.setupShellMixin(kwargs)
        super().__init__(**kwargs)
        if fileloc:
            self.fileloc = fileloc
        if suppressTags:
            self.suppressTags = suppressTags
        if not self.fileloc:
            config.error("You must specify a fileloc")
        command = ["lintian", "-v", self.fileloc]
        for tag in self.suppressTags:
            command += ['--suppress-tags', tag]
        self.command = command
        self.obs = pkgutil.WEObserver()
        self.addLogObserver('stdio', self.obs)

    @defer.inlineCallbacks
    def run(self):
        cmd = yield self.makeRemoteShellCommand()
        yield self.runCommand(cmd)

        stdio_log = yield self.getLog('stdio')
        yield stdio_log.finish()

        collected_warnings = self.obs.warnings
        collected_errors = self.obs.errors
        if collected_warnings:
            yield self.addCompleteLog(
                f'{len(collected_warnings)} Warnings', "\n".join(collected_warnings)
            )
        if collected_errors:
            yield self.addCompleteLog(
                f'{len(collected_errors)} Errors', "\n".join(collected_errors)
            )

        if cmd.rc != 0 or collected_errors:
            return FAILURE
        if collected_warnings:
            return WARNINGS
        return SUCCESS
| 2,927 | Python | .py | 74 | 33.472973 | 87 | 0.696082 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,872 | pbuilder.py | buildbot_buildbot/master/buildbot/steps/package/deb/pbuilder.py | # This program is free software; you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Portions Copyright Buildbot Team Members
# Portions Copyright Marius Rieder <[email protected]>
"""
Steps and objects related to pbuilder
"""
from __future__ import annotations
import re
import stat
import time
from twisted.internet import defer
from twisted.python import log
from buildbot import config
from buildbot.process import logobserver
from buildbot.process import remotecommand
from buildbot.process import results
from buildbot.steps.shell import WarningCountingShellCommand
class DebPbuilder(WarningCountingShellCommand):
    """Build a debian package with pbuilder inside of a chroot."""

    name = "pbuilder"

    haltOnFailure = True
    flunkOnFailure = True
    description = ["building"]
    descriptionDone = ["built"]

    # Matches compiler-style "warning:" lines as well as "W:" lines.
    warningPattern = r".*(warning[: ]|\sW: ).*"

    architecture = None
    distribution: str | None = 'stable'
    basetgz = None
    # Template for the chroot base environment path; placeholders are filled
    # in run() with the rendered distribution and architecture.
    _default_basetgz = "/var/cache/pbuilder/{distribution}-{architecture}-buildbot.tgz"
    mirror = "http://cdn.debian.net/debian/"
    othermirror = ""
    extrapackages: list[str] = []
    keyring = None
    components: str | None = None

    # Refresh the base environment with "--update" when it is older than
    # this many seconds (one week).
    maxAge = 60 * 60 * 24 * 7
    pbuilder = '/usr/sbin/pbuilder'
    # Subclasses (cowbuilder) override this together with _default_basetgz.
    baseOption = '--basetgz'

    renderables = [
        'architecture',
        'distribution',
        'basetgz',
        'mirror',
        'othermirror',
        'extrapackages',
        'keyring',
        'components',
    ]

    def __init__(
        self,
        architecture=None,
        distribution=None,
        basetgz=None,
        mirror=None,
        othermirror=None,
        extrapackages=None,
        keyring=None,
        components=None,
        **kwargs,
    ):
        """
        @param architecture: architecture passed to pdebuild/pbuilder
            (defaults to 'local' behavior in run()).
        @param distribution: distribution to build against; required
            (class default: 'stable').
        @param basetgz: explicit base environment path; overrides the
            _default_basetgz template.
        @param mirror: primary APT mirror used when creating the chroot.
        @param othermirror: list of extra APT source lines, joined with '|'.
        @param extrapackages: extra packages installed into the chroot.
        @param keyring: keyring handed to debootstrap via --debootstrapopts.
        @param components: APT components string (e.g. "main universe").
        """
        super().__init__(**kwargs)
        if architecture:
            self.architecture = architecture
        if distribution:
            self.distribution = distribution
        if mirror:
            self.mirror = mirror
        if othermirror:
            self.othermirror = "|".join(othermirror)
        if extrapackages:
            self.extrapackages = extrapackages
        if keyring:
            self.keyring = keyring
        if components:
            self.components = components
        if basetgz:
            self.basetgz = basetgz

        if not self.distribution:
            config.error("You must specify a distribution.")

        # A missing ~/.pbuilderrc is harmless; don't count it as a warning.
        self.suppressions.append((None, re.compile(r"\.pbuilderrc does not exist"), None, None))

        self.addLogObserver('stdio', logobserver.LineConsumerLogObserver(self.logConsumer))

    @defer.inlineCallbacks
    def run(self):
        # Resolve the base environment path now that renderables are final.
        if self.basetgz is None:
            self.basetgz = self._default_basetgz

        kwargs = {}
        if self.architecture:
            kwargs['architecture'] = self.architecture
        else:
            kwargs['architecture'] = 'local'
        kwargs['distribution'] = self.distribution
        self.basetgz = self.basetgz.format(**kwargs)

        self.command = ['pdebuild', '--buildresult', '.', '--pbuilder', self.pbuilder]
        if self.architecture:
            self.command += ['--architecture', self.architecture]
        self.command += ['--', '--buildresult', '.', self.baseOption, self.basetgz]
        if self.extrapackages:
            self.command += ['--extrapackages', " ".join(self.extrapackages)]

        # Create or refresh the chroot before attempting the build.
        res = yield self.checkBasetgz()
        if res != results.SUCCESS:
            return res

        res = yield super().run()
        return res

    @defer.inlineCallbacks
    def checkBasetgz(self):
        """Ensure the pbuilder base environment exists and is fresh.

        Creates it with "--create" when the stat fails; refreshes it with
        "--update" when it is older than maxAge seconds.
        """
        cmd = remotecommand.RemoteCommand('stat', {'file': self.basetgz})
        yield self.runCommand(cmd)

        if cmd.rc != 0:
            log.msg("basetgz not found, initializing it.")

            command = [
                'sudo',
                self.pbuilder,
                '--create',
                self.baseOption,
                self.basetgz,
                '--distribution',
                self.distribution,
                '--mirror',
                self.mirror,
            ]
            if self.othermirror:
                command += ['--othermirror', self.othermirror]
            if self.architecture:
                command += ['--architecture', self.architecture]
            if self.extrapackages:
                command += ['--extrapackages', " ".join(self.extrapackages)]
            if self.keyring:
                command += ['--debootstrapopts', f"--keyring={self.keyring}"]
            if self.components:
                command += ['--components', self.components]

            cmd = remotecommand.RemoteShellCommand(self.workdir, command)

            stdio_log = yield self.addLog("pbuilder")
            cmd.useLog(stdio_log, True, "stdio")

            self.description = ["PBuilder", "create."]
            yield self.updateSummary()
            yield self.runCommand(cmd)
            if cmd.rc != 0:
                log.msg(f"Failure when running {cmd}.")
                return results.FAILURE
            return results.SUCCESS
        s = cmd.updates["stat"][-1]
        # basetgz will be a file when running in pbuilder
        # and a directory in case of cowbuilder
        if stat.S_ISREG(s[stat.ST_MODE]) or stat.S_ISDIR(s[stat.ST_MODE]):
            log.msg(f"{self.basetgz} found.")
            age = time.time() - s[stat.ST_MTIME]
            if age >= self.maxAge:
                log.msg("basetgz outdated, updating")
                command = ['sudo', self.pbuilder, '--update', self.baseOption, self.basetgz]
                cmd = remotecommand.RemoteShellCommand(self.workdir, command)
                stdio_log = yield self.addLog("pbuilder")
                cmd.useLog(stdio_log, True, "stdio")
                yield self.runCommand(cmd)
                if cmd.rc != 0:
                    log.msg(f"Failure when running {cmd}.")
                    return results.FAILURE
            return results.SUCCESS
        log.msg(f"{self.basetgz} is not a file or a directory.")
        return results.FAILURE

    def logConsumer(self):
        # Watch the build output for the generated .changes file and expose
        # its name as the "deb-changes" property.
        r = re.compile(r"dpkg-genchanges >\.\./(.+\.changes)")
        while True:
            _, line = yield
            mo = r.search(line)
            if mo:
                self.setProperty("deb-changes", mo.group(1), "DebPbuilder")
class DebCowbuilder(DebPbuilder):
    """Build a debian package with cowbuilder inside of a chroot."""

    name = "cowbuilder"

    # cowbuilder keeps its base environment as a directory rather than a
    # tarball (see checkBasetgz in DebPbuilder), hence the .cow/ path and
    # the --basepath option.
    _default_basetgz = "/var/cache/pbuilder/{distribution}-{architecture}-buildbot.cow/"

    pbuilder = '/usr/sbin/cowbuilder'
    baseOption = '--basepath'
class UbuPbuilder(DebPbuilder):
    """Build a Ubuntu package with pbuilder inside of a chroot."""

    # No sensible default release; callers must pass distribution explicitly
    # (DebPbuilder.__init__ raises a config error when it is missing).
    distribution = None
    mirror = "http://archive.ubuntu.com/ubuntu/"

    components = "main universe"
class UbuCowbuilder(DebCowbuilder):
    """Build a Ubuntu package with cowbuilder inside of a chroot."""

    # No sensible default release; callers must pass distribution explicitly
    # (DebPbuilder.__init__ raises a config error when it is missing).
    distribution = None
    mirror = "http://archive.ubuntu.com/ubuntu/"

    components = "main universe"
| 7,638 | Python | .py | 193 | 30.502591 | 96 | 0.614813 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,873 | __init__.py | buildbot_buildbot/master/buildbot/steps/package/rpm/__init__.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Portions Copyright Buildbot Team Members
# Portions Copyright Steve 'Ashcrow' Milner <[email protected]>
"""
Steps specific to the rpm format.
"""
from buildbot.steps.package.rpm.mock import MockBuildSRPM
from buildbot.steps.package.rpm.mock import MockRebuild
from buildbot.steps.package.rpm.rpmbuild import RpmBuild
from buildbot.steps.package.rpm.rpmlint import RpmLint
# Names re-exported as the public API of buildbot.steps.package.rpm.
__all__ = ['RpmBuild', 'RpmLint', 'MockBuildSRPM', 'MockRebuild']
| 1,124 | Python | .py | 23 | 47.782609 | 79 | 0.796178 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,874 | mock.py | buildbot_buildbot/master/buildbot/steps/package/rpm/mock.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Portions Copyright Buildbot Team Members
# Portions Copyright Marius Rieder <[email protected]>
import re
from twisted.internet import defer
from buildbot import config
from buildbot.process import buildstep
from buildbot.process import logobserver
class MockStateObserver(logobserver.LogLineObserver):
    """Track phase transitions in mock's state.log (format as of mock 1.1.23).

    Each "Start:"/"Finish:" line updates the owning step's description
    suffix so the step summary shows the phase currently running.
    """

    _line_re = re.compile(r'^.*(Start|Finish): (.*)$')

    def outLineReceived(self, line):
        match = self._line_re.search(line.strip())
        if match is None:
            return
        event, phase = match.group(1), match.group(2)
        self.step.descriptionSuffix = [f"[{phase}]"] if event == "Start" else None
        self.step.updateSummary()
class Mock(buildstep.ShellMixin, buildstep.CommandMixin, buildstep.BuildStep):
    """Add the mock logfiles and clean them if they already exist. Add support
    for the root and resultdir parameter of mock."""

    name = "mock"

    renderables = ["root", "resultdir"]

    haltOnFailure = True
    flunkOnFailure = True

    # Logfiles produced by mock; tracked as step logs and removed before
    # each run so stale content is not re-attached.
    mock_logfiles = ['build.log', 'root.log', 'state.log']

    root = None
    resultdir = None

    def __init__(self, root=None, resultdir=None, **kwargs):
        """
        @param root: name of the mock chroot configuration (--root);
            required.
        @param resultdir: directory mock writes its results and logs to
            (--resultdir); optional.
        """
        kwargs = self.setupShellMixin(kwargs, prohibitArgs=['command'])
        super().__init__(**kwargs)
        if root:
            self.root = root
        if resultdir:
            self.resultdir = resultdir
        if not self.root:
            config.error("You must specify a mock root")

        self.command = ['mock', '--root', self.root]
        if self.resultdir:
            self.command += ['--resultdir', self.resultdir]

    @defer.inlineCallbacks
    def run(self):
        # Register the mock logfiles as step logs, relative to resultdir
        # when one was configured.
        # Try to remove the old mock logs first.
        if self.resultdir:
            for lname in self.mock_logfiles:
                self.logfiles[lname] = self.build.path_module.join(self.resultdir, lname)
        else:
            for lname in self.mock_logfiles:
                self.logfiles[lname] = lname

        # state.log drives the live description suffix (current mock phase).
        self.addLogObserver('state.log', MockStateObserver())

        # Delete any leftover mock logs from a previous run.
        yield self.runRmdir([
            self.build.path_module.join('build', self.logfiles[l]) for l in self.mock_logfiles
        ])

        cmd = yield self.makeRemoteShellCommand()
        yield self.runCommand(cmd)
        return cmd.results()

    def getResultSummary(self):
        # Drop the transient phase suffix set by MockStateObserver before
        # rendering the final summary.
        self.descriptionSuffix = None
        return super().getResultSummary()
class MockBuildSRPM(Mock):
    """Build a srpm within a mock. Requires a spec file and a sources dir."""

    name = "mockbuildsrpm"

    description = ["mock buildsrpm"]
    descriptionDone = ["mock buildsrpm"]

    spec = None
    sources = '.'

    def __init__(self, spec=None, sources=None, **kwargs):
        """
        Creates the MockBuildSRPM object.

        @type spec: str
        @param spec: the path of the specfiles.
        @type sources: str
        @param sources: the path of the sources dir.
        @type kwargs: dict
        @param kwargs: All further keyword arguments.
        """
        super().__init__(**kwargs)
        if spec:
            self.spec = spec
        if sources:
            self.sources = sources
        if not self.spec:
            config.error("You must specify a spec file")
        if not self.sources:
            config.error("You must specify a sources dir")

        self.command += ['--buildsrpm', '--spec', self.spec, '--sources', self.sources]

        self.addLogObserver('stdio', logobserver.LineConsumerLogObserver(self.logConsumer))

    def logConsumer(self):
        # Watch mock's output for the written .src.rpm and publish its
        # basename as the 'srpm' property.
        # NOTE(review): the dots before 'src' and 'rpm' are unescaped and
        # match any character; intended pattern is probably
        # r"Wrote: .*/([^/]*\.src\.rpm)" — confirm before tightening.
        r = re.compile(r"Wrote: .*/([^/]*.src.rpm)")
        while True:
            _, line = yield
            m = r.search(line)
            if m:
                self.setProperty("srpm", m.group(1), 'MockBuildSRPM')
class MockRebuild(Mock):
    """Rebuild an existing source RPM inside a mock chroot.

    Requires the path of a .src.rpm file.
    """

    name = "mock"

    description = ["mock rebuilding srpm"]
    descriptionDone = ["mock rebuild srpm"]

    srpm = None

    def __init__(self, srpm=None, **kwargs):
        """Create the MockRebuild step.

        @type srpm: str
        @param srpm: path of the source RPM to rebuild; required.
        @type kwargs: dict
        @param kwargs: all further keyword arguments, passed to Mock.
        """
        super().__init__(**kwargs)
        if srpm:
            self.srpm = srpm
        if not self.srpm:
            config.error("You must specify a srpm")
        self.command.extend(['--rebuild', self.srpm])
| 5,200 | Python | .py | 128 | 32.859375 | 94 | 0.638767 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,875 | rpmbuild.py | buildbot_buildbot/master/buildbot/steps/package/rpm/rpmbuild.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Portions Copyright Buildbot Team Members
# Portions Copyright Dan Radez <[email protected]>
# Portions Copyright Steve 'Ashcrow' Milner <[email protected]>
import os
from twisted.internet import defer
from buildbot import config
from buildbot.process import buildstep
from buildbot.process import logobserver
class RpmBuild(buildstep.ShellMixin, buildstep.BuildStep):
    """
    RpmBuild build step.

    Runs ``rpmbuild -ba`` on a spec file with a set of ``--define`` macro
    overrides, and splits the output into a command log and an error log.
    """

    renderables = ['dist']
    name = "rpmbuilder"
    haltOnFailure = True
    flunkOnFailure = True
    description = ["RPMBUILD"]
    descriptionDone = ["RPMBUILD"]

    def __init__(
        self,
        specfile=None,
        topdir='`pwd`',
        builddir='`pwd`',
        rpmdir='`pwd`',
        sourcedir='`pwd`',
        specdir='`pwd`',
        srcrpmdir='`pwd`',
        dist='.el6',
        define=None,
        autoRelease=False,
        vcsRevision=False,
        **kwargs,
    ):
        """
        @param specfile: the rpm spec file to build (required).
        @param topdir: value of the ``_topdir`` macro; the backtick default
            expands to the shell's working directory (likewise for builddir,
            rpmdir, sourcedir, specdir, srcrpmdir).
        @param dist: value of the ``dist`` macro (e.g. '.el6'); renderable.
        @param define: extra macro name -> value pairs to ``--define``.
        @param autoRelease: maintain an auto-incrementing release number in
            a '<specname>.release' file, read and written with plain open()
            (i.e. on the master side).
        @param vcsRevision: define ``_revision`` from the 'got_revision'
            property (single-codebase builds only).
        """
        kwargs = self.setupShellMixin(kwargs, prohibitArgs=['command'])
        super().__init__(**kwargs)

        self.dist = dist

        self.base_rpmbuild = (
            f'rpmbuild --define "_topdir {topdir}" --define "_builddir {builddir}"'
            f' --define "_rpmdir {rpmdir}" --define "_sourcedir {sourcedir}"'
            f' --define "_specdir {specdir}" --define "_srcrpmdir {srcrpmdir}"'
        )

        if define is None:
            define = {}
        for k, v in define.items():
            self.base_rpmbuild += f" --define \"{k} {v}\""

        self.specfile = specfile
        self.autoRelease = autoRelease
        self.vcsRevision = vcsRevision

        if not self.specfile:
            config.error("You must specify a specfile")

        self.addLogObserver('stdio', logobserver.LineConsumerLogObserver(self.logConsumer))

    @defer.inlineCallbacks
    def run(self):
        # Macros appended on top of base_rpmbuild for this particular run.
        rpm_extras_dict = {}
        rpm_extras_dict['dist'] = self.dist

        if self.autoRelease:
            # Track the release number in '<specname>.release'; missing or
            # malformed files restart the counter at 0.
            relfile = f"{os.path.basename(self.specfile).split('.')[0]}.release"
            try:
                with open(relfile, encoding='utf-8') as rfile:
                    rel = int(rfile.readline().strip())
            except (OSError, TypeError, ValueError):
                rel = 0
            rpm_extras_dict['_release'] = rel
            with open(relfile, 'w', encoding='utf-8') as rfile:
                rfile.write(str(rel + 1))

        if self.vcsRevision:
            revision = self.getProperty('got_revision')
            # only do this in the case where there's a single codebase
            if revision and not isinstance(revision, dict):
                rpm_extras_dict['_revision'] = revision

        self.rpmbuild = self.base_rpmbuild

        # The unit tests expect a certain order, so we sort the dict to keep
        # format the same every time
        for k, v in sorted(rpm_extras_dict.items()):
            self.rpmbuild = f'{self.rpmbuild} --define "{k} {v}"'

        command = f'{self.rpmbuild} -ba {self.specfile}'

        cmd = yield self.makeRemoteShellCommand(command=command)

        yield self.runCommand(cmd)
        stdio_log = yield self.getLog('stdio')
        yield stdio_log.finish()
        # NOTE(review): rpmcmdlog/rpmerrors are initialized inside the
        # logConsumer generator; run() assumes the observer has started
        # consuming by the time the command finishes — confirm.
        yield self.addCompleteLog('RPM Command Log', "\n".join(self.rpmcmdlog))
        if self.rpmerrors:
            yield self.addCompleteLog('RPM Errors', "\n".join(self.rpmerrors))
        return cmd.results()

    def logConsumer(self):
        # Lines starting with these prefixes are kept in the command log.
        rpm_prefixes = [
            'Provides:',
            'Requires(',
            'Requires:',
            'Checking for unpackaged',
            'Wrote:',
            'Executing(%',
            '+ ',
            'Processing files:',
        ]
        # Lines starting with these prefixes are collected as errors.
        rpm_err_pfx = [' ', 'RPM build errors:', 'error: ']
        self.rpmcmdlog = []
        self.rpmerrors = []

        while True:
            _, line = yield
            for pfx in rpm_prefixes:
                if line.startswith(pfx):
                    self.rpmcmdlog.append(line)
                    break
            for err in rpm_err_pfx:
                if line.startswith(err):
                    self.rpmerrors.append(line)
                    break
| 4,813 | Python | .py | 121 | 30.694215 | 91 | 0.603943 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,876 | rpmlint.py | buildbot_buildbot/master/buildbot/steps/package/rpm/rpmlint.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Portions Copyright Buildbot Team Members
# Portions Copyright Steve 'Ashcrow' Milner <[email protected]>
"""
Steps and objects related to rpmlint.
"""
from twisted.internet import defer
from buildbot.steps.package import util as pkgutil
from buildbot.steps.shell import Test
class RpmLint(Test):
    """
    Rpmlint build step.

    Runs ``rpmlint -i`` against a glob of spec/rpm files and summarizes the
    warnings and errors reported on stdio into separate complete logs.
    """

    name = "rpmlint"

    description = ["Checking for RPM/SPEC issues"]
    descriptionDone = ["Finished checking RPM/SPEC issues"]

    fileloc = '.'
    config = None

    def __init__(self, fileloc=None, config=None, **kwargs):
        """
        Create the Rpmlint object.

        @type fileloc: str
        @param fileloc: Location glob of the specs or rpms.
        @type config: str
        @param config: path to the rpmlint user config.
        @type kwargs: dict
        @param kwargs: all other keyword arguments.
        """
        super().__init__(**kwargs)
        if fileloc:
            self.fileloc = fileloc
        if config:
            self.config = config

        self.command = ["rpmlint", "-i"]
        if self.config:
            self.command += ['-f', self.config]
        self.command.append(self.fileloc)

        # Collect warning/error lines from stdio as the command runs.
        self.obs = pkgutil.WEObserver()
        self.addLogObserver('stdio', self.obs)

    @defer.inlineCallbacks
    def createSummary(self):
        """
        Create nice summary logs.

        @param log: log to create summary off of.
        """
        warnings = self.obs.warnings
        # Fix: errors used to be a hard-coded empty list, so the error log
        # below could never be produced; read them from the observer, the
        # same way the warnings are.
        errors = self.obs.errors
        if warnings:
            yield self.addCompleteLog(f'{len(warnings)} Warnings', "\n".join(warnings))
        if errors:
            yield self.addCompleteLog(f'{len(errors)} Errors', "\n".join(errors))
| 2,384 | Python | .py | 63 | 31.809524 | 87 | 0.670277 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,877 | github.py | buildbot_buildbot/master/buildbot/steps/source/github.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from buildbot.steps.source.git import Git
class GitHub(Git):
    """Git source step with special handling for GitHub merge branches."""

    def run_vc(self, branch, revision, patch):
        # Ignore the revision when the branch ends with '/merge' (GitHub
        # uses such refs for pull-request merges) and build the branch head
        # instead.
        effective_revision = None if branch.endswith("/merge") else revision
        return super().run_vc(branch, effective_revision, patch)
| 999 | Python | .py | 21 | 44.47619 | 79 | 0.753593 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,878 | gitlab.py | buildbot_buildbot/master/buildbot/steps/source/gitlab.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from twisted.python import log
from buildbot.steps.source.git import Git
class GitLab(Git):
    """
    Source step that knows how to handle merge requests from
    the GitLab change source
    """

    def run_vc(self, branch, revision, patch):
        # Ordinary (non merge-request) builds go straight to Git.
        if not self.build.hasProperty("target_branch"):
            return super().run_vc(branch, revision, patch)

        # Merge request: the builder is normally configured against the
        # *target* repository (configuring one builder per possible source
        # branch would be impractical), so warn when repourl does not match
        # the merge target.
        target_repourl = self.build.getProperty("target_git_ssh_url", None)
        if self.repourl != target_repourl:
            log.msg(
                "GitLab.run_vc: note: GitLab step for merge requests"
                f" should probably have repourl='{target_repourl}' instead "
                f"of '{self.repourl}'?"
            )

        # Point instead to the source being proposed for merge.
        branch = self.build.getProperty("source_branch", None)
        # FIXME: layering violation, should not be modifying self here?
        self.repourl = self.build.getProperty("source_git_ssh_url", None)

        # The revision is unlikely to exist in the repo already, so tell
        # Git to not check.
        return super().run_vc(branch, None, patch)
| 2,169 | Python | .py | 43 | 42.488372 | 80 | 0.674847 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,879 | cvs.py | buildbot_buildbot/master/buildbot/steps/source/cvs.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import re
import time
from email.utils import formatdate
from twisted.internet import defer
from twisted.internet import reactor
from twisted.python import log
from buildbot.interfaces import WorkerSetupError
from buildbot.process import buildstep
from buildbot.process import remotecommand
from buildbot.process import results
from buildbot.process.remotetransfer import StringFileWriter
from buildbot.steps.source.base import Source
class CVS(Source):
    """Check out (or update) a working copy from a CVS repository.

    Supports the standard Source modes 'incremental' and 'full'; full mode
    implements the 'clobber', 'copy', 'fresh' and 'clean' methods.
    """

    name = "cvs"

    renderables = ["cvsroot"]

    def __init__(
        self,
        cvsroot=None,
        cvsmodule='',
        mode='incremental',
        method=None,
        branch=None,
        global_options=None,
        extra_options=None,
        login=None,
        **kwargs,
    ):
        """
        @param cvsroot: CVSROOT passed via ``cvs -d`` (renderable).
        @param cvsmodule: module to check out.
        @param mode: 'incremental' or 'full'.
        @param method: full-mode method ('clobber', 'copy', 'fresh',
            'clean'); see _getMethod for defaulting.
        @param branch: branch/tag passed as ``-r``.
        @param global_options: options placed before the cvs subcommand.
        @param extra_options: options placed after the checkout options.
        @param login: password fed to ``cvs login`` on stdin when set.
        """
        self.cvsroot = cvsroot
        self.cvsmodule = cvsmodule
        self.branch = branch
        if global_options is None:
            global_options = []
        self.global_options = global_options
        if extra_options is None:
            extra_options = []
        self.extra_options = extra_options
        self.login = login
        self.mode = mode
        self.method = method
        # Pristine checkout directory used by the 'copy' method.
        self.srcdir = 'source'
        if not self._hasAttrGroupMember('mode', self.mode):
            raise ValueError(f"mode {self.mode} is not one of {self._listAttrGroupMembers('mode')}")
        super().__init__(**kwargs)

    @defer.inlineCallbacks
    def run_vc(self, branch, revision, patch):
        """Entry point invoked by the Source base class."""
        self.branch = branch
        self.revision = revision

        self.stdio_log = yield self.addLogForRemoteCommands("stdio")

        self.method = self._getMethod()
        installed = yield self.checkCvs()
        if not installed:
            raise WorkerSetupError("CVS is not installed on worker")

        yield self.checkLogin()

        # Revert any patch left over from a previous build before updating.
        patched = yield self.sourcedirIsPatched()
        if patched:
            yield self.purge(False)

        # Dispatch to mode_incremental / mode_full.
        yield self._getAttrGroupMember('mode', self.mode)()

        if patch:
            yield self.patch(patch)
        yield self.parseGotRevision()
        return results.SUCCESS

    @defer.inlineCallbacks
    def mode_incremental(self):
        # Update in place when the working copy matches this step's
        # cvsroot/module; otherwise fall back to clobber + fresh checkout.
        updatable = yield self._sourcedirIsUpdatable()
        if updatable:
            rv = yield self.doUpdate()
        else:
            rv = yield self.clobber()
        return rv

    @defer.inlineCallbacks
    def mode_full(self):
        if self.method == 'clobber':
            rv = yield self.clobber()
            return rv
        elif self.method == 'copy':
            rv = yield self.copy()
            return rv

        updatable = yield self._sourcedirIsUpdatable()
        if not updatable:
            log.msg("CVS repo not present, making full checkout")
            rv = yield self.doCheckout(self.workdir)
        elif self.method == 'clean':
            rv = yield self.clean()
        elif self.method == 'fresh':
            rv = yield self.fresh()
        else:
            raise ValueError("Unknown method, check your configuration")
        return rv

    @defer.inlineCallbacks
    def _clobber(self):
        # Remove the whole working directory on the worker.
        cmd = remotecommand.RemoteCommand(
            'rmdir', {'dir': self.workdir, 'logEnviron': self.logEnviron, 'timeout': self.timeout}
        )
        cmd.useLog(self.stdio_log, False)
        yield self.runCommand(cmd)
        if cmd.rc:
            raise RuntimeError("Failed to delete directory")

    @defer.inlineCallbacks
    def clobber(self):
        """Delete the working directory and perform a full checkout."""
        yield self._clobber()
        res = yield self.doCheckout(self.workdir)
        return res

    @defer.inlineCallbacks
    def fresh(
        self,
    ):
        """Discard local modifications (including ignored files), update."""
        yield self.purge(True)
        res = yield self.doUpdate()
        return res

    @defer.inlineCallbacks
    def clean(
        self,
    ):
        """Discard local modifications (keeping ignored files), update."""
        yield self.purge(False)
        res = yield self.doUpdate()
        return res

    @defer.inlineCallbacks
    def copy(self):
        """Maintain a pristine checkout in srcdir and copy it to workdir."""
        cmd = remotecommand.RemoteCommand(
            'rmdir', {'dir': self.workdir, 'logEnviron': self.logEnviron, 'timeout': self.timeout}
        )
        cmd.useLog(self.stdio_log, False)
        yield self.runCommand(cmd)
        old_workdir = self.workdir
        # Temporarily retarget the step at the pristine copy for the update.
        self.workdir = self.srcdir
        yield self.mode_incremental()
        # Replicate the pristine copy into the (freshly removed) workdir.
        cmd = remotecommand.RemoteCommand(
            'cpdir',
            {
                'fromdir': self.srcdir,
                'todir': old_workdir,
                'logEnviron': self.logEnviron,
                'timeout': self.timeout,
            },
        )
        cmd.useLog(self.stdio_log, False)
        yield self.runCommand(cmd)
        self.workdir = old_workdir
        return results.SUCCESS

    @defer.inlineCallbacks
    def purge(self, ignore_ignores):
        # cvsdiscard reverts local modifications; with --ignore it also
        # removes files matched by the ignore rules.
        command = ['cvsdiscard']
        if ignore_ignores:
            command += ['--ignore']
        cmd = remotecommand.RemoteShellCommand(
            self.workdir, command, env=self.env, logEnviron=self.logEnviron, timeout=self.timeout
        )
        cmd.useLog(self.stdio_log, False)
        yield self.runCommand(cmd)
        if cmd.didFail():
            raise buildstep.BuildStepFailed()

    @defer.inlineCallbacks
    def doCheckout(self, dir):
        """Full checkout into *dir*, retrying per self.retry if configured."""
        command = ['-d', self.cvsroot, '-z3', 'checkout', '-d', dir]
        command = self.global_options + command + self.extra_options
        if self.branch:
            command += ['-r', self.branch]
        if self.revision:
            command += ['-D', self.revision]
        command += [self.cvsmodule]
        if self.retry:
            # Only abandon on failure once no retries remain.
            abandonOnFailure = self.retry[1] <= 0
        else:
            abandonOnFailure = True
        res = yield self._dovccmd(command, '', abandonOnFailure=abandonOnFailure)
        if self.retry:
            if self.stopped or res == 0:
                return res
            delay, repeats = self.retry
            if repeats > 0:
                log.msg(f"Checkout failed, trying {repeats} more times after {delay} seconds")
                self.retry = (delay, repeats - 1)
                # Clobber and re-run the checkout after the configured delay.
                df = defer.Deferred()
                df.addCallback(lambda _: self._clobber())
                df.addCallback(lambda _: self.doCheckout(self.workdir))
                reactor.callLater(delay, df.callback, None)
                res = yield df
        return res

    @defer.inlineCallbacks
    def doUpdate(self):
        command = ['-z3', 'update', '-dP']
        branch = self.branch
        # special case. 'cvs update -r HEAD -D today' gives no files; see #2351
        if branch == 'HEAD' and self.revision:
            branch = None
        if branch:
            command += ['-r', self.branch]
        if self.revision:
            command += ['-D', self.revision]
        res = yield self._dovccmd(command)
        return res

    @defer.inlineCallbacks
    def checkLogin(self):
        # Authenticate (e.g. against a pserver) when a password was given.
        if self.login:
            yield self._dovccmd(['-d', self.cvsroot, 'login'], initialStdin=self.login + "\n")

    @defer.inlineCallbacks
    def _dovccmd(self, command, workdir=None, abandonOnFailure=True, initialStdin=None):
        """Run ``cvs <command>`` remotely and return its exit code.

        Raises BuildStepFailed on a non-zero exit code when
        *abandonOnFailure* is set.
        """
        if workdir is None:
            workdir = self.workdir
        if not command:
            raise ValueError("No command specified")
        cmd = remotecommand.RemoteShellCommand(
            workdir,
            ["cvs", *command],
            env=self.env,
            timeout=self.timeout,
            logEnviron=self.logEnviron,
            initialStdin=initialStdin,
        )
        cmd.useLog(self.stdio_log, False)
        yield self.runCommand(cmd)

        if cmd.rc != 0 and abandonOnFailure:
            log.msg(f"Source step failed while running command {cmd}")
            raise buildstep.BuildStepFailed()
        return cmd.rc

    def _cvsEntriesContainStickyDates(self, entries):
        """Return True when the CVS/Entries content carries a sticky date."""
        for line in entries.splitlines():
            if line == 'D':  # the last line contains just a single 'D'
                pass
            elif line.split('/')[-1].startswith('D'):
                # fields are separated by slashes, the last field contains the tag or date
                # sticky dates start with 'D'
                return True
        return False  # no sticky dates

    @defer.inlineCallbacks
    def _sourcedirIsUpdatable(self):
        """Decide whether an in-place 'cvs update' is safe.

        Fetches CVS/Root, CVS/Repository and CVS/Entries from the worker's
        working copy and compares them against this step's configuration.
        """
        myFileWriter = StringFileWriter()
        args = {
            'workdir': self.build.path_module.join(self.workdir, 'CVS'),
            'writer': myFileWriter,
            'maxsize': None,
            'blocksize': 32 * 1024,
        }

        def uploadFileArgs(source):
            full_args = dict(args)
            # older workers use the pre-rename 'slavesrc' argument name
            if self.workerVersionIsOlderThan('uploadFile', '3.0'):
                full_args['slavesrc'] = source
            else:
                full_args['workersrc'] = source
            return full_args

        cmd = remotecommand.RemoteCommand('uploadFile', uploadFileArgs('Root'), ignore_updates=True)
        yield self.runCommand(cmd)
        if cmd.rc is not None and cmd.rc != 0:
            return False

        # on Windows, the cvsroot may not contain the password, so compare to
        # both
        cvsroot_without_pw = re.sub("(:pserver:[^:]*):[^@]*(@.*)", r"\1\2", self.cvsroot)
        if myFileWriter.buffer.strip() not in (self.cvsroot, cvsroot_without_pw):
            return False

        myFileWriter.buffer = ""
        cmd = remotecommand.RemoteCommand(
            'uploadFile', uploadFileArgs('Repository'), ignore_updates=True
        )
        yield self.runCommand(cmd)
        if cmd.rc is not None and cmd.rc != 0:
            return False
        if myFileWriter.buffer.strip() != self.cvsmodule:
            return False

        # if there are sticky dates (from an earlier build with revision),
        # we can't update (unless we remove those tags with cvs update -A)
        myFileWriter.buffer = ""
        cmd = remotecommand.RemoteCommand(
            'uploadFile', uploadFileArgs('Entries'), ignore_updates=True
        )
        yield self.runCommand(cmd)
        if cmd.rc is not None and cmd.rc != 0:
            return False
        if self._cvsEntriesContainStickyDates(myFileWriter.buffer):
            return False

        return True

    def parseGotRevision(self):
        # CVS has no repository-wide revision ids; record the current UTC
        # time as got_revision instead.
        revision = time.strftime("%Y-%m-%d %H:%M:%S +0000", time.gmtime())
        self.updateSourceProperty('got_revision', revision)

    @defer.inlineCallbacks
    def checkCvs(self):
        """Return True when the cvs binary is available on the worker."""
        res = yield self._dovccmd(['--version'])
        return res == 0

    def _getMethod(self):
        # Normalize mode/method: incremental has no method; full defaults
        # to 'fresh' when none was given.
        if self.method is not None and self.mode != 'incremental':
            return self.method
        elif self.mode == 'incremental':
            return None
        elif self.method is None and self.mode == 'full':
            return 'fresh'
        return None

    def computeSourceRevision(self, changes):
        """Pick a ``cvs -D`` timestamp halfway between the newest change and
        the newest build-request submission, in RFC 2822 date format."""
        if not changes:
            return None
        lastChange = max(c.when for c in changes)
        lastSubmit = max(br.submittedAt for br in self.build.requests)
        when = (lastChange + lastSubmit) / 2
        return formatdate(when)
| 11,692 | Python | .py | 306 | 28.905229 | 100 | 0.609773 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,880 | mtn.py | buildbot_buildbot/master/buildbot/steps/source/mtn.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
"""
Source step code for Monotone
"""
from twisted.internet import defer
from twisted.internet import reactor
from twisted.python import log
from buildbot.config import ConfigErrors
from buildbot.interfaces import WorkerSetupError
from buildbot.process import buildstep
from buildbot.process import remotecommand
from buildbot.process.results import SUCCESS
from buildbot.steps.source.base import Source
class Monotone(Source):
    """Class for Monotone with all smarts"""

    name = 'monotone'
    renderables = ['repourl']
    # Valid values for 'method' when mode == 'full'.
    possible_methods = ('clobber', 'copy', 'fresh', 'clean')

    def __init__(
        self, repourl=None, branch=None, progress=False, mode='incremental', method=None, **kwargs
    ):
        """
        :param repourl: URL of the monotone repository to pull from (required).
        :param branch: branch to check out (required).
        :param progress: if True, pass ``--ticker=dot`` to 'mtn pull'.
        :param mode: 'incremental' or 'full'.
        :param method: for mode 'full', one of ``possible_methods``;
            defaults to 'copy' when omitted.
        """
        self.repourl = repourl
        self.method = method
        self.mode = mode
        self.branch = branch
        # Pull target spec: <repourl>?<branch>
        self.sourcedata = f"{self.repourl}?{self.branch}"
        # Local monotone database file, kept in the builder base directory.
        self.database = 'db.mtn'
        self.progress = progress
        super().__init__(**kwargs)
        # Collect all configuration problems and report them together.
        errors = []
        if not self._hasAttrGroupMember('mode', self.mode):
            errors.append(f"mode {self.mode} is not one of {self._listAttrGroupMembers('mode')}")
        if self.mode == 'incremental' and self.method:
            errors.append("Incremental mode does not require method")
        if self.mode == 'full':
            if self.method is None:
                self.method = 'copy'
            elif self.method not in self.possible_methods:
                errors.append(f"Invalid method for mode == {self.mode}")
        if repourl is None:
            errors.append("you must provide repourl")
        if branch is None:
            errors.append("you must provide branch")
        if errors:
            raise ConfigErrors(errors)

    @defer.inlineCallbacks
    def run_vc(self, branch, revision, patch):
        """Main entry point: verify mtn, sync the database, then run the mode."""
        self.revision = revision
        self.stdio_log = yield self.addLogForRemoteCommands("stdio")
        try:
            monotoneInstalled = yield self.checkMonotone()
            if not monotoneInstalled:
                raise WorkerSetupError("Monotone is not installed on worker")
            yield self._checkDb()
            yield self._retryPull()
            # If we're not throwing away the workdir, check if it's
            # somehow patched or modified and revert.
            if self.mode != 'full' or self.method not in ('clobber', 'copy'):
                patched = yield self.sourcedirIsPatched()
                if patched:
                    yield self.clean()
            # Call a mode specific method
            fn = self._getAttrGroupMember('mode', self.mode)
            yield fn()
            if patch:
                yield self.patch(patch)
            yield self.parseGotRevision()
            return SUCCESS
        finally:
            pass  # FIXME: remove this try:raise block

    @defer.inlineCallbacks
    def mode_full(self):
        """Full checkout: dispatch on self.method (clobber/copy/clean/fresh)."""
        if self.method == 'clobber':
            yield self.clobber()
            return
        elif self.method == 'copy':
            yield self.copy()
            return
        updatable = yield self._sourcedirIsUpdatable()
        if not updatable:
            # No usable working copy: fall back to a fresh checkout.
            yield self.clobber()
        elif self.method == 'clean':
            yield self.clean()
            yield self._update()
        elif self.method == 'fresh':
            # 'fresh' also removes ignored files (ignore_ignored=False).
            yield self.clean(False)
            yield self._update()
        else:
            raise ValueError("Unknown method, check your configuration")

    @defer.inlineCallbacks
    def mode_incremental(self):
        """Incremental checkout: update in place, or clobber if not updatable."""
        updatable = yield self._sourcedirIsUpdatable()
        if not updatable:
            yield self.clobber()
        else:
            yield self._update()

    @defer.inlineCallbacks
    def clobber(self):
        """Delete the working directory and perform a fresh checkout."""
        yield self.runRmdir(self.workdir)
        yield self._checkout()

    @defer.inlineCallbacks
    def copy(self):
        """Update a pristine copy in 'source', then cpdir it into 'build'."""
        cmd = remotecommand.RemoteCommand(
            'rmdir',
            {
                'dir': self.workdir,
                'logEnviron': self.logEnviron,
                'timeout': self.timeout,
            },
        )
        cmd.useLog(self.stdio_log, False)
        yield self.runCommand(cmd)
        # Temporarily point the step at the 'source' dir for the update.
        self.workdir = 'source'
        yield self.mode_incremental()
        cmd = remotecommand.RemoteCommand(
            'cpdir',
            {
                'fromdir': 'source',
                'todir': 'build',
                'logEnviron': self.logEnviron,
                'timeout': self.timeout,
            },
        )
        cmd.useLog(self.stdio_log, False)
        yield self.runCommand(cmd)
        self.workdir = 'build'
        return 0

    @defer.inlineCallbacks
    def checkMonotone(self):
        """Return True if 'mtn --version' succeeds on the worker."""
        cmd = remotecommand.RemoteShellCommand(
            self.workdir,
            ['mtn', '--version'],
            env=self.env,
            logEnviron=self.logEnviron,
            timeout=self.timeout,
        )
        cmd.useLog(self.stdio_log, False)
        yield self.runCommand(cmd)
        return cmd.rc == 0

    @defer.inlineCallbacks
    def clean(self, ignore_ignored=True):
        """Remove unknown (and, if ignore_ignored is False, ignored) files."""
        files = []
        commands = [['mtn', 'ls', 'unknown']]
        if not ignore_ignored:
            commands.append(['mtn', 'ls', 'ignored'])
        for cmd in commands:
            stdout = yield self._dovccmd(cmd, workdir=self.workdir, collectStdout=True)
            if not stdout:
                continue
            for filename in stdout.strip().split('\n'):
                filename = self.workdir + '/' + str(filename)
                files.append(filename)
        if not files:
            rc = 0
        else:
            # Older workers lack multi-path rmdir; remove files one by one.
            if self.workerVersionIsOlderThan('rmdir', '2.14'):
                rc = yield self.removeFiles(files)
            else:
                rc = yield self.runRmdir(files, abandonOnFailure=False)
        if rc != 0:
            log.msg("Failed removing files")
            raise buildstep.BuildStepFailed()

    @defer.inlineCallbacks
    def removeFiles(self, files):
        """Remove each file individually; return first nonzero rc, else 0."""
        for filename in files:
            res = yield self.runRmdir(filename, abandonOnFailure=False)
            if res:
                return res
        return 0

    def _checkout(self, abandonOnFailure=False):
        """Run 'mtn checkout' of the configured branch (or revision)."""
        command = ['mtn', 'checkout', self.workdir, '--db', self.database]
        if self.revision:
            command.extend(['--revision', self.revision])
        command.extend(['--branch', self.branch])
        return self._dovccmd(command, workdir='.', abandonOnFailure=abandonOnFailure)

    def _update(self, abandonOnFailure=False):
        """Run 'mtn update' to the revision, or to the branch head (h:...)."""
        command = ['mtn', 'update']
        if self.revision:
            command.extend(['--revision', self.revision])
        else:
            command.extend(['--revision', 'h:' + self.branch])
        command.extend(['--branch', self.branch])
        return self._dovccmd(command, workdir=self.workdir, abandonOnFailure=abandonOnFailure)

    def _pull(self, abandonOnFailure=False):
        """Run 'mtn pull' of self.sourcedata into the local database."""
        command = ['mtn', 'pull', self.sourcedata, '--db', self.database]
        if self.progress:
            command.extend(['--ticker=dot'])
        else:
            command.extend(['--ticker=none'])
        d = self._dovccmd(command, workdir='.', abandonOnFailure=abandonOnFailure)
        return d

    @defer.inlineCallbacks
    def _retryPull(self):
        """Pull, retrying per self.retry = (delay, repeats) on failure."""
        if self.retry:
            # Only abandon outright once the retry budget is exhausted.
            abandonOnFailure = self.retry[1] <= 0
        else:
            abandonOnFailure = True
        res = yield self._pull(abandonOnFailure)
        if self.retry:
            delay, repeats = self.retry
            if self.stopped or res == 0 or repeats <= 0:
                return res
            else:
                log.msg(f"Checkout failed, trying {repeats} more times after {delay} seconds")
                self.retry = (delay, repeats - 1)
                # Schedule the next attempt after 'delay' seconds.
                df = defer.Deferred()
                df.addCallback(lambda _: self._retryPull())
                reactor.callLater(delay, df.callback, None)
                yield df
        return None

    @defer.inlineCallbacks
    def parseGotRevision(self):
        """Record the workspace base revision as the 'got_revision' property."""
        stdout = yield self._dovccmd(
            ['mtn', 'automate', 'select', 'w:'], workdir=self.workdir, collectStdout=True
        )
        revision = stdout.strip()
        # Monotone revision ids are 40-hex-char SHA1 strings.
        if len(revision) != 40:
            raise buildstep.BuildStepFailed()
        log.msg(f"Got Monotone revision {revision}")
        self.updateSourceProperty('got_revision', revision)
        return 0

    @defer.inlineCallbacks
    def _dovccmd(
        self,
        command,
        workdir,
        collectStdout=False,
        initialStdin=None,
        decodeRC=None,
        abandonOnFailure=True,
    ):
        """Run an mtn command on the worker; return stdout or the exit code."""
        if not command:
            raise ValueError("No command specified")
        if decodeRC is None:
            decodeRC = {0: SUCCESS}
        cmd = remotecommand.RemoteShellCommand(
            workdir,
            command,
            env=self.env,
            logEnviron=self.logEnviron,
            timeout=self.timeout,
            collectStdout=collectStdout,
            initialStdin=initialStdin,
            decodeRC=decodeRC,
        )
        cmd.useLog(self.stdio_log, False)
        yield self.runCommand(cmd)
        if abandonOnFailure and cmd.didFail():
            log.msg(f"Source step failed while running command {cmd}")
            raise buildstep.BuildStepFailed()
        if collectStdout:
            return cmd.stdout
        else:
            return cmd.rc

    @defer.inlineCallbacks
    def _checkDb(self):
        """Ensure the local mtn database exists, is valid, and is up to date."""
        db_exists = yield self.pathExists(self.database)
        db_needs_init = False
        if db_exists:
            stdout = yield self._dovccmd(
                ['mtn', 'db', 'info', '--db', self.database], workdir='.', collectStdout=True
            )
            if stdout.find("migration needed") >= 0:
                log.msg("Older format database found, migrating it")
                yield self._dovccmd(['mtn', 'db', 'migrate', '--db', self.database], workdir='.')
            elif (
                stdout.find("too new, cannot use") >= 0
                or stdout.find("database has no tables") >= 0
            ):
                # The database is of a newer format which the worker's
                # mtn version can not handle. Drop it and pull again
                # with that monotone version installed on the
                # worker. Do the same if it's an empty file.
                yield self.runRmdir(self.database)
                db_needs_init = True
            elif stdout.find("not a monotone database") >= 0:
                # There exists a database file, but it's not a valid
                # monotone database. Do not delete it, but fail with
                # an error.
                raise buildstep.BuildStepFailed()
            else:
                log.msg("Database exists and compatible")
        else:
            db_needs_init = True
            log.msg("Database does not exist")
        if db_needs_init:
            command = ['mtn', 'db', 'init', '--db', self.database]
            yield self._dovccmd(command, workdir='.')

    @defer.inlineCallbacks
    def _sourcedirIsUpdatable(self):
        """Return True if the workdir contains an _MTN bookkeeping directory."""
        workdir_path = self.build.path_module.join(self.workdir, '_MTN')
        workdir_exists = yield self.pathExists(workdir_path)
        if not workdir_exists:
            log.msg("Workdir does not exist, falling back to a fresh clone")
        return workdir_exists
| 12,111 | Python | .py | 308 | 29.00974 | 98 | 0.591377 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,881 | git.py | buildbot_buildbot/master/buildbot/steps/source/git.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import annotations
from typing import TYPE_CHECKING
from twisted.internet import defer
from twisted.internet import reactor
from twisted.python import log
from buildbot import config as bbconfig
from buildbot.interfaces import WorkerSetupError
from buildbot.process import buildstep
from buildbot.process import remotecommand
from buildbot.steps.source.base import Source
from buildbot.steps.worker import CompositeStepMixin
from buildbot.util.git import RC_SUCCESS
from buildbot.util.git import GitStepMixin
from buildbot.util.git_credential import GitCredentialOptions
from buildbot.util.git_credential import add_user_password_to_credentials
if TYPE_CHECKING:
from buildbot.interfaces import IRenderable
# Length in hex characters of a full Git SHA-1 commit hash.
GIT_HASH_LENGTH = 40
def isTrueOrIsExactlyZero(v):
    """Return True for any truthy value and for the integer zero.

    ``False`` is rejected even though it is an int equal to zero; every
    other falsy value (None, '', 0.0, empty containers) is rejected too.
    """
    if v:
        return True
    # Among falsy values, only the int 0 qualifies -- and bool is a
    # subclass of int, so False must be excluded by identity.
    return isinstance(v, int) and v is not False
# Maps each supported getDescription option name to a function translating
# its configured value into 'git describe' command-line arguments (None
# means "omit the flag").  Consumed by Git.parseCommitDescription.
git_describe_flags = [
    # on or off
    ('all', lambda v: ['--all'] if v else None),
    ('always', lambda v: ['--always'] if v else None),
    ('contains', lambda v: ['--contains'] if v else None),
    ('debug', lambda v: ['--debug'] if v else None),
    ('long', lambda v: ['--long'] if v else None),
    ('exact-match', lambda v: ['--exact-match'] if v else None),
    ('tags', lambda v: ['--tags'] if v else None),
    ('first-parent', lambda v: ['--first-parent'] if v else None),
    # string parameter
    ('match', lambda v: ['--match', v] if v else None),
    ('exclude', lambda v: ['--exclude', v] if v else None),
    # numeric parameter (0 is a meaningful value, hence isTrueOrIsExactlyZero)
    ('abbrev', lambda v: [f'--abbrev={v}'] if isTrueOrIsExactlyZero(v) else None),
    ('candidates', lambda v: [f'--candidates={v}'] if isTrueOrIsExactlyZero(v) else None),
    # optional string parameter
    ('dirty', lambda v: ['--dirty'] if (v is True or v == '') else None),
    ('dirty', lambda v: [f'--dirty={v}'] if (v and v is not True) else None),
]
class Git(Source, GitStepMixin):
    """Source step checking out from a Git repository.

    Supports incremental and full modes; full mode offers the methods
    'clean', 'fresh', 'clobber' and 'copy'.
    """

    name = 'git'
    renderables = ["repourl", "reference", "branch", "codebase", "mode", "method", "origin"]

    def __init__(
        self,
        repourl=None,
        branch='HEAD',
        mode='incremental',
        method=None,
        reference=None,
        submodules=False,
        remoteSubmodules=False,
        tags=False,
        shallow=False,
        filters=None,
        progress=True,
        retryFetch=False,
        clobberOnFailure=False,
        getDescription=False,
        config=None,
        origin=None,
        sshPrivateKey=None,
        sshHostKey=None,
        sshKnownHosts=None,
        auth_credentials: tuple[IRenderable | str, IRenderable | str] | None = None,
        git_credentials: GitCredentialOptions | None = None,
        **kwargs,
    ):
        # Normalize getDescription: anything falsy that is not a dict means "off".
        if not getDescription and not isinstance(getDescription, dict):
            getDescription = False
        self.branch = branch
        self.method = method
        self.repourl = repourl
        self.reference = reference
        self.retryFetch = retryFetch
        self.submodules = submodules
        self.remoteSubmodules = remoteSubmodules
        self.tags = tags
        self.shallow = shallow
        self.filters = filters
        self.clobberOnFailure = clobberOnFailure
        self.mode = mode
        self.prog = progress
        self.getDescription = getDescription
        self.config = config
        # Pristine checkout directory used by the 'copy' method.
        self.srcdir = 'source'
        self.origin = origin
        super().__init__(**kwargs)
        self.setupGitStep()
        if auth_credentials is not None:
            git_credentials = add_user_password_to_credentials(
                auth_credentials,
                repourl,
                git_credentials,
            )
        self.setup_git_auth(
            sshPrivateKey,
            sshHostKey,
            sshKnownHosts,
            git_credentials,
        )
        # Validate mode/method combinations (skipped for renderables,
        # which are not plain strings at config time).
        if isinstance(self.mode, str):
            if not self._hasAttrGroupMember('mode', self.mode):
                bbconfig.error(
                    f"Git: mode must be {' or '.join(self._listAttrGroupMembers('mode'))}"
                )
            if isinstance(self.method, str):
                if self.mode == 'full' and self.method not in [
                    'clean',
                    'fresh',
                    'clobber',
                    'copy',
                    None,
                ]:
                    bbconfig.error("Git: invalid method for mode 'full'.")
                if self.shallow and (self.mode != 'full' or self.method != 'clobber'):
                    bbconfig.error(
                        "Git: in mode 'full' shallow only possible with method 'clobber'."
                    )
        if not isinstance(self.getDescription, (bool, dict)):
            bbconfig.error("Git: getDescription must be a boolean or a dict.")

    @defer.inlineCallbacks
    def run_vc(self, branch, revision, patch):
        """Main entry point: check git, run the configured mode, apply patch."""
        self.branch = branch or 'HEAD'
        self.revision = revision
        self.method = self._getMethod()
        self.stdio_log = yield self.addLogForRemoteCommands("stdio")
        auth_workdir = self._get_auth_data_workdir()
        try:
            gitInstalled = yield self.checkFeatureSupport()
            if not gitInstalled:
                raise WorkerSetupError("git is not installed on worker")
            patched = yield self.sourcedirIsPatched()
            if patched:
                # Discard any leftover patch before (re)checking out.
                yield self._dovccmd(['clean', '-f', '-f', '-d', '-x'])
            yield self._git_auth.download_auth_files_if_needed(auth_workdir)
            yield self._getAttrGroupMember('mode', self.mode)()
            if patch:
                yield self.patch(patch)
            yield self.parseGotRevision()
            res = yield self.parseCommitDescription()
            return res
        finally:
            # Always clean up auth material, even on failure.
            yield self._git_auth.remove_auth_files_if_needed(auth_workdir)

    @defer.inlineCallbacks
    def mode_full(self):
        """Full checkout: dispatch on self.method (clobber/copy/clean/fresh)."""
        if self.method == 'clobber':
            yield self.clobber()
            return
        elif self.method == 'copy':
            yield self.copy()
            return
        action = yield self._sourcedirIsUpdatable()
        if action == "clobber":
            yield self.clobber()
            return
        elif action == "clone":
            log.msg("No git repo present, making full clone")
            yield self._fullCloneOrFallback(self.shallow)
        elif self.method == 'clean':
            yield self.clean()
        elif self.method == 'fresh':
            yield self.fresh()
        else:
            raise ValueError("Unknown method, check your configuration")

    @defer.inlineCallbacks
    def mode_incremental(self):
        """Incremental checkout: fetch into the existing repo if possible."""
        action = yield self._sourcedirIsUpdatable()
        # if not updatable, do a full checkout
        if action == "clobber":
            yield self.clobber()
            return
        elif action == "clone":
            log.msg("No git repo present, making full clone")
            yield self._fullCloneOrFallback(shallowClone=self.shallow)
            return
        yield self._fetchOrFallback()
        yield self._syncSubmodule(None)
        yield self._updateSubmodule(None)

    @defer.inlineCallbacks
    def clean(self):
        """git clean untracked files, fetch, then sync/update/clean submodules."""
        clean_command = ['clean', '-f', '-f', '-d']
        rc = yield self._dovccmd(clean_command)
        if rc != RC_SUCCESS:
            raise buildstep.BuildStepFailed
        rc = yield self._fetchOrFallback()
        if rc != RC_SUCCESS:
            raise buildstep.BuildStepFailed
        rc = yield self._syncSubmodule()
        if rc != RC_SUCCESS:
            raise buildstep.BuildStepFailed
        rc = yield self._updateSubmodule()
        if rc != RC_SUCCESS:
            raise buildstep.BuildStepFailed
        rc = yield self._cleanSubmodule()
        if rc != RC_SUCCESS:
            raise buildstep.BuildStepFailed
        if self.submodules:
            # Clean again: submodule update may have re-created ignored files.
            rc = yield self._dovccmd(clean_command)
            if rc != RC_SUCCESS:
                raise buildstep.BuildStepFailed
        return RC_SUCCESS

    @defer.inlineCallbacks
    def clobber(self):
        """Delete the working directory and make a full clone."""
        yield self._doClobber()
        res = yield self._fullClone(shallowClone=self.shallow)
        if res != RC_SUCCESS:
            raise buildstep.BuildStepFailed

    @defer.inlineCallbacks
    def fresh(self):
        """Like clean() but also removes ignored files ('-x')."""
        clean_command = ['clean', '-f', '-f', '-d', '-x']
        res = yield self._dovccmd(clean_command, abandonOnFailure=False)
        if res == RC_SUCCESS:
            yield self._fetchOrFallback()
        else:
            # Clean failed: start over from scratch.
            yield self._doClobber()
            yield self._fullCloneOrFallback(shallowClone=self.shallow)
        yield self._syncSubmodule()
        yield self._updateSubmodule()
        yield self._cleanSubmodule()
        if self.submodules:
            yield self._dovccmd(clean_command)

    @defer.inlineCallbacks
    def copy(self):
        """Update a pristine clone in 'source', then cpdir it to the workdir."""
        yield self.runRmdir(self.workdir, abandonOnFailure=False, timeout=self.timeout)
        old_workdir = self.workdir
        self.workdir = self.srcdir
        try:
            yield self.mode_incremental()
            cmd = remotecommand.RemoteCommand(
                'cpdir',
                {
                    'fromdir': self.srcdir,
                    'todir': old_workdir,
                    'logEnviron': self.logEnviron,
                    'timeout': self.timeout,
                },
            )
            cmd.useLog(self.stdio_log, False)
            yield self.runCommand(cmd)
            if cmd.didFail():
                raise buildstep.BuildStepFailed()
            return RC_SUCCESS
        finally:
            # Restore the original workdir whatever happened.
            self.workdir = old_workdir

    @defer.inlineCallbacks
    def parseGotRevision(self, _=None):
        """Record HEAD's full hash as the 'got_revision' property."""
        stdout = yield self._dovccmd(['rev-parse', 'HEAD'], collectStdout=True)
        revision = stdout.strip()
        if len(revision) != GIT_HASH_LENGTH:
            raise buildstep.BuildStepFailed()
        log.msg(f"Got Git revision {revision}")
        self.updateSourceProperty('got_revision', revision)
        return RC_SUCCESS

    @defer.inlineCallbacks
    def parseCommitDescription(self, _=None):
        """Run 'git describe' per getDescription and record 'commit-description'."""
        # dict() should not return here
        if isinstance(self.getDescription, bool) and not self.getDescription:
            return RC_SUCCESS
        cmd = ['describe']
        if isinstance(self.getDescription, dict):
            for opt, arg in git_describe_flags:
                opt = self.getDescription.get(opt, None)
                arg = arg(opt)
                if arg:
                    cmd.extend(arg)
        # 'git describe' takes a commitish as an argument for all options
        # *except* --dirty
        if not any(arg.startswith('--dirty') for arg in cmd):
            cmd.append('HEAD')
        try:
            stdout = yield self._dovccmd(cmd, collectStdout=True)
            desc = stdout.strip()
            self.updateSourceProperty('commit-description', desc)
        except Exception:
            # Description is best-effort; a failure does not fail the step.
            pass
        return RC_SUCCESS

    def _get_auth_data_workdir(self):
        """Return the directory where auth files are placed for this mode."""
        if self.method == 'copy' and self.mode == 'full':
            return self.srcdir
        return self.workdir

    @defer.inlineCallbacks
    def _fetch(self, _, shallowClone, abandonOnFailure=True):
        """Fetch the configured branch and check out the target revision."""
        fetch_required = True
        # If the revision already exists in the repo, we don't need to fetch. However, if tags
        # were requested, then fetch still needs to be performed for the tags.
        if not self.tags and self.revision:
            rc = yield self._dovccmd(['cat-file', '-e', self.revision], abandonOnFailure=False)
            if rc == RC_SUCCESS:
                fetch_required = False
        if fetch_required:
            command = ['fetch', '-f']
            if shallowClone:
                command += ['--depth', str(int(shallowClone))]
            if self.tags:
                command.append("--tags")
            # If the 'progress' option is set, tell git fetch to output
            # progress information to the log. This can solve issues with
            # long fetches killed due to lack of output, but only works
            # with Git 1.7.2 or later.
            if self.prog:
                if self.supportsProgress:
                    command.append('--progress')
                else:
                    log.msg("Git versions < 1.7.2 don't support progress")
            command += [self.repourl, self.branch]
            res = yield self._dovccmd(command, abandonOnFailure=abandonOnFailure)
            if res != RC_SUCCESS:
                return res
        if self.revision:
            rev = self.revision
        else:
            rev = 'FETCH_HEAD'
        command = ['checkout', '-f', rev]
        res = yield self._dovccmd(command, abandonOnFailure=abandonOnFailure)
        # Rename the branch if needed.
        if res == RC_SUCCESS and self.branch != 'HEAD':
            # Ignore errors
            yield self._dovccmd(['checkout', '-B', self.branch], abandonOnFailure=False)
        return res

    @defer.inlineCallbacks
    def _fetchOrFallback(self, _=None):
        """
        Handles fallbacks for failure of fetch,
        wrapper for self._fetch
        """
        abandonOnFailure = not self.retryFetch and not self.clobberOnFailure
        res = yield self._fetch(None, shallowClone=self.shallow, abandonOnFailure=abandonOnFailure)
        if res == RC_SUCCESS:
            return res
        elif self.retryFetch:
            yield self._fetch(None, shallowClone=self.shallow)
        elif self.clobberOnFailure:
            yield self.clobber()
        else:
            raise buildstep.BuildStepFailed()
        return None

    @defer.inlineCallbacks
    def _clone(self, shallowClone):
        """Retry if clone failed"""
        command = ['clone']
        switchToBranch = self.branch != 'HEAD'
        if self.supportsBranch and self.branch != 'HEAD':
            if self.branch.startswith('refs/'):
                # we can't choose this branch from 'git clone' directly; we
                # must do so after the clone
                command += ['--no-checkout']
            else:
                switchToBranch = False
                command += ['--branch', self.branch]
        if shallowClone:
            command += ['--depth', str(int(shallowClone))]
        if self.reference:
            command += ['--reference', self.reference]
        if self.origin:
            command += ['--origin', self.origin]
        if self.filters:
            if self.supportsFilters:
                for filter in self.filters:
                    command += ['--filter', filter]
            else:
                log.msg("Git versions < 2.27.0 don't support filters on clone")
        command += [self.repourl, '.']
        if self.prog:
            if self.supportsProgress:
                command.append('--progress')
            else:
                log.msg("Git versions < 1.7.2 don't support progress")
        if self.retry:
            abandonOnFailure = self.retry[1] <= 0
        else:
            abandonOnFailure = True
        # If it's a shallow clone abort build step
        res = yield self._dovccmd(command, abandonOnFailure=(abandonOnFailure and shallowClone))
        if switchToBranch:
            res = yield self._fetch(None, shallowClone=shallowClone)
        done = self.stopped or res == RC_SUCCESS  # or shallow clone??
        if self.retry and not done:
            delay, repeats = self.retry
            if repeats > 0:
                log.msg(f"Checkout failed, trying {repeats} more times after {delay} seconds")
                self.retry = (delay, repeats - 1)
                # Clobber and re-clone after 'delay' seconds.
                df = defer.Deferred()
                df.addCallback(lambda _: self._doClobber())
                df.addCallback(lambda _: self._clone(shallowClone))
                reactor.callLater(delay, df.callback, None)
                res = yield df
        return res

    @defer.inlineCallbacks
    def _fullClone(self, shallowClone=False):
        """Perform full clone and checkout to the revision if specified
        In the case of shallow clones if any of the step fail abort whole build step.
        """
        res = yield self._clone(shallowClone)
        if res != RC_SUCCESS:
            return res
        # If revision specified checkout that revision
        if self.revision:
            res = yield self._dovccmd(['checkout', '-f', self.revision], shallowClone)
        # init and update submodules, recursively. If there's not recursion
        # it will not do it.
        if self.submodules:
            cmdArgs = ["submodule", "update", "--init", "--recursive"]
            if self.remoteSubmodules:
                cmdArgs.append("--remote")
            if shallowClone:
                cmdArgs.extend(["--depth", str(int(shallowClone))])
            res = yield self._dovccmd(cmdArgs, shallowClone)
        return res

    @defer.inlineCallbacks
    def _fullCloneOrFallback(self, shallowClone):
        """Wrapper for _fullClone(). In the case of failure, if clobberOnFailure
        is set to True remove the build directory and try a full clone again.
        """
        res = yield self._fullClone(shallowClone)
        if res != RC_SUCCESS:
            if not self.clobberOnFailure:
                raise buildstep.BuildStepFailed()
            res = yield self.clobber()
        return res

    @defer.inlineCallbacks
    def _doClobber(self):
        """Remove the work directory"""
        rc = yield self.runRmdir(self.workdir, timeout=self.timeout)
        if rc != RC_SUCCESS:
            raise RuntimeError("Failed to delete directory")
        return rc

    def computeSourceRevision(self, changes):
        """Use the revision of the most recent change, if any."""
        if not changes:
            return None
        return changes[-1].revision

    @defer.inlineCallbacks
    def _syncSubmodule(self, _=None):
        """Run 'git submodule sync' when submodules are enabled."""
        rc = RC_SUCCESS
        if self.submodules:
            rc = yield self._dovccmd(['submodule', 'sync'])
        return rc

    @defer.inlineCallbacks
    def _updateSubmodule(self, _=None):
        """Run 'git submodule update --init --recursive' when enabled."""
        rc = RC_SUCCESS
        if self.submodules:
            vccmd = ['submodule', 'update', '--init', '--recursive']
            if self.supportsSubmoduleForce:
                vccmd.extend(['--force'])
            if self.supportsSubmoduleCheckout:
                vccmd.extend(["--checkout"])
            if self.remoteSubmodules:
                vccmd.extend(["--remote"])
            rc = yield self._dovccmd(vccmd)
        return rc

    @defer.inlineCallbacks
    def _cleanSubmodule(self, _=None):
        """git clean inside each submodule ('-x' too for full/fresh)."""
        rc = RC_SUCCESS
        if self.submodules:
            subcommand = 'git clean -f -f -d'
            if self.mode == 'full' and self.method == 'fresh':
                subcommand += ' -x'
            command = ['submodule', 'foreach', '--recursive', subcommand]
            rc = yield self._dovccmd(command)
        return rc

    def _getMethod(self):
        """Resolve the effective method: None for incremental, 'fresh' default."""
        if self.method is not None and self.mode != 'incremental':
            return self.method
        elif self.mode == 'incremental':
            return None
        elif self.method is None and self.mode == 'full':
            return 'fresh'
        return None

    @defer.inlineCallbacks
    def applyPatch(self, patch):
        """Apply a (level, diff) patch tuple to the index."""
        yield self._dovccmd(['update-index', '--refresh'])
        res = yield self._dovccmd(['apply', '--index', '-p', str(patch[0])], initialStdin=patch[1])
        return res

    @defer.inlineCallbacks
    def _sourcedirIsUpdatable(self):
        """Classify the workdir as 'update', 'clone' or 'clobber'."""
        # Older workers cannot list directories; fall back to a .git check.
        if self.workerVersionIsOlderThan('listdir', '2.16'):
            git_path = self.build.path_module.join(self.workdir, '.git')
            exists = yield self.pathExists(git_path)
            if exists:
                return "update"
            return "clone"
        cmd = remotecommand.RemoteCommand('listdir', {'dir': self.workdir})
        cmd.useLog(self.stdio_log, False)
        yield self.runCommand(cmd)
        if 'files' not in cmd.updates:
            # no files - directory doesn't exist
            return "clone"
        files = cmd.updates['files'][0]
        if '.git' in files:
            return "update"
        elif files:
            # Non-empty directory without a repo: must be wiped first.
            return "clobber"
        else:
            return "clone"
class GitPush(buildstep.BuildStep, GitStepMixin, CompositeStepMixin):
    """Build step pushing the given branch of the workdir repo to repourl."""

    description = None
    descriptionDone = None
    descriptionSuffix = None
    name = 'gitpush'
    renderables = ['repourl', 'branch']

    def __init__(
        self,
        workdir=None,
        repourl=None,
        branch=None,
        force=False,
        env=None,
        timeout=20 * 60,
        logEnviron=True,
        sshPrivateKey=None,
        sshHostKey=None,
        sshKnownHosts=None,
        auth_credentials: tuple[IRenderable | str, IRenderable | str] | None = None,
        git_credentials: GitCredentialOptions | None = None,
        config=None,
        **kwargs,
    ):
        """
        :param workdir: directory containing the local repository to push from.
        :param repourl: remote URL to push to.
        :param branch: branch to push (required).
        :param force: if True, pass --force to git push.
        """
        self.workdir = workdir
        self.repourl = repourl
        self.branch = branch
        self.force = force
        self.env = env
        self.timeout = timeout
        self.logEnviron = logEnviron
        self.config = config
        super().__init__(**kwargs)
        self.setupGitStep()
        if auth_credentials is not None:
            git_credentials = add_user_password_to_credentials(
                auth_credentials,
                repourl,
                git_credentials,
            )
        self.setup_git_auth(
            sshPrivateKey,
            sshHostKey,
            sshKnownHosts,
            git_credentials,
        )
        if not self.branch:
            bbconfig.error('GitPush: must provide branch')

    def _get_auth_data_workdir(self):
        """Auth files live in the push workdir."""
        return self.workdir

    @defer.inlineCallbacks
    def run(self):
        """Check git support, install auth files, push, then clean up auth."""
        self.stdio_log = yield self.addLog("stdio")
        auth_workdir = self._get_auth_data_workdir()
        try:
            gitInstalled = yield self.checkFeatureSupport()
            if not gitInstalled:
                raise WorkerSetupError("git is not installed on worker")
            yield self._git_auth.download_auth_files_if_needed(auth_workdir)
            ret = yield self._doPush()
            return ret
        finally:
            # Always remove auth material, even on failure.
            yield self._git_auth.remove_auth_files_if_needed(auth_workdir)

    @defer.inlineCallbacks
    def _doPush(self):
        """Run 'git push <repourl> <branch>' (optionally with --force)."""
        cmd = ['push', self.repourl, self.branch]
        if self.force:
            cmd.append('--force')
        ret = yield self._dovccmd(cmd)
        return ret
class GitTag(buildstep.BuildStep, GitStepMixin, CompositeStepMixin):
    """Build step creating a (possibly annotated) git tag in the workdir."""

    description = None
    descriptionDone = None
    descriptionSuffix = None
    name = 'gittag'
    renderables = ['repourl', 'tagName', 'messages']

    def __init__(
        self,
        workdir=None,
        tagName=None,
        annotated=False,
        messages=None,
        force=False,
        env=None,
        timeout=20 * 60,
        logEnviron=True,
        config=None,
        **kwargs,
    ):
        """
        :param workdir: directory containing the repository to tag.
        :param tagName: name of the tag to create (required).
        :param annotated: create an annotated tag ('-a') with the messages.
        :param messages: list of '-m' messages; required iff annotated.
        :param force: if True, pass --force to git tag.
        """
        self.workdir = workdir
        self.tagName = tagName
        self.annotated = annotated
        self.messages = messages
        self.force = force
        self.env = env
        self.timeout = timeout
        self.logEnviron = logEnviron
        self.config = config
        # These attributes are required for GitStepMixin but not useful to tag
        self.repourl = " "
        super().__init__(**kwargs)
        self.setupGitStep()
        if not self.tagName:
            bbconfig.error('GitTag: must provide tagName')
        if self.annotated and not self.messages:
            bbconfig.error('GitTag: must provide messages in case of annotated tag')
        if not self.annotated and self.messages:
            bbconfig.error('GitTag: messages are required only in case of annotated tag')
        if self.messages and not isinstance(self.messages, list):
            bbconfig.error('GitTag: messages should be a list')

    @defer.inlineCallbacks
    def run(self):
        """Verify git is available on the worker, then create the tag."""
        self.stdio_log = yield self.addLog("stdio")
        git_installed = yield self.checkFeatureSupport()
        if not git_installed:
            raise WorkerSetupError("git is not installed on worker")
        rc = yield self._doTag()
        return rc

    @defer.inlineCallbacks
    def _doTag(self):
        """Assemble and run the 'git tag' command."""
        if self.annotated:
            # Annotated tag: '-a <name>' followed by one '-m' per message.
            cmd = ['tag', '-a', self.tagName]
            for message in self.messages:
                cmd += ['-m', message]
        else:
            # Lightweight tag: just the name.
            cmd = ['tag', self.tagName]
        if self.force:
            cmd += ['--force']
        rc = yield self._dovccmd(cmd)
        return rc
class GitCommit(buildstep.BuildStep, GitStepMixin, CompositeStepMixin):
    """Build step running 'git add' on the given paths and committing them.

    ``emptyCommits`` controls behavior when nothing is staged:
    'disallow' lets git fail, 'create-empty-commit' passes --allow-empty,
    and 'ignore' skips the commit entirely.
    """

    description = None
    descriptionDone = None
    descriptionSuffix = None
    name = 'gitcommit'
    renderables = ['paths', 'messages']

    def __init__(
        self,
        workdir=None,
        paths=None,
        messages=None,
        env=None,
        timeout=20 * 60,
        logEnviron=True,
        emptyCommits='disallow',
        config=None,
        no_verify=False,
        **kwargs,
    ):
        """
        :param workdir: directory containing the repository to commit in.
        :param paths: list of paths to 'git add' (required).
        :param messages: list of '-m' commit messages (required).
        :param emptyCommits: 'disallow', 'create-empty-commit' or 'ignore'.
        :param no_verify: if True, pass --no-verify to skip commit hooks.
        """
        self.workdir = workdir
        self.messages = messages
        self.paths = paths
        self.env = env
        self.timeout = timeout
        self.logEnviron = logEnviron
        self.config = config
        self.emptyCommits = emptyCommits
        self.no_verify = no_verify
        # The repourl attribute is required by
        # GitStepMixin, but isn't needed by git add and commit operations
        self.repourl = " "
        super().__init__(**kwargs)
        self.setupGitStep()
        if not self.messages:
            bbconfig.error('GitCommit: must provide messages')
        if not isinstance(self.messages, list):
            bbconfig.error('GitCommit: messages must be a list')
        if not self.paths:
            bbconfig.error('GitCommit: must provide paths')
        if not isinstance(self.paths, list):
            bbconfig.error('GitCommit: paths must be a list')
        if self.emptyCommits not in ('disallow', 'create-empty-commit', 'ignore'):
            bbconfig.error(
                'GitCommit: emptyCommits must be one of "disallow", '
                '"create-empty-commit" and "ignore"'
            )

    @defer.inlineCallbacks
    def run(self):
        """Verify git, refuse detached HEAD, then add and commit."""
        self.stdio_log = yield self.addLog("stdio")
        gitInstalled = yield self.checkFeatureSupport()
        if not gitInstalled:
            raise WorkerSetupError("git is not installed on worker")
        yield self._checkDetachedHead()
        yield self._doAdd()
        yield self._doCommit()
        return RC_SUCCESS

    @defer.inlineCallbacks
    def _checkDetachedHead(self):
        """Fail the step when HEAD is detached (commit would be orphaned)."""
        cmd = ['symbolic-ref', 'HEAD']
        rc = yield self._dovccmd(cmd, abandonOnFailure=False)
        if rc != RC_SUCCESS:
            yield self.stdio_log.addStderr("You are in detached HEAD")
            raise buildstep.BuildStepFailed

    @defer.inlineCallbacks
    def _checkHasSomethingToCommit(self):
        """Return True when 'git status' shows staged index changes."""
        cmd = ['status', '--porcelain=v1']
        stdout = yield self._dovccmd(cmd, collectStdout=True)
        for line in stdout.splitlines(False):
            # Guard against blank lines: indexing an empty string would
            # raise IndexError. The first column of porcelain v1 output
            # is the index (staged) status code.
            if line and line[0] in 'MADRCU':
                return True
        return False

    @defer.inlineCallbacks
    def _doCommit(self):
        """Run 'git commit' with the configured messages and options."""
        if self.emptyCommits == 'ignore':
            has_commit = yield self._checkHasSomethingToCommit()
            if not has_commit:
                return 0
        cmd = ['commit']
        for message in self.messages:
            cmd.extend(['-m', message])
        if self.emptyCommits == 'create-empty-commit':
            cmd.extend(['--allow-empty'])
        if self.no_verify:
            cmd.extend(['--no-verify'])
        ret = yield self._dovccmd(cmd)
        return ret

    @defer.inlineCallbacks
    def _doAdd(self):
        """Run 'git add' on the configured paths."""
        cmd = ['add']
        cmd.extend(self.paths)
        ret = yield self._dovccmd(cmd)
        return ret
| 28,880 | Python | .py | 740 | 28.995946 | 99 | 0.595591 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,882 | gerrit.py | buildbot_buildbot/master/buildbot/steps/source/gerrit.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from buildbot.steps.source.git import Git
class Gerrit(Git):
    """Git checkout step aware of Gerrit change refs.

    When the build carries Gerrit event properties for this step's project,
    check out the change's patch-set ref instead of the nominal branch.
    """

    def run_vc(self, branch, revision, patch):
        gerrit_branch = None
        changed_project = self.build.getProperty('event.change.project')
        if not self.sourcestamp or (self.sourcestamp.project != changed_project):
            # If we don't have a sourcestamp, or the project is wrong, this
            # isn't the repo that's changed. Drop through and check out the
            # head of the given branch
            pass
        elif self.build.hasProperty("event.patchSet.ref"):
            gerrit_branch = self.build.getProperty("event.patchSet.ref")
            self.updateSourceProperty("gerrit_branch", gerrit_branch)
        else:
            try:
                change = self.build.getProperty("gerrit_change", '').split('/')
                if len(change) == 2:
                    # Gerrit change refs look like
                    # refs/changes/<NN>/<change>/<patchset> where <NN> is the
                    # change number modulo 100, ZERO-padded to two digits.
                    # ':02' zero-pads; the previous ':2' padded with a space,
                    # yielding an invalid ref (e.g. 'refs/changes/ 5/5/1')
                    # for change numbers whose last two digits are < 10.
                    gerrit_branch = (
                        f"refs/changes/{(int(change[0]) % 100):02}/{int(change[0])}/{int(change[1])}"
                    )
                    self.updateSourceProperty("gerrit_branch", gerrit_branch)
            except Exception:
                # Malformed 'gerrit_change' property: fall back to the branch.
                pass
        branch = gerrit_branch or branch
        return super().run_vc(branch, revision, patch)
| 1,949 | Python | .py | 39 | 41.435897 | 100 | 0.658613 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,883 | darcs.py | buildbot_buildbot/master/buildbot/steps/source/darcs.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
"""
Source step code for darcs
"""
from twisted.internet import defer
from twisted.internet import reactor
from twisted.python import log
from buildbot.config import ConfigErrors
from buildbot.interfaces import WorkerSetupError
from buildbot.process import buildstep
from buildbot.process import remotecommand
from buildbot.process import results
from buildbot.process.results import SUCCESS
from buildbot.steps.source.base import Source
class Darcs(Source):
    """Check out a source tree from a Darcs repository.

    Modes:
      - ``incremental``: pull new patches into the existing tree.
      - ``full``: rebuild the tree, using method ``clobber`` (delete and
        re-get) or ``copy`` (keep a pristine 'source' checkout and copy it
        to 'build'; the default).
    """

    name = 'darcs'

    renderables = ['repourl']
    possible_methods = ('clobber', 'copy')

    def __init__(self, repourl=None, mode='incremental', method=None, **kwargs):
        self.repourl = repourl
        self.method = method
        self.mode = mode
        super().__init__(**kwargs)

        # Validate the mode/method combination up front so misconfiguration
        # fails at config-load time rather than mid-build.
        errors = []
        if not self._hasAttrGroupMember('mode', self.mode):
            errors.append(f"mode {self.mode} is not one of {self._listAttrGroupMembers('mode')}")
        if self.mode == 'incremental' and self.method:
            errors.append("Incremental mode does not require method")

        if self.mode == 'full':
            if self.method is None:
                self.method = 'copy'
            elif self.method not in self.possible_methods:
                errors.append(f"Invalid method for mode == {self.mode}")

        if repourl is None:
            errors.append("you must provide repourl")

        if errors:
            raise ConfigErrors(errors)

    @defer.inlineCallbacks
    def run_vc(self, branch, revision, patch):
        """Entry point: check out or update the tree, then apply `patch`."""
        self.revision = revision
        self.stdio_log = yield self.addLogForRemoteCommands("stdio")

        installed = yield self.checkDarcs()
        if not installed:
            raise WorkerSetupError("Darcs is not installed on worker")

        # A tree patched by a previous build cannot be updated in place;
        # rebuild it via copy() before proceeding.
        patched = yield self.sourcedirIsPatched()
        if patched:
            yield self.copy()

        yield self._getAttrGroupMember('mode', self.mode)()

        if patch:
            yield self.patch(patch)
        yield self.parseGotRevision()
        return results.SUCCESS

    @defer.inlineCallbacks
    def checkDarcs(self):
        """Return True if the `darcs` executable is available on the worker."""
        cmd = remotecommand.RemoteShellCommand(
            self.workdir,
            ['darcs', '--version'],
            env=self.env,
            logEnviron=self.logEnviron,
            timeout=self.timeout,
        )
        cmd.useLog(self.stdio_log, False)
        yield self.runCommand(cmd)
        return cmd.rc == 0

    @defer.inlineCallbacks
    def mode_full(self):
        if self.method == 'clobber':
            yield self.clobber()
            return
        elif self.method == 'copy':
            yield self.copy()
            return

    @defer.inlineCallbacks
    def mode_incremental(self):
        updatable = yield self._sourcedirIsUpdatable()
        if not updatable:
            yield self._checkout()
        else:
            command = ['darcs', 'pull', '--all', '--verbose']
            yield self._dovccmd(command)

    @defer.inlineCallbacks
    def copy(self):
        """Maintain a pristine checkout in 'source'; copy it to 'build'."""
        cmd = remotecommand.RemoteCommand(
            'rmdir',
            {
                'dir': self.workdir,
                'logEnviron': self.logEnviron,
                'timeout': self.timeout,
            },
        )
        cmd.useLog(self.stdio_log, False)
        yield self.runCommand(cmd)

        # Update (or create) the pristine 'source' checkout...
        self.workdir = 'source'
        yield self.mode_incremental()

        # ...then copy it wholesale into 'build'.
        cmd = remotecommand.RemoteCommand(
            'cpdir',
            {
                'fromdir': 'source',
                'todir': 'build',
                'logEnviron': self.logEnviron,
                'timeout': self.timeout,
            },
        )
        cmd.useLog(self.stdio_log, False)
        yield self.runCommand(cmd)

        self.workdir = 'build'

    @defer.inlineCallbacks
    def clobber(self):
        yield self.runRmdir(self.workdir)
        yield self._checkout()

    @defer.inlineCallbacks
    def _clone(self, abandonOnFailure=False):
        """Fresh `darcs get` of the repository; returns the command's rc."""
        command = ['darcs', 'get', '--verbose', '--lazy', '--repo-name', self.workdir]

        # Pin the checkout to an exact repository state via a context file.
        if self.revision:
            yield self.downloadFileContentToWorker('.darcs-context', self.revision)
            command.append('--context')
            command.append('.darcs-context')
        command.append(self.repourl)

        # BUG FIX: propagate the exit code. Previously the result of
        # _dovccmd was discarded (implicitly returning None), so _checkout's
        # `res == 0` success test never passed and a configured retry would
        # rm the workdir and re-clone even after a successful get.
        res = yield self._dovccmd(command, abandonOnFailure=abandonOnFailure, wkdir='.')
        return res

    @defer.inlineCallbacks
    def _checkout(self):
        # Abandon immediately only when no retries remain.
        if self.retry:
            abandonOnFailure = self.retry[1] <= 0
        else:
            abandonOnFailure = True

        res = yield self._clone(abandonOnFailure)

        if self.retry:
            if self.stopped or res == 0:
                return res
            delay, repeats = self.retry
            if repeats > 0:
                log.msg(f"Checkout failed, trying {repeats} more times after {delay} seconds")
                self.retry = (delay, repeats - 1)
                df = defer.Deferred()
                df.addCallback(lambda _: self.runRmdir(self.workdir))
                df.addCallback(lambda _: self._checkout())
                reactor.callLater(delay, df.callback, None)
                res = yield df
        return res

    @defer.inlineCallbacks
    def parseGotRevision(self):
        # The output of `darcs changes --max-count=1` identifies the
        # checked-out state; store it as got_revision.
        revision = yield self._dovccmd(['darcs', 'changes', '--max-count=1'], collectStdout=True)
        self.updateSourceProperty('got_revision', revision)

    @defer.inlineCallbacks
    def _dovccmd(
        self,
        command,
        collectStdout=False,
        initialStdin=None,
        decodeRC=None,
        abandonOnFailure=True,
        wkdir=None,
    ):
        """Run `command` on the worker; return stdout (if collected) or rc."""
        if not command:
            raise ValueError("No command specified")

        if decodeRC is None:
            decodeRC = {0: SUCCESS}
        workdir = wkdir or self.workdir
        cmd = remotecommand.RemoteShellCommand(
            workdir,
            command,
            env=self.env,
            logEnviron=self.logEnviron,
            timeout=self.timeout,
            collectStdout=collectStdout,
            initialStdin=initialStdin,
            decodeRC=decodeRC,
        )
        cmd.useLog(self.stdio_log, False)
        yield self.runCommand(cmd)

        if abandonOnFailure and cmd.didFail():
            log.msg(f"Source step failed while running command {cmd}")
            raise buildstep.BuildStepFailed()
        if collectStdout:
            return cmd.stdout
        return cmd.rc

    def _sourcedirIsUpdatable(self):
        # A darcs working tree is recognizable by its _darcs directory.
        return self.pathExists(self.build.path_module.join(self.workdir, '_darcs'))
| 7,269 | Python | .py | 192 | 28.692708 | 97 | 0.620224 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,884 | repo.py | buildbot_buildbot/master/buildbot/steps/source/repo.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import re
import textwrap
from typing import ClassVar
from typing import Sequence
from twisted.internet import defer
from twisted.internet import reactor
from zope.interface import implementer
from buildbot import util
from buildbot.interfaces import IRenderable
from buildbot.process import buildstep
from buildbot.process import remotecommand
from buildbot.process import results
from buildbot.steps.source.base import Source
@implementer(IRenderable)
class RepoDownloadsFromProperties(util.ComparableMixin):
    """Renderable that collects "repo download" requests from build properties."""

    # Accepted spellings, tried in order: the full "repo download proj n/p"
    # command line (copy/paste from the Gerrit web UI), then "proj n/p",
    # then "proj/n/p".
    parse_download_re = (
        re.compile(r"repo download ([^ ]+) ([0-9]+/[0-9]+)"),
        re.compile(r"([^ ]+) ([0-9]+/[0-9]+)"),
        re.compile(r"([^ ]+)/([0-9]+/[0-9]+)"),
    )

    compare_attrs: ClassVar[Sequence[str]] = ('names',)

    def __init__(self, names):
        self.names = names

    def getRenderingFor(self, props):
        downloads = []
        for name in self.names:
            value = props.getProperty(name)
            if value is not None:
                downloads.extend(self.parseDownloadProperty(value))
        return downloads

    def parseDownloadProperty(self, s):
        """Parse a property value into "project change/patchset" strings.

        Several instances of "repo download proj number/patch" (direct copy
        paste from the Gerrit web site) or of the simpler "proj number/patch"
        form may appear in a single property; this lets an integrator build
        with several pending, interdependent changes.

        Returns the list of repo downloads to send to the worker.
        """
        if s is None:
            return []
        found = []
        for pattern in self.parse_download_re:
            # Repeatedly cut each matched span out of the string so the same
            # pattern can find the remaining occurrences.
            match = pattern.search(s)
            while match is not None:
                found.append(f"{match.group(1)} {match.group(2)}")
                s = s[: match.start(0)] + s[match.end(0) :]
                match = pattern.search(s)
        return found
@implementer(IRenderable)
class RepoDownloadsFromChangeSource(util.ComparableMixin):
    """Renderable producing "repo download" requests from the build's Gerrit changes."""

    compare_attrs: ClassVar[Sequence[str]] = ('codebase',)

    def __init__(self, codebase=None):
        self.codebase = codebase

    def getRenderingFor(self, props):
        build = props.getBuild()
        if self.codebase is None:
            changes = build.allChanges()
        else:
            changes = build.getSourceStamp(self.codebase).changes

        downloads = []
        for change in changes:
            # Only changes announced by a Gerrit "patchset-created" event
            # carry the properties needed to build a download request.
            if (
                "event.type" not in change.properties
                or change.properties["event.type"] != "patchset-created"
            ):
                continue
            downloads.append(
                f'{change.properties["event.change.project"]} '
                f'{change.properties["event.change.number"]}/'
                f'{change.properties["event.patchSet.number"]}'
            )
        return downloads
class Repo(Source):
    """Class for Repo with all the smarts"""
    name = 'repo'
    # configuration attributes that may be renderables (rendered with build
    # properties before the step runs)
    renderables = [
        "manifestURL",
        "manifestBranch",
        "manifestFile",
        "tarball",
        "jobs",
        "syncAllBranches",
        "updateTarballAge",
        "manifestOverrideUrl",
        "repoDownloads",
        "depth",
        "submodules",
    ]
    # output patterns used to detect mirror desynchronization and
    # cherry-pick conflicts in `repo download` output
    ref_not_found_re = re.compile(r"fatal: Couldn't find remote ref")
    cherry_pick_error_re = re.compile(
        r"|".join([
            r"Automatic cherry-pick failed",
            r"error: fatal: possibly due to conflict resolution.",
        ])
    )
    # patterns used to extract the downloaded change and the resulting HEAD
    # commit hash from `repo download` stderr
    re_change = re.compile(r".* refs/changes/\d\d/(\d+)/(\d+) -> FETCH_HEAD$")
    re_head = re.compile(r"^HEAD is now at ([0-9a-f]+)...")
    # number of retries, if we detect mirror desynchronization
    mirror_sync_retry = 10
    # wait 1min between retries (thus default total retry time is 10min)
    mirror_sync_sleep = 60
    def __init__(
        self,
        manifestURL=None,
        manifestBranch="master",
        manifestFile="default.xml",
        tarball=None,
        jobs=None,
        syncAllBranches=False,
        updateTarballAge=7 * 24.0 * 3600.0,
        manifestOverrideUrl=None,
        repoDownloads=None,
        depth=0,
        submodules=False,
        syncQuietly=False,
        **kwargs,
    ):
        """
        @type manifestURL: string
        @param manifestURL: The URL which points at the repo manifests repository.
        @type manifestBranch: string
        @param manifestBranch: The manifest branch to check out by default.
        @type manifestFile: string
        @param manifestFile: The manifest to use for sync.
        @type syncAllBranches: bool.
        @param syncAllBranches: true, then we must slowly synchronize all branches.
        @type updateTarballAge: float
        @param updateTarballAge: renderable to determine the update tarball policy,
                                 given properties
                                 Returns: max age of tarball in seconds, or None, if we
                                 want to skip tarball update
        @type manifestOverrideUrl: string
        @param manifestOverrideUrl: optional http URL for overriding the manifest
                                    usually coming from Property setup by a ForceScheduler
        @type repoDownloads: list of strings
        @param repoDownloads: optional repo download to perform after the repo sync
        @type depth: integer
        @param depth: optional depth parameter to repo init.
                      If specified, create a shallow clone with given depth.
        @type submodules: string
        @param submodules: optional submodules parameter to repo init.
        @type syncQuietly: bool.
        @param syncQuietly: true, then suppress verbose output from repo sync.
        """
        self.manifestURL = manifestURL
        self.manifestBranch = manifestBranch
        self.manifestFile = manifestFile
        self.tarball = tarball
        self.jobs = jobs
        self.syncAllBranches = syncAllBranches
        self.updateTarballAge = updateTarballAge
        self.manifestOverrideUrl = manifestOverrideUrl
        if repoDownloads is None:
            repoDownloads = []
        self.repoDownloads = repoDownloads
        self.depth = depth
        self.submodules = submodules
        self.syncQuietly = syncQuietly
        super().__init__(**kwargs)
        assert self.manifestURL is not None
    def computeSourceRevision(self, changes):
        # the latest change determines the revision of the whole build
        if not changes:
            return None
        return changes[-1].revision
    def filterManifestPatches(self):
        """
        Patches to manifest projects are a bit special.
        repo does not support a way to download them automatically,
        so we need to implement the boilerplate manually.
        This code separates the manifest patches from the other patches,
        and generates commands to import those manifest patches.
        """
        manifest_unrelated_downloads = []
        manifest_related_downloads = []
        for download in self.repoDownloads:
            project, ch_ps = download.split(" ")[-2:]
            if self.manifestURL.endswith("/" + project) or self.manifestURL.endswith(
                "/" + project + ".git"
            ):
                # manifest change: fetch + cherry-pick it by hand in the
                # .repo/manifests checkout (see doRepoSync)
                ch, ps = map(int, ch_ps.split("/"))
                branch = f"refs/changes/{ch % 100:02}/{ch}/{ps}"
                manifest_related_downloads.append(["git", "fetch", self.manifestURL, branch])
                manifest_related_downloads.append(["git", "cherry-pick", "FETCH_HEAD"])
            else:
                manifest_unrelated_downloads.append(download)
        self.repoDownloads = manifest_unrelated_downloads
        self.manifestDownloads = manifest_related_downloads
    def _repoCmd(self, command, abandonOnFailure=True, **kwargs):
        # run a `repo` subcommand in the work directory
        return self._Cmd(["repo", *command], abandonOnFailure=abandonOnFailure, **kwargs)
    @defer.inlineCallbacks
    def _Cmd(self, command, abandonOnFailure=True, workdir=None, **kwargs):
        # run an arbitrary shell command on the worker, logging to stdio;
        # the command object is kept in self.lastCommand for output inspection
        if workdir is None:
            workdir = self.workdir
        cmd = remotecommand.RemoteShellCommand(
            workdir,
            command,
            env=self.env,
            logEnviron=self.logEnviron,
            timeout=self.timeout,
            **kwargs,
        )
        self.lastCommand = cmd
        # does not make sense to logEnviron for each command (just for first)
        self.logEnviron = False
        cmd.useLog(self.stdio_log, False)
        yield self.stdio_log.addHeader(f'Starting command: {" ".join(command)}\n')
        self.description = ' '.join(command[:2])
        # FIXME: enable when new style step is switched on yield self.updateSummary()
        yield self.runCommand(cmd)
        if abandonOnFailure and cmd.didFail():
            self.descriptionDone = f'repo failed at: {" ".join(command[:2])}'
            msg = f"Source step failed while running command {cmd}\n"
            yield self.stdio_log.addStderr(msg)
            raise buildstep.BuildStepFailed()
        return cmd.rc
    def repoDir(self):
        return self.build.path_module.join(self.workdir, ".repo")
    def sourcedirIsUpdateable(self):
        # the checkout can be updated in place iff a .repo directory exists
        return self.pathExists(self.repoDir())
    def run_vc(self, branch, revision, patch):
        # branch/revision/patch are ignored: what is built is entirely
        # determined by the manifest and the repoDownloads list
        return self.doStartVC()
    @defer.inlineCallbacks
    def doStartVC(self):
        self.stdio_log = yield self.addLogForRemoteCommands("stdio")
        self.filterManifestPatches()
        if self.repoDownloads:
            yield self.stdio_log.addHeader(
                "will download:\nrepo download {}\n".format(
                    "\nrepo download ".join(self.repoDownloads)
                )
            )
        # first sync failure triggers one clobber + retry before giving up
        self.willRetryInCaseOfFailure = True
        try:
            yield self.doRepoSync()
        except buildstep.BuildStepFailed as e:
            if not self.willRetryInCaseOfFailure:
                raise
            yield self.stdio_log.addStderr(
                "got issue at first try:\n" + str(e) + "\nRetry after clobber..."
            )
            yield self.doRepoSync(forceClobber=True)
        yield self.maybeUpdateTarball()
        # starting from here, clobbering will not help
        yield self.doRepoDownloads()
        return results.SUCCESS
    @defer.inlineCallbacks
    def doClobberStart(self):
        # wipe the work directory and re-seed it from the tarball, if any
        yield self.runRmdir(self.workdir)
        yield self.runMkdir(self.workdir)
        yield self.maybeExtractTarball()
    @defer.inlineCallbacks
    def doRepoSync(self, forceClobber=False):
        updatable = yield self.sourcedirIsUpdateable()
        if not updatable or forceClobber:
            # no need to re-clobber in case of failure
            self.willRetryInCaseOfFailure = False
            yield self.doClobberStart()
        yield self.doCleanup()
        command = [
            'init',
            '-u',
            self.manifestURL,
            '-b',
            self.manifestBranch,
            '-m',
            self.manifestFile,
            '--depth',
            str(self.depth),
        ]
        if self.submodules:
            command.append('--submodules')
        yield self._repoCmd(command)
        if self.manifestOverrideUrl:
            # the "URL" may also be a path local to the worker; try a copy
            # first and fall back to wget
            msg = f"overriding manifest with {self.manifestOverrideUrl}\n"
            yield self.stdio_log.addHeader(msg)
            local_path = self.build.path_module.join(self.workdir, self.manifestOverrideUrl)
            local_file = yield self.pathExists(local_path)
            if local_file:
                yield self._Cmd(["cp", "-f", self.manifestOverrideUrl, "manifest_override.xml"])
            else:
                yield self._Cmd(["wget", self.manifestOverrideUrl, "-O", "manifest_override.xml"])
            yield self._Cmd(
                ["ln", "-sf", "../manifest_override.xml", "manifest.xml"],
                workdir=self.build.path_module.join(self.workdir, ".repo"),
            )
        # apply manifest patches prepared by filterManifestPatches()
        for command in self.manifestDownloads:
            yield self._Cmd(
                command, workdir=self.build.path_module.join(self.workdir, ".repo", "manifests")
            )
        command = ['sync', '--force-sync']
        if self.jobs:
            command.append('-j' + str(self.jobs))
        if not self.syncAllBranches:
            command.append('-c')
        if self.syncQuietly:
            command.append('-q')
        self.description = "repo sync"
        # FIXME: enable when new style step is used: yield self.updateSummary()
        yield self.stdio_log.addHeader(
            f"synching manifest {self.manifestFile} from branch "
            f"{self.manifestBranch} from {self.manifestURL}\n"
        )
        yield self._repoCmd(command)
        # record the exact revisions that were synced
        command = ['manifest', '-r', '-o', 'manifest-original.xml']
        yield self._repoCmd(command)
    # check whether msg matches one of the
    # compiled regexps in self.re_error_messages
    def _findErrorMessages(self, error_re):
        for logname in ['stderr', 'stdout']:
            if not hasattr(self.lastCommand, logname):
                continue
            msg = getattr(self.lastCommand, logname)
            if re.search(error_re, msg) is not None:
                return True
        return False
    def _sleep(self, delay):
        # deferred that fires after `delay` seconds
        d = defer.Deferred()
        reactor.callLater(delay, d.callback, 1)
        return d
    @defer.inlineCallbacks
    def doRepoDownloads(self):
        self.repo_downloaded = ""
        for download in self.repoDownloads:
            command = ["download", *download.split(" ")]
            yield self.stdio_log.addHeader(f"downloading changeset {download}\n")
            # a missing ref may just mean the mirror has not caught up yet:
            # retry with a delay before declaring the change nonexistent
            retry = self.mirror_sync_retry + 1
            while retry > 0:
                yield self._repoCmd(
                    command, abandonOnFailure=False, collectStdout=True, collectStderr=True
                )
                if not self._findErrorMessages(self.ref_not_found_re):
                    break
                retry -= 1
                yield self.stdio_log.addStderr(f"failed downloading changeset {download}\n")
                yield self.stdio_log.addHeader("wait one minute for mirror sync\n")
                yield self._sleep(self.mirror_sync_sleep)
            if retry == 0:
                self.descriptionDone = f"repo: change {download} does not exist"
                raise buildstep.BuildStepFailed()
            if self.lastCommand.didFail() or self._findErrorMessages(self.cherry_pick_error_re):
                # cherry pick error! We create a diff with status current workdir
                # in stdout, which reveals the merge errors and exit
                command = ['forall', '-c', 'git', 'diff', 'HEAD']
                yield self._repoCmd(command, abandonOnFailure=False)
                self.descriptionDone = f"download failed: {download}"
                raise buildstep.BuildStepFailed()
            if hasattr(self.lastCommand, 'stderr'):
                # record "<change>/<patchset> <sha1>" in the repo_downloaded
                # property for each successfully applied download
                lines = self.lastCommand.stderr.split("\n")
                match1 = match2 = False
                for line in lines:
                    if not match1:
                        match1 = self.re_change.match(line)
                    if not match2:
                        match2 = self.re_head.match(line)
                if match1 and match2:
                    self.repo_downloaded += (
                        f"{match1.group(1)}/{match1.group(2)} {match2.group(1)} "
                    )
        self.setProperty("repo_downloaded", self.repo_downloaded, "Source")
    def computeTarballOptions(self):
        # Keep in mind that the compression part of tarball generation
        # can be non negligible
        tar = ['tar']
        if self.tarball.endswith("pigz"):
            tar.append('-I')
            tar.append('pigz')
        elif self.tarball.endswith("gz"):
            tar.append('-z')
        elif self.tarball.endswith("bz2") or self.tarball.endswith("bz"):
            tar.append('-j')
        elif self.tarball.endswith("lzma"):
            tar.append('--lzma')
        elif self.tarball.endswith("lzop"):
            tar.append('--lzop')
        return tar
    @defer.inlineCallbacks
    def maybeExtractTarball(self):
        # seed the fresh workdir from the configured tarball, if any
        if self.tarball:
            tar = [*self.computeTarballOptions(), "-xvf", self.tarball]
            res = yield self._Cmd(tar, abandonOnFailure=False)
            if res:  # error with tarball.. erase repo dir and tarball
                yield self._Cmd(["rm", "-f", self.tarball], abandonOnFailure=False)
                yield self.runRmdir(self.repoDir(), abandonOnFailure=False)
    @defer.inlineCallbacks
    def maybeUpdateTarball(self):
        if not self.tarball or self.updateTarballAge is None:
            return
        # tarball path is absolute, so we cannot use worker's stat command
        # stat -c%Y gives mtime in second since epoch
        res = yield self._Cmd(
            ["stat", "-c%Y", self.tarball], collectStdout=True, abandonOnFailure=False
        )
        age = 0
        if not res:
            tarball_mtime = int(self.lastCommand.stdout)
            yield self._Cmd(["stat", "-c%Y", "."], collectStdout=True)
            now_mtime = int(self.lastCommand.stdout)
            age = now_mtime - tarball_mtime
        # regenerate the tarball when it is missing or older than the policy
        if res or age > self.updateTarballAge:
            tar = [*self.computeTarballOptions(), "-cvf", self.tarball, ".repo"]
            res = yield self._Cmd(tar, abandonOnFailure=False)
            if res:  # error with tarball.. erase tarball, but don't fail
                yield self._Cmd(["rm", "-f", self.tarball], abandonOnFailure=False)
    # a simple shell script to gather all cleanup tweaks...
    # doing them one by one just complicate the stuff
    # and mess up the stdio log
    def _getCleanupCommand(self):
        """also used by tests for expectations"""
        return textwrap.dedent("""\
            set -v
            if [ -d .repo/manifests ]
            then
                # repo just refuse to run if manifest is messed up
                # so ensure we are in a known state
                cd .repo/manifests
                rm -f .git/index.lock
                git fetch origin
                git reset --hard remotes/origin/%(manifestBranch)s
                git config branch.default.merge %(manifestBranch)s
                cd ..
                ln -sf manifests/%(manifestFile)s manifest.xml
                cd ..
            fi
            repo forall -c rm -f .git/index.lock
            repo forall -c git clean -f -d -x 2>/dev/null
            repo forall -c git reset --hard HEAD 2>/dev/null
            rm -f %(workdir)s/.repo/project.list
            """) % {
            "manifestBranch": self.manifestBranch,
            "manifestFile": self.manifestFile,
            "workdir": self.workdir,
        }
    def doCleanup(self):
        command = self._getCleanupCommand()
        return self._Cmd(["bash", "-c", command], abandonOnFailure=False)
| 19,521 | Python | .py | 448 | 32.892857 | 98 | 0.605777 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,885 | __init__.py | buildbot_buildbot/master/buildbot/steps/source/__init__.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from buildbot.steps.source.base import Source
# Reference the re-exported name so pyflakes does not flag the import as unused.
_hush_pyflakes = [Source]
| 779 | Python | .py | 16 | 47.5625 | 79 | 0.791064 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,886 | base.py | buildbot_buildbot/master/buildbot/steps/source/base.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import annotations
from twisted.internet import defer
from twisted.python import log
from buildbot.process import buildstep
from buildbot.process import properties
from buildbot.process import remotecommand
from buildbot.process.results import FAILURE
from buildbot.steps.worker import CompositeStepMixin
from buildbot.util import bytes2unicode
class Source(buildstep.BuildStep, CompositeStepMixin):
    """This is a base class to generate a source tree in the worker.
    Each version control system has a specialized subclass, and is expected
    to override __init__ and implement computeSourceRevision() and
    run_vc(). The class as a whole builds up the self.args dictionary, then
    starts a RemoteCommand with those arguments.
    """
    renderables = ['description', 'descriptionDone', 'descriptionSuffix', 'workdir', 'env']
    description = None  # set this to a list of short strings to override
    descriptionDone = None  # alternate description when the step is complete
    descriptionSuffix = None  # extra information to append to suffix
    # if the checkout fails, there's no point in doing anything else
    haltOnFailure = True
    flunkOnFailure = True
    branch: str | None = None  # the default branch, should be set in __init__
    def __init__(
        self,
        workdir=None,
        mode='update',
        alwaysUseLatest=False,
        timeout=20 * 60,
        retry=None,
        env=None,
        logEnviron=True,
        description=None,
        descriptionDone=None,
        descriptionSuffix=None,
        codebase='',
        **kwargs,
    ):
        """
        @type  workdir: string
        @param workdir: local directory (relative to the Builder's root)
                        where the tree should be placed

        @type  alwaysUseLatest: boolean
        @param alwaysUseLatest: whether to always update to the most
        recent available sources for this build.

        Normally the Source step asks its Build for a list of all
        Changes that are supposed to go into the build, then computes a
        'source stamp' (revision number or timestamp) that will cause
        exactly that set of changes to be present in the checked out
        tree. This is turned into, e.g., 'cvs update -D timestamp', or
        'svn update -r revnum'. If alwaysUseLatest=True, bypass this
        computation and always update to the latest available sources
        for each build.

        The source stamp helps avoid a race condition in which someone
        commits a change after the master has decided to start a build
        but before the worker finishes checking out the sources. At best
        this results in a build which contains more changes than the
        buildmaster thinks it has (possibly resulting in the wrong
        person taking the blame for any problems that result), at worst
        is can result in an incoherent set of sources (splitting a
        non-atomic commit) which may not build at all.

        @type logEnviron: boolean
        @param logEnviron: If this option is true (the default), then the
                           step's logfile will describe the environment
                           variables on the worker. In situations where the
                           environment is not relevant and is long, it may
                           be easier to set logEnviron=False.

        @type codebase: string
        @param codebase: Specifies which changes in a build are processed by
        the step. The default codebase value is ''. The codebase must correspond
        to a codebase assigned by the codebaseGenerator. If no codebaseGenerator
        is defined in the master then codebase doesn't need to be set, the
        default value will then match all changes.
        """
        descriptions_for_mode = {"clobber": "checkout", "export": "exporting"}
        descriptionDones_for_mode = {"clobber": "checkout", "export": "export"}
        if not description:
            description = [descriptions_for_mode.get(mode, "updating")]
        if not descriptionDone:
            descriptionDone = [descriptionDones_for_mode.get(mode, "update")]
        if not descriptionSuffix and codebase:
            descriptionSuffix = [codebase]
        super().__init__(
            description=description,
            descriptionDone=descriptionDone,
            descriptionSuffix=descriptionSuffix,
            **kwargs,
        )
        # This will get added to args later, after properties are rendered
        self.workdir = workdir
        self.sourcestamp = None
        self.codebase = codebase
        if self.codebase:
            # disambiguate the step name when several codebases are checked out
            self.name = properties.Interpolate(
                "%(kw:name)s-%(kw:codebase)s", name=self.name, codebase=self.codebase
            )
        self.alwaysUseLatest = alwaysUseLatest
        self.logEnviron = logEnviron
        self.env = env
        self.timeout = timeout
        self.retry = retry
    def _hasAttrGroupMember(self, attrGroup, attr):
        """
        The hasattr equivalent for attribute groups: returns whether the given
        member is in the attribute group.
        """
        method_name = f'{attrGroup}_{attr}'
        return hasattr(self, method_name)
    def _getAttrGroupMember(self, attrGroup, attr):
        """
        The getattr equivalent for attribute groups: gets and returns the
        attribute group member.
        """
        method_name = f'{attrGroup}_{attr}'
        return getattr(self, method_name)
    def _listAttrGroupMembers(self, attrGroup):
        """
        Returns a list of all members in the attribute group.
        """
        from inspect import getmembers
        from inspect import ismethod
        methods = getmembers(self, ismethod)
        group_prefix = attrGroup + '_'
        group_len = len(group_prefix)
        group_members = [
            method[0][group_len:] for method in methods if method[0].startswith(group_prefix)
        ]
        return group_members
    def updateSourceProperty(self, name, value, source=''):
        """
        Update a property, indexing the property by codebase if codebase is not
        ''. Source steps should generally use this instead of setProperty.
        """
        # pick a decent source name
        if source == '':
            source = self.__class__.__name__
        if self.codebase != '':
            # with a codebase the property is a dict keyed by codebase; mixing
            # that with plain-valued source steps is a configuration error
            assert not isinstance(
                self.getProperty(name, None), str
            ), f"Sourcestep {self.name} has a codebase, other sourcesteps don't"
            property_dict = self.getProperty(name, {})
            property_dict[self.codebase] = value
            super().setProperty(name, property_dict, source)
        else:
            assert not isinstance(
                self.getProperty(name, None), dict
            ), f"Sourcestep {self.name} does not have a codebase, other sourcesteps do"
            super().setProperty(name, value, source)
    def computeSourceRevision(self, changes):
        """Each subclass must implement this method to do something more
        precise than -rHEAD every time. For version control systems that use
        repository-wide change numbers (SVN, P4), this can simply take the
        maximum such number from all the changes involved in this build. For
        systems that do not (CVS), it needs to create a timestamp based upon
        the latest Change, the Build's treeStableTimer, and an optional
        self.checkoutDelay value."""
        return None
    @defer.inlineCallbacks
    def applyPatch(self, patch):
        # patch[0] is the -p strip level, the diff itself was previously
        # uploaded to .buildbot-diff (see patch() below)
        patch_command = [
            'patch',
            f'-p{patch[0]}',
            '--remove-empty-files',
            '--force',
            '--forward',
            '-i',
            '.buildbot-diff',
        ]
        cmd = remotecommand.RemoteShellCommand(
            self.workdir, patch_command, env=self.env, logEnviron=self.logEnviron
        )
        cmd.useLog(self.stdio_log, False)
        yield self.runCommand(cmd)
        if cmd.didFail():
            raise buildstep.BuildStepFailed()
        return cmd.rc
    @defer.inlineCallbacks
    def patch(self, patch):
        # 'patch' is a tuple of (patchlevel, diff) or (patchlevel, diff, root)
        diff = patch[1]
        root = None
        if len(patch) >= 3:
            root = patch[2]
        if root:
            workdir_root = self.build.path_module.join(self.workdir, root)
            workdir_root_abspath = self.build.path_module.abspath(workdir_root)
            workdir_abspath = self.build.path_module.abspath(self.workdir)
            # only honor 'root' when it stays inside the workdir
            if workdir_root_abspath.startswith(workdir_abspath):
                self.workdir = workdir_root
        yield self.downloadFileContentToWorker('.buildbot-diff', diff)
        # marker file: lets the next build detect a previously patched tree
        yield self.downloadFileContentToWorker('.buildbot-patched', 'patched\n')
        yield self.applyPatch(patch)
        cmd = remotecommand.RemoteCommand(
            'rmdir',
            {
                'dir': self.build.path_module.join(self.workdir, ".buildbot-diff"),
                'logEnviron': self.logEnviron,
            },
        )
        cmd.useLog(self.stdio_log, False)
        yield self.runCommand(cmd)
        if cmd.didFail():
            raise buildstep.BuildStepFailed()
        return cmd.rc
    def sourcedirIsPatched(self):
        # deferred -> bool: was the tree patched by a previous build?
        d = self.pathExists(self.build.path_module.join(self.workdir, '.buildbot-patched'))
        return d
    @defer.inlineCallbacks
    def run(self):
        if getattr(self, 'startVC', None) is not None:
            msg = (
                'Old-style source steps are no longer supported. Please convert your custom '
                'source step to new style (replace startVC with run_vc and convert all used '
                'old style APIs to new style). Please consider contributing the source step to '
                'upstream BuildBot so that such migrations can be avoided in the future.'
            )
            raise NotImplementedError(msg)
        if not self.alwaysUseLatest:
            # what source stamp would this step like to use?
            s = self.build.getSourceStamp(self.codebase)
            self.sourcestamp = s
            if self.sourcestamp:
                # if branch is None, then use the Step's "default" branch
                branch = s.branch or self.branch
                # if revision is None, use the latest sources (-rHEAD)
                revision = s.revision
                if not revision:
                    revision = self.computeSourceRevision(s.changes)
                    # the revision property is currently None, so set it to something
                    # more interesting
                    if revision is not None:
                        self.updateSourceProperty('revision', str(revision))
                # if patch is None, then do not patch the tree after checkout
                # 'patch' is None or a tuple of (patchlevel, diff, root)
                # root is optional.
                patch = s.patch
                if patch:
                    yield self.addCompleteLog("patch", bytes2unicode(patch[1], errors='ignore'))
            else:
                log.msg(f"No sourcestamp found in build for codebase '{self.codebase}'")
                self.descriptionDone = f"Codebase {self.codebase} not in build"
                yield self.addCompleteLog(
                    "log", "No sourcestamp found in build for " f"codebase '{self.codebase}'"
                )
                return FAILURE
        else:
            revision = None
            branch = self.branch
            patch = None
        res = yield self.run_vc(branch, revision, patch)
        return res
| 12,327 | Python | .py | 265 | 36.339623 | 96 | 0.637044 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,887 | svn.py | buildbot_buildbot/master/buildbot/steps/source/svn.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import re
import xml.dom.minidom
import xml.parsers.expat
from urllib.parse import quote as urlquote
from urllib.parse import unquote as urlunquote
from urllib.parse import urlparse
from urllib.parse import urlunparse
from twisted.internet import defer
from twisted.internet import reactor
from twisted.python import log
from buildbot.config import ConfigErrors
from buildbot.interfaces import WorkerSetupError
from buildbot.process import buildstep
from buildbot.process import remotecommand
from buildbot.steps.source.base import Source
class SVN(Source):
    """I perform Subversion checkout/update operations."""

    name = 'svn'

    renderables = ['repourl', 'password']
    possible_methods = ('clean', 'fresh', 'clobber', 'copy', 'export', None)

    def __init__(
        self,
        repourl=None,
        mode='incremental',
        method=None,
        username=None,
        password=None,
        extra_args=None,
        keep_on_purge=None,
        depth=None,
        preferLastChangedRev=False,
        **kwargs,
    ):
        """
        @param repourl: the URL of the Subversion repository (required).
        @param mode: 'incremental' or 'full'.
        @param method: for full mode: clean, fresh, clobber, copy or export.
        @param username/password: optional credentials passed to every command.
        @param extra_args: extra command-line arguments for every svn command.
        @param keep_on_purge: filenames never deleted by purge().
        @param depth: optional --depth argument (e.g. 'infinity', 'files').
        @param preferLastChangedRev: report 'Last Changed Rev' instead of
            'Revision' as got_revision when available.
        """
        self.repourl = repourl
        self.username = username
        self.password = password
        self.extra_args = extra_args
        self.keep_on_purge = keep_on_purge or []
        self.depth = depth
        self.method = method
        self.mode = mode
        self.preferLastChangedRev = preferLastChangedRev
        super().__init__(**kwargs)
        errors = []
        if not self._hasAttrGroupMember('mode', self.mode):
            errors.append(f"mode {self.mode} is not one of {self._listAttrGroupMembers('mode')}")
        if self.method not in self.possible_methods:
            errors.append(f"method {self.method} is not one of {self.possible_methods}")
        if repourl is None:
            errors.append("you must provide repourl")
        if errors:
            raise ConfigErrors(errors)

    @defer.inlineCallbacks
    def run_vc(self, branch, revision, patch):
        """Entry point: verify svn availability, purge a previously-patched
        tree, then dispatch to mode_full/mode_incremental and report the
        checked-out revision."""
        self.revision = revision
        self.method = self._getMethod()
        self.stdio_log = yield self.addLogForRemoteCommands("stdio")

        # if the version is new enough, and the password is set, then obfuscate
        # it
        if self.password is not None:
            if not self.workerVersionIsOlderThan('shell', '2.16'):
                self.password = ('obfuscated', self.password, 'XXXXXX')
            else:
                log.msg("Worker does not understand obfuscation; svn password will be logged")

        installed = yield self.checkSvn()
        if not installed:
            raise WorkerSetupError("SVN is not installed on worker")

        patched = yield self.sourcedirIsPatched()
        if patched:
            yield self.purge(False)

        yield self._getAttrGroupMember('mode', self.mode)()

        if patch:
            yield self.patch(patch)
        res = yield self.parseGotRevision()
        return res

    @defer.inlineCallbacks
    def mode_full(self):
        """Full checkout according to the configured method."""
        if self.method == 'clobber':
            yield self.clobber()
            return
        elif self.method in ['copy', 'export']:
            yield self.copy()
            return

        updatable = yield self._sourcedirIsUpdatable()
        if not updatable:
            # blow away the old (un-updatable) directory and checkout
            yield self.clobber()
        elif self.method == 'clean':
            yield self.clean()
        elif self.method == 'fresh':
            yield self.fresh()

    @defer.inlineCallbacks
    def mode_incremental(self):
        """Update the existing working copy in place, or clobber+checkout
        when it is not updatable."""
        updatable = yield self._sourcedirIsUpdatable()

        if not updatable:
            # blow away the old (un-updatable) directory and checkout
            yield self.clobber()
        else:
            # otherwise, do an update
            command = ['update']
            if self.revision:
                command.extend(['--revision', str(self.revision)])
            yield self._dovccmd(command)

    @defer.inlineCallbacks
    def clobber(self):
        """Remove the workdir entirely and do a fresh checkout."""
        yield self.runRmdir(self.workdir, timeout=self.timeout)
        yield self._checkout()

    @defer.inlineCallbacks
    def fresh(self):
        """Purge all unversioned files (including ignored ones) and update."""
        yield self.purge(True)
        cmd = ['update']
        if self.revision:
            cmd.extend(['--revision', str(self.revision)])
        yield self._dovccmd(cmd)

    @defer.inlineCallbacks
    def clean(self):
        """Purge unversioned (but not ignored) files and update."""
        yield self.purge(False)
        cmd = ['update']
        if self.revision:
            cmd.extend(['--revision', str(self.revision)])
        yield self._dovccmd(cmd)

    @defer.inlineCallbacks
    def copy(self):
        """Check out into a pristine 'source' directory and then copy
        (with .svn metadata) or export (clean tree) it into the workdir."""
        yield self.runRmdir(self.workdir, timeout=self.timeout)

        checkout_dir = 'source'
        if self.codebase:
            checkout_dir = self.build.path_module.join(checkout_dir, self.codebase)
        # temporarily set workdir = checkout_dir and do an incremental checkout
        old_workdir = self.workdir
        try:
            self.workdir = checkout_dir
            yield self.mode_incremental()
        finally:
            # fix: the finally clause alone restores workdir; the duplicated
            # assignment that followed it has been removed
            self.workdir = old_workdir

        # if we're copying, copy; otherwise, export from source to build
        if self.method == 'copy':
            cmd = remotecommand.RemoteCommand(
                'cpdir',
                {'fromdir': checkout_dir, 'todir': self.workdir, 'logEnviron': self.logEnviron},
            )
        else:
            export_cmd = ['svn', 'export']
            if self.revision:
                export_cmd.extend(["--revision", str(self.revision)])
            if self.username:
                export_cmd.extend(['--username', self.username])
            if self.password is not None:
                export_cmd.extend(['--password', self.password])
            if self.extra_args:
                export_cmd.extend(self.extra_args)
            export_cmd.extend([checkout_dir, self.workdir])

            cmd = remotecommand.RemoteShellCommand(
                '', export_cmd, env=self.env, logEnviron=self.logEnviron, timeout=self.timeout
            )
        cmd.useLog(self.stdio_log, False)

        yield self.runCommand(cmd)

        if cmd.didFail():
            raise buildstep.BuildStepFailed()

    @defer.inlineCallbacks
    def _dovccmd(self, command, collectStdout=False, collectStderr=False, abandonOnFailure=True):
        """Run an svn subcommand in the workdir, adding the configured
        credentials, depth and extra args. Returns rc, stdout, stderr or a
        (stdout, stderr) tuple depending on the collect* flags."""
        assert command, "No command specified"
        command.extend(['--non-interactive', '--no-auth-cache'])
        if self.username:
            command.extend(['--username', self.username])
        if self.password is not None:
            command.extend(['--password', self.password])
        if self.depth:
            command.extend(['--depth', self.depth])
        if self.extra_args:
            command.extend(self.extra_args)

        cmd = remotecommand.RemoteShellCommand(
            self.workdir,
            ['svn', *command],
            env=self.env,
            logEnviron=self.logEnviron,
            timeout=self.timeout,
            collectStdout=collectStdout,
            collectStderr=collectStderr,
        )
        cmd.useLog(self.stdio_log, False)
        yield self.runCommand(cmd)

        if cmd.didFail() and abandonOnFailure:
            log.msg(f"Source step failed while running command {cmd}")
            raise buildstep.BuildStepFailed()
        if collectStdout and collectStderr:
            return (cmd.stdout, cmd.stderr)
        elif collectStdout:
            return cmd.stdout
        elif collectStderr:
            return cmd.stderr
        return cmd.rc

    def _getMethod(self):
        """Resolve the effective method: None for incremental mode, the
        configured method otherwise, defaulting to 'fresh' for full mode."""
        if self.method is not None and self.mode != 'incremental':
            return self.method
        elif self.mode == 'incremental':
            return None
        elif self.method is None and self.mode == 'full':
            return 'fresh'
        return None

    @defer.inlineCallbacks
    def _sourcedirIsUpdatable(self):
        """Return True when workdir is a working copy of our repourl."""
        # first, perform a stat to ensure that this is really an svn directory
        res = yield self.pathExists(self.build.path_module.join(self.workdir, '.svn'))
        if not res:
            return False

        # then run 'svn info --xml' to check that the URL matches our repourl
        stdout, stderr = yield self._dovccmd(
            ['info', '--xml'], collectStdout=True, collectStderr=True, abandonOnFailure=False
        )

        # svn: E155037: Previous operation has not finished; run 'cleanup' if
        # it was interrupted
        if 'E155037:' in stderr:
            return False

        try:
            stdout_xml = xml.dom.minidom.parseString(stdout)
            extractedurl = stdout_xml.getElementsByTagName('url')[0].firstChild.nodeValue
        except xml.parsers.expat.ExpatError as e:
            yield self.stdio_log.addHeader("Corrupted xml, aborting step")
            raise buildstep.BuildStepFailed() from e
        return extractedurl == self.svnUriCanonicalize(self.repourl)

    @defer.inlineCallbacks
    def parseGotRevision(self):
        """Extract the checked-out revision from 'svn info --xml' and record
        it as the got_revision source property."""
        # if this was a full/export, then we need to check svnversion in the
        # *source* directory, not the build directory
        svnversion_dir = self.workdir
        if self.mode == 'full' and self.method == 'export':
            svnversion_dir = 'source'
        cmd = remotecommand.RemoteShellCommand(
            svnversion_dir,
            ['svn', 'info', '--xml'],
            env=self.env,
            logEnviron=self.logEnviron,
            timeout=self.timeout,
            collectStdout=True,
        )
        cmd.useLog(self.stdio_log, False)
        yield self.runCommand(cmd)

        stdout = cmd.stdout
        try:
            stdout_xml = xml.dom.minidom.parseString(stdout)
        except xml.parsers.expat.ExpatError as e:
            yield self.stdio_log.addHeader("Corrupted xml, aborting step")
            raise buildstep.BuildStepFailed() from e

        revision = None
        if self.preferLastChangedRev:
            try:
                revision = stdout_xml.getElementsByTagName('commit')[0].attributes['revision'].value
            except (KeyError, IndexError):
                msg = (
                    "SVN.parseGotRevision unable to detect Last Changed Rev in"
                    " output of svn info"
                )
                log.msg(msg)
                # fall through and try to get 'Revision' instead

        if revision is None:
            try:
                revision = stdout_xml.getElementsByTagName('entry')[0].attributes['revision'].value
            except (KeyError, IndexError) as e:
                msg = "SVN.parseGotRevision unable to detect revision in output of svn info"
                log.msg(msg)
                raise buildstep.BuildStepFailed() from e

        yield self.stdio_log.addHeader(f"Got SVN revision {revision}")
        self.updateSourceProperty('got_revision', revision)
        return cmd.rc

    @defer.inlineCallbacks
    def purge(self, ignore_ignores):
        """Delete everything that shown up on status."""
        command = ['status', '--xml']
        if ignore_ignores:
            command.append('--no-ignore')
        stdout = yield self._dovccmd(command, collectStdout=True)

        files = []
        for filename in self.getUnversionedFiles(stdout, self.keep_on_purge):
            filename = self.build.path_module.join(self.workdir, filename)
            files.append(filename)

        if files:
            if self.workerVersionIsOlderThan('rmdir', '2.14'):
                # old workers cannot remove a list of paths in one command
                rc = yield self.removeFiles(files)
            else:
                rc = yield self.runRmdir(files, abandonOnFailure=False, timeout=self.timeout)
            if rc != 0:
                log.msg("Failed removing files")
                raise buildstep.BuildStepFailed()

    @staticmethod
    def getUnversionedFiles(xmlStr, keep_on_purge):
        """Yield paths from 'svn status --xml' output that should be purged,
        skipping 'external'/'missing' entries, empty paths and anything
        listed in keep_on_purge."""
        try:
            result_xml = xml.dom.minidom.parseString(xmlStr)
        except xml.parsers.expat.ExpatError as e:
            log.err("Corrupted xml, aborting step")
            raise buildstep.BuildStepFailed() from e

        for entry in result_xml.getElementsByTagName('entry'):
            (wc_status,) = entry.getElementsByTagName('wc-status')
            if wc_status.getAttribute('item') == 'external':
                continue
            if wc_status.getAttribute('item') == 'missing':
                continue
            filename = entry.getAttribute('path')
            if filename in keep_on_purge or filename == '':
                continue
            yield filename

    @defer.inlineCallbacks
    def removeFiles(self, files):
        """Remove paths one at a time (fallback for old workers); return the
        first nonzero rc, or 0 on success."""
        for filename in files:
            res = yield self.runRmdir(filename, abandonOnFailure=False, timeout=self.timeout)
            if res:
                return res
        return 0

    @defer.inlineCallbacks
    def checkSvn(self):
        """Return True when the worker can run 'svn --version'."""
        cmd = remotecommand.RemoteShellCommand(
            self.workdir,
            ['svn', '--version'],
            env=self.env,
            logEnviron=self.logEnviron,
            timeout=self.timeout,
        )
        cmd.useLog(self.stdio_log, False)
        yield self.runCommand(cmd)
        return cmd.rc == 0

    def computeSourceRevision(self, changes):
        """Return the maximum revision among changes, or None when any change
        lacks a revision."""
        if not changes or None in [c.revision for c in changes]:
            return None
        lastChange = max(int(c.revision) for c in changes)
        return lastChange

    @staticmethod
    def svnUriCanonicalize(uri):
        """Normalize an svn URI (case, default ports, dot-segments, quoting)
        so that equal repositories compare equal as strings."""
        collapse = re.compile(r'([^/]+/\.\./?|/\./|//|/\.$|/\.\.$|^/\.\.)')
        server_authority = re.compile(r'^(?:([^@]+)@)?([^:]+)(?::(.+))?$')
        default_port = {'http': '80', 'https': '443', 'svn': '3690'}
        relative_schemes = ['http', 'https', 'svn']

        def quote(uri):
            return urlquote(uri, "!$&'()*+,-./:=@_~", encoding="latin-1")

        if not uri or uri == '/':
            return uri

        (scheme, authority, path, parameters, query, fragment) = urlparse(uri)
        scheme = scheme.lower()
        if authority:
            mo = server_authority.match(authority)
            if not mo:
                return uri  # give up
            userinfo, host, port = mo.groups()
            if host[-1] == '.':
                host = host[:-1]
            authority = host.lower()
            if userinfo:
                authority = f"{userinfo}@{authority}"
            # drop the port when it is the scheme's default
            if port and port != default_port.get(scheme, None):
                authority = f"{authority}:{port}"

        if scheme in relative_schemes:
            last_path = path
            while True:
                # iterate until no more '..', '.', or '//' segments collapse
                path = collapse.sub('/', path, 1)
                if last_path == path:
                    break
                last_path = path

        path = quote(urlunquote(path))
        canonical_uri = urlunparse((scheme, authority, path, parameters, query, fragment))
        if canonical_uri == '/':
            return canonical_uri
        elif canonical_uri[-1] == '/' and canonical_uri[-2] != '/':
            return canonical_uri[:-1]
        return canonical_uri

    @defer.inlineCallbacks
    def _checkout(self):
        """Run 'svn checkout', retrying per self.retry with a clobber between
        attempts."""
        checkout_cmd = ['checkout', self.repourl, '.']
        if self.revision:
            checkout_cmd.extend(["--revision", str(self.revision)])
        if self.retry:
            abandonOnFailure = self.retry[1] <= 0
        else:
            abandonOnFailure = True

        res = yield self._dovccmd(checkout_cmd, abandonOnFailure=abandonOnFailure)

        if self.retry:
            if self.stopped or res == 0:
                return
            delay, repeats = self.retry
            if repeats > 0:
                log.msg(f"Checkout failed, trying {repeats} more times after {delay} seconds")
                self.retry = (delay, repeats - 1)
                df = defer.Deferred()
                df.addCallback(lambda _: self.runRmdir(self.workdir, timeout=self.timeout))
                df.addCallback(lambda _: self._checkout())
                reactor.callLater(delay, df.callback, None)
                yield df
| 16,641 | Python | .py | 399 | 31.406015 | 100 | 0.603782 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,888 | mercurial.py | buildbot_buildbot/master/buildbot/steps/source/mercurial.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
"""
Source step code for mercurial
"""
from twisted.internet import defer
from twisted.internet import reactor
from twisted.python import log
from buildbot.config import ConfigErrors
from buildbot.interfaces import WorkerSetupError
from buildbot.process import buildstep
from buildbot.process import remotecommand
from buildbot.process import results
from buildbot.process.results import SUCCESS
from buildbot.steps.source.base import Source
class Mercurial(Source):
    """Class for Mercurial with all the smarts"""

    name = "hg"

    renderables = ["repourl"]
    possible_methods = (None, 'clean', 'fresh', 'clobber')
    possible_branchTypes = ('inrepo', 'dirname')

    def __init__(
        self,
        repourl=None,
        mode='incremental',
        method=None,
        defaultBranch=None,
        branchType='dirname',
        clobberOnBranchChange=True,
        **kwargs,
    ):
        """
        @type repourl: string
        @param repourl: the URL which points at the Mercurial repository.
                        if 'dirname' branches are enabled, this is the base URL
                        to which a branch name will be appended. It should
                        probably end in a slash.

        @param defaultBranch: if branches are enabled, this is the branch
                              to use if the Build does not specify one
                              explicitly.
                              For 'dirname' branches, It will simply be
                              appended to C{repourl} and the result handed to
                              the 'hg update' command.
                              For 'inrepo' branches, this specifies the named
                              revision to which the tree will update after a
                              clone.

        @param branchType: either 'dirname' or 'inrepo' depending on whether
                           the branch name should be appended to the C{repourl}
                           or the branch is a mercurial named branch and can be
                           found within the C{repourl}

        @param clobberOnBranchChange: boolean, defaults to True. If set and
                                      using inrepos branches, clobber the tree
                                      at each branch change. Otherwise, just
                                      update to the branch.
        """
        self.repourl = repourl
        self.defaultBranch = self.branch = defaultBranch
        self.branchType = branchType
        self.method = method
        self.clobberOnBranchChange = clobberOnBranchChange
        self.mode = mode
        super().__init__(**kwargs)

        # validate configuration eagerly so errors surface at config time
        errors = []
        if not self._hasAttrGroupMember('mode', self.mode):
            errors.append(f"mode {self.mode} is not one of {self._listAttrGroupMembers('mode')}")
        if self.method not in self.possible_methods:
            errors.append(f"method {self.method} is not one of {self.possible_methods}")
        if self.branchType not in self.possible_branchTypes:
            errors.append(f"branchType {self.branchType} is not one of {self.possible_branchTypes}")

        if repourl is None:
            errors.append("you must provide a repourl")

        if errors:
            raise ConfigErrors(errors)

    @defer.inlineCallbacks
    def run_vc(self, branch, revision, patch):
        """Entry point: verify hg availability, resolve the branch according
        to branchType, then dispatch to mode_full/mode_incremental."""
        self.revision = revision
        self.method = self._getMethod()
        self.stdio_log = yield self.addLogForRemoteCommands("stdio")

        installed = yield self.checkHg()
        if not installed:
            raise WorkerSetupError("Mercurial is not installed on worker")

        # FIXME: this does not do anything (the result is discarded and no
        # purge is triggered, unlike the SVN step)
        yield self.sourcedirIsPatched()
        if self.branchType == 'dirname':
            # 'dirname' branches: the branch is part of the repo URL
            self.repourl = self.repourl + (branch or '')
            self.branch = self.defaultBranch
            self.update_branch = branch
        elif self.branchType == 'inrepo':
            # 'inrepo' branches: named branch inside the repository
            self.update_branch = branch or 'default'

        yield self._getAttrGroupMember('mode', self.mode)()

        if patch:
            yield self.patch(patch)
        yield self.parseGotRevision()
        return results.SUCCESS

    @defer.inlineCallbacks
    def mode_full(self):
        """Full checkout according to the configured method."""
        if self.method == 'clobber':
            yield self.clobber()
            return

        updatable = yield self._sourcedirIsUpdatable()
        if not updatable:
            # no usable repository present: clone from scratch
            yield self._clone()
            yield self._update()
        elif self.method == 'clean':
            yield self.clean()
        elif self.method == 'fresh':
            yield self.fresh()
        else:
            raise ValueError("Unknown method, check your configuration")

    @defer.inlineCallbacks
    def mode_incremental(self):
        """Pull into the existing repository, or clone when absent, then
        update (clobbering on branch change if configured)."""
        if self.method is not None:
            raise ValueError(self.method)

        updatable = yield self._sourcedirIsUpdatable()
        if updatable:
            yield self._dovccmd(self.getHgPullCommand())
        else:
            yield self._clone()

        yield self._checkBranchChange()

    @defer.inlineCallbacks
    def clean(self):
        """Purge untracked (but not ignored) files, then pull+update."""
        command = ['--config', 'extensions.purge=', 'purge']
        yield self._dovccmd(command)
        yield self._pullUpdate()

    @defer.inlineCallbacks
    def _clobber(self):
        # remove the whole workdir on the worker
        cmd = remotecommand.RemoteCommand(
            'rmdir', {'dir': self.workdir, 'logEnviron': self.logEnviron}
        )
        cmd.useLog(self.stdio_log, False)
        yield self.runCommand(cmd)

    @defer.inlineCallbacks
    def clobber(self):
        """Remove the workdir entirely, clone and update."""
        yield self._clobber()
        yield self._clone()
        yield self._update()

    @defer.inlineCallbacks
    def fresh(self):
        """Purge all untracked files (including ignored), then pull+update."""
        command = ['--config', 'extensions.purge=', 'purge', '--all']
        yield self._dovccmd(command)
        yield self._pullUpdate()

    @defer.inlineCallbacks
    def parseGotRevision(self):
        """Record the full 40-hex-digit node of the working directory parent
        as the got_revision source property."""
        stdout = yield self._dovccmd(['parents', '--template', '{node}\\n'], collectStdout=True)
        revision = stdout.strip()
        if len(revision) != 40:
            raise ValueError("Incorrect revision id")
        log.msg(f"Got Mercurial revision {revision}")
        self.updateSourceProperty('got_revision', revision)

    @defer.inlineCallbacks
    def _checkBranchChange(self):
        """Clobber or update when the working dir's branch differs from the
        branch this build needs (inrepo branches)."""
        current_branch = yield self._getCurrentBranch()
        msg = (
            f"Working dir is on in-repo branch '{current_branch}' and build needs "
            f"'{self.update_branch}'."
        )
        if current_branch != self.update_branch and self.clobberOnBranchChange:
            msg += ' Clobbering.'
            log.msg(msg)
            yield self.clobber()
            return
        msg += ' Updating.'
        log.msg(msg)
        yield self._removeAddedFilesAndUpdate(None)

    def getHgPullCommand(self):
        """Build the 'hg pull' command, pinned to the requested revision or
        (for inrepo branches) the branch being built."""
        command = ['pull', self.repourl]
        if self.revision:
            command.extend(['--rev', self.revision])
        elif self.branchType == 'inrepo':
            command.extend(['--rev', self.update_branch])
        return command

    @defer.inlineCallbacks
    def _pullUpdate(self):
        # pull, then update/clobber depending on branch change
        command = self.getHgPullCommand()
        yield self._dovccmd(command)
        yield self._checkBranchChange()

    @defer.inlineCallbacks
    def _dovccmd(
        self, command, collectStdout=False, initialStdin=None, decodeRC=None, abandonOnFailure=True
    ):
        """Run an hg subcommand in the workdir; return stdout or rc.

        Raises BuildStepFailed when the command fails (per decodeRC) and
        abandonOnFailure is set.
        """
        if not command:
            raise ValueError("No command specified")

        if decodeRC is None:
            decodeRC = {0: SUCCESS}
        cmd = remotecommand.RemoteShellCommand(
            self.workdir,
            ["hg", "--verbose", *command],
            env=self.env,
            logEnviron=self.logEnviron,
            timeout=self.timeout,
            collectStdout=collectStdout,
            initialStdin=initialStdin,
            decodeRC=decodeRC,
        )
        cmd.useLog(self.stdio_log, False)
        yield self.runCommand(cmd)

        if abandonOnFailure and cmd.didFail():
            log.msg(f"Source step failed while running command {cmd}")
            raise buildstep.BuildStepFailed()
        if collectStdout:
            return cmd.stdout
        return cmd.rc

    def computeSourceRevision(self, changes):
        """Return the revision of the last Change, assuming changes arrive in
        ancestry order (see ticket #103)."""
        if not changes:
            return None
        # without knowing the revision ancestry graph, we can't sort the
        # changes at all. So for now, assume they were given to us in sorted
        # order, and just pay attention to the last one. See ticket #103 for
        # more details.
        if len(changes) > 1:
            log.msg(
                "Mercurial.computeSourceRevision: warning: "
                f"there are {len(changes)} changes here, assuming the last one is "
                "the most recent"
            )
        return changes[-1].revision

    @defer.inlineCallbacks
    def _getCurrentBranch(self):
        """Return the branch of the working directory: the configured branch
        for dirname layouts, otherwise 'hg identify --branch' output."""
        if self.branchType == 'dirname':
            return self.branch

        stdout = yield self._dovccmd(['identify', '--branch'], collectStdout=True)
        return stdout.strip()

    def _getMethod(self):
        """Resolve the effective method: None for incremental mode, the
        configured method otherwise, defaulting to 'fresh' for full mode."""
        if self.method is not None and self.mode != 'incremental':
            return self.method
        elif self.mode == 'incremental':
            return None
        elif self.method is None and self.mode == 'full':
            return 'fresh'
        return None

    def _sourcedirIsUpdatable(self):
        # a directory is a usable repo iff it contains .hg metadata
        return self.pathExists(self.build.path_module.join(self.workdir, '.hg'))

    @defer.inlineCallbacks
    def _removeAddedFilesAndUpdate(self, _):
        """Delete files in 'added' state (they would survive 'update --clean')
        and then update the working directory."""
        command = ['locate', 'set:added()']
        # rc 1 just means "no files matched", so treat it as success too
        stdout = yield self._dovccmd(command, collectStdout=True, decodeRC={0: SUCCESS, 1: SUCCESS})
        files = []
        for filename in stdout.splitlines():
            filename = self.workdir + '/' + filename
            files.append(filename)
        if files:
            if self.workerVersionIsOlderThan('rmdir', '2.14'):
                # old workers: remove one path per command
                yield self.removeFiles(files)
            else:
                cmd = remotecommand.RemoteCommand(
                    'rmdir',
                    {
                        'dir': files,
                        'logEnviron': self.logEnviron,
                    },
                )
                cmd.useLog(self.stdio_log, False)
                yield self.runCommand(cmd)

        yield self._update()

    @defer.inlineCallbacks
    def removeFiles(self, files):
        """Remove paths one at a time (fallback for old workers); return the
        first nonzero rc, or 0 on success."""
        for filename in files:
            cmd = remotecommand.RemoteCommand(
                'rmdir',
                {
                    'dir': filename,
                    'logEnviron': self.logEnviron,
                },
            )
            cmd.useLog(self.stdio_log, False)
            yield self.runCommand(cmd)
            if cmd.rc != 0:
                return cmd.rc
        return 0

    @defer.inlineCallbacks
    def _update(self):
        """Run 'hg update --clean', pinned to the requested revision or
        (inrepo) branch."""
        command = ['update', '--clean']
        if self.revision:
            command += ['--rev', self.revision]
        elif self.branchType == 'inrepo':
            command += ['--rev', self.update_branch]
        yield self._dovccmd(command)

    def _clone(self):
        """Run 'hg clone --noupdate', retrying per self.retry with a clobber
        between attempts."""
        if self.retry:
            abandonOnFailure = self.retry[1] <= 0
        else:
            abandonOnFailure = True
        d = self._dovccmd(
            ['clone', '--noupdate', self.repourl, '.'], abandonOnFailure=abandonOnFailure
        )

        def _retry(res):
            if self.stopped or res == 0:
                return res
            delay, repeats = self.retry
            if repeats > 0:
                log.msg(f"Checkout failed, trying {repeats} more times after {delay} seconds")
                self.retry = (delay, repeats - 1)
                df = defer.Deferred()
                df.addCallback(lambda _: self._clobber())
                df.addCallback(lambda _: self._clone())
                reactor.callLater(delay, df.callback, None)
                return df
            return res

        if self.retry:
            d.addCallback(_retry)
        return d

    def checkHg(self):
        """Return a Deferred firing True when the worker can run
        'hg --version'."""
        d = self._dovccmd(['--version'])

        @d.addCallback
        def check(res):
            return res == 0

        return d

    def applyPatch(self, patch):
        # feed the diff to 'hg import' on stdin rather than via a file
        d = self._dovccmd(
            ['import', '--no-commit', '-p', str(patch[0]), '-'], initialStdin=patch[1]
        )
        return d
| 13,082 | Python | .py | 324 | 29.638889 | 100 | 0.592584 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,889 | p4.py | buildbot_buildbot/master/buildbot/steps/source/p4.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
# Portions Copyright 2013 Bad Dog Consulting
from __future__ import annotations
import re
from twisted.internet import defer
from twisted.python import log
from buildbot import config
from buildbot import interfaces
from buildbot.interfaces import WorkerSetupError
from buildbot.process import buildstep
from buildbot.process import remotecommand
from buildbot.process import results
from buildbot.process.properties import Interpolate
from buildbot.steps.source import Source
# Notes:
# see
# http://perforce.com/perforce/doc.current/manuals/cmdref/o.gopts.html#1040647
# for getting p4 command to output marshalled python dictionaries as output
# for commands.
# Perhaps switch to using 'p4 -G' : From URL above:
# -G Causes all output (and batch input for form commands with -i) to be
# formatted as marshalled Python dictionary objects. This is most often used
# when scripting.
class P4(Source):
"""Perform Perforce checkout/update operations."""
name = 'p4'
renderables = [
'mode',
'p4base',
'p4client',
'p4viewspec',
'p4branch',
'p4passwd',
'p4port',
'p4user',
]
possible_modes = ('incremental', 'full')
possible_client_types = (None, 'readonly', 'partitioned')
def __init__(
self,
mode='incremental',
method=None,
p4base=None,
p4branch=None,
p4port=None,
p4user=None,
p4passwd=None,
p4extra_views=(),
p4line_end='local',
p4viewspec=None,
p4viewspec_suffix='...',
p4client: Interpolate | None = None,
p4client_spec_options='allwrite rmdir',
p4client_type=None,
p4extra_args=None,
p4bin='p4',
use_tickets=False,
stream=False,
debug=False,
**kwargs,
):
self.method = method
self.mode = mode
self.p4branch = p4branch
self.p4bin = p4bin
self.p4base = p4base
self.p4port = p4port
self.p4user = p4user
self.p4passwd = p4passwd
self.p4extra_views = p4extra_views
self.p4viewspec = p4viewspec
self.p4viewspec_suffix = p4viewspec_suffix
self.p4line_end = p4line_end
if p4client is None:
p4client = Interpolate('buildbot_%(prop:workername)s_%(prop:buildername)s')
self.p4client = p4client
self.p4client_spec_options = p4client_spec_options
self.p4client_type = p4client_type
self.p4extra_args = p4extra_args
self.use_tickets = use_tickets
self.stream = stream
self.debug = debug
super().__init__(**kwargs)
if self.mode not in self.possible_modes and not interfaces.IRenderable.providedBy(
self.mode
):
config.error(f"mode {self.mode} is not an IRenderable, or one of {self.possible_modes}")
if not p4viewspec and p4base is None:
config.error("You must provide p4base or p4viewspec")
if p4viewspec and (p4base or p4branch or p4extra_views):
config.error(
"Either provide p4viewspec or p4base and p4branch (and optionally p4extra_views)"
)
if p4viewspec and isinstance(p4viewspec, str):
config.error(
"p4viewspec must not be a string, and should be a sequence of 2 element sequences"
)
if not interfaces.IRenderable.providedBy(p4base) and p4base and not p4base.startswith('/'):
config.error(f'p4base should start with // [p4base = {p4base}]')
if not interfaces.IRenderable.providedBy(p4base) and p4base and p4base.endswith('/'):
config.error(f'p4base should not end with a trailing / [p4base = {p4base}]')
if not interfaces.IRenderable.providedBy(p4branch) and p4branch and p4branch.endswith('/'):
config.error(f'p4branch should not end with a trailing / [p4branch = {p4branch}]')
if stream:
if p4extra_views or p4viewspec:
config.error('You can\'t use p4extra_views not p4viewspec with stream')
if not p4base or not p4branch:
config.error('You must specify both p4base and p4branch when using stream')
if not interfaces.IRenderable.providedBy(p4base) and " " in p4base:
config.error('p4base must not contain any whitespace')
if not interfaces.IRenderable.providedBy(p4branch) and " " in p4branch:
config.error('p4branch must not contain any whitespace')
if self.p4client_spec_options is None:
self.p4client_spec_options = ''
if (
self.p4client_type not in self.possible_client_types
and not interfaces.IRenderable.providedBy(self.p4client_type)
):
config.error(
f"p4client_type {self.p4client_type} is not an IRenderable, "
"or one of {self.possible_client_types}"
)
    @defer.inlineCallbacks
    def run_vc(self, branch, revision, patch):
        """Entry point: verify p4 availability, set up credentials and the
        client spec, then dispatch to mode_full/mode_incremental.

        Note: the *branch* and *patch* arguments are unused by this step.
        """
        if self.debug:
            log.msg('in run_vc')

        self.method = self._getMethod()
        self.stdio_log = yield self.addLogForRemoteCommands("stdio")

        installed = yield self.checkP4()
        if not installed:
            raise WorkerSetupError("p4 is not installed on worker")

        # Try to obfuscate the password when used as an argument to commands.
        if self.p4passwd is not None:
            if not self.workerVersionIsOlderThan('shell', '2.16'):
                self.p4passwd_arg = ('obfuscated', self.p4passwd, 'XXXXXX')
            else:
                self.p4passwd_arg = self.p4passwd
                log.msg("Worker does not understand obfuscation; p4 password will be logged")

        if self.use_tickets and self.p4passwd:
            yield self._acquireTicket()

        # First we need to create the client
        yield self._createClientSpec()

        self.revision = yield self.get_sync_revision(revision)

        yield self._getAttrGroupMember('mode', self.mode)()

        self.updateSourceProperty('got_revision', self.revision)
        return results.SUCCESS
@defer.inlineCallbacks
def mode_full(self):
if self.debug:
log.msg("P4:full()..")
# Then p4 sync #none
yield self._dovccmd(['sync', '#none'])
# Then remove directory.
yield self.runRmdir(self.workdir)
# Then we need to sync the client
if self.revision:
if self.debug:
log.msg(
"P4: full() sync command based on :client:%s changeset:%d",
self.p4client,
int(self.revision),
)
yield self._dovccmd(
['sync', f'//{self.p4client}/...@{int(self.revision)}'], collectStdout=True
)
else:
if self.debug:
log.msg("P4: full() sync command based on :client:%s no revision", self.p4client)
yield self._dovccmd(['sync'], collectStdout=True)
if self.debug:
log.msg("P4: full() sync done.")
@defer.inlineCallbacks
def mode_incremental(self):
if self.debug:
log.msg("P4:incremental()")
# and plan to do a checkout
command = [
'sync',
]
if self.revision:
command.extend([f'//{self.p4client}/...@{int(self.revision)}'])
if self.debug:
log.msg("P4:incremental() command:%s revision:%s", command, self.revision)
yield self._dovccmd(command)
def _buildVCCommand(self, doCommand):
assert doCommand, "No command specified"
command = [
self.p4bin,
]
if self.p4port:
command.extend(['-p', self.p4port])
if self.p4user:
command.extend(['-u', self.p4user])
if not self.use_tickets and self.p4passwd:
command.extend(['-P', self.p4passwd_arg])
if self.p4client:
command.extend(['-c', self.p4client])
# Only add the extra arguments for the `sync` command.
if doCommand[0] == 'sync' and self.p4extra_args:
command.extend(self.p4extra_args)
command.extend(doCommand)
return command
    @defer.inlineCallbacks
    def _dovccmd(self, command, collectStdout=False, initialStdin=None):
        """Run one p4 sub-command on the worker.

        *command* is the p4 sub-command only; _buildVCCommand() prepends the
        binary and global flags.  Returns collected stdout when
        *collectStdout* is true, otherwise the return code.  Raises
        BuildStepFailed on a non-zero exit.
        """
        command = self._buildVCCommand(command)
        if self.debug:
            log.msg(f"P4:_dovccmd():workdir->{self.workdir}")
        cmd = remotecommand.RemoteShellCommand(
            self.workdir,
            command,
            env=self.env,
            logEnviron=self.logEnviron,
            timeout=self.timeout,
            collectStdout=collectStdout,
            initialStdin=initialStdin,
        )
        cmd.useLog(self.stdio_log, False)
        if self.debug:
            # NOTE(review): if the password is the obfuscated tuple form,
            # this " ".join() would raise TypeError — only reached with
            # self.debug set; confirm and guard if debug is ever enabled.
            log.msg(f'Starting p4 command : p4 {" ".join(command)}')
        yield self.runCommand(cmd)
        if cmd.rc != 0:
            if self.debug:
                log.msg(f"P4:_dovccmd():Source step failed while running command {cmd}")
            raise buildstep.BuildStepFailed()
        if collectStdout:
            return cmd.stdout
        return cmd.rc
def _getMethod(self):
if self.method is not None and self.mode != 'incremental':
return self.method
elif self.mode == 'incremental':
return None
elif self.method is None and self.mode == 'full':
return 'fresh'
return None
    @defer.inlineCallbacks
    def _createClientSpec(self):
        """Build a client spec and feed it to ``p4 client -i``.

        The spec's Root points at the build's working directory.  For stream
        workspaces only a Stream: line is emitted (Perforce generates the
        view); otherwise the view comes from p4viewspec, or from
        p4base/p4branch plus p4extra_views.  Returns a truthy value when the
        server reports the client was saved or unchanged.
        """
        builddir = self.getProperty('builddir')
        if self.debug:
            log.msg(f"P4:_createClientSpec() builddir:{builddir}")
            log.msg(f"P4:_createClientSpec() SELF.workdir:{self.workdir}")
        # NOTE(review): prop_dict appears unused below — possibly leftover
        # from an earlier template-based implementation; confirm and remove.
        prop_dict = self.getProperties().asDict()
        prop_dict['p4client'] = self.p4client
        root = self.build.path_module.normpath(self.build.path_module.join(builddir, self.workdir))
        client_spec = ''
        client_spec += f"Client: {self.p4client}\n\n"
        client_spec += f"Owner: {self.p4user}\n\n"
        client_spec += f"Description:\n\tCreated by {self.p4user}\n\n"
        client_spec += f"Root:\t{root}\n\n"
        client_spec += f"Options:\t{self.p4client_spec_options}\n\n"
        if self.p4line_end:
            client_spec += f"LineEnd:\t{self.p4line_end}\n\n"
        else:
            client_spec += "LineEnd:\tlocal\n\n"
        if self.p4client_type is not None:
            client_spec += f"Type:\t{self.p4client_type}\n\n"
        # Perforce generates the view for stream-associated workspaces
        if self.stream:
            client_spec += f"Stream:\t{self.p4base}/{self.p4branch}\n"
        else:
            # Setup a view
            client_spec += "View:\n"

            # Depot/client paths containing whitespace must be double-quoted
            # in the spec; detect that per mapping side.
            def has_whitespace(*args):
                return any(re.search(r'\s', i) for i in args if i is not None)

            if self.p4viewspec:
                # uses only p4viewspec array of tuples to build view
                # If the user specifies a viewspec via an array of tuples then
                # Ignore any specified p4base,p4branch, and/or p4extra_views
                suffix = self.p4viewspec_suffix or ''
                for k, v in self.p4viewspec:
                    if self.debug:
                        log.msg(f'P4:_createClientSpec():key:{k} value:{v}')
                    qa = '"' if has_whitespace(k, suffix) else ''
                    qb = '"' if has_whitespace(self.p4client, v, suffix) else ''
                    client_spec += f'\t{qa}{k}{suffix}{qa} {qb}//{self.p4client}/{v}{suffix}{qb}\n'
            else:
                # Uses p4base, p4branch, p4extra_views
                qa = '"' if has_whitespace(self.p4base, self.p4branch) else ''
                client_spec += f"\t{qa}{self.p4base}"
                if self.p4branch:
                    client_spec += f"/{self.p4branch}"
                client_spec += f"/...{qa} "
                qb = '"' if has_whitespace(self.p4client) else ''
                client_spec += f"{qb}//{self.p4client}/...{qb}\n"
                if self.p4extra_views:
                    for k, v in self.p4extra_views:
                        qa = '"' if has_whitespace(k) else ''
                        qb = '"' if has_whitespace(k, self.p4client, v) else ''
                        client_spec += f"\t{qa}{k}/...{qa} {qb}//{self.p4client}/{v}/...{qb}\n"
        if self.debug:
            log.msg(client_spec)
        # 'p4 client -i' reads the spec from stdin; parse the server's
        # confirmation line to report success.
        stdout = yield self._dovccmd(['client', '-i'], collectStdout=True, initialStdin=client_spec)
        mo = re.search(r'Client (\S+) (.+)$', stdout, re.M)
        return mo and (mo.group(2) == 'saved.' or mo.group(2) == 'not changed.')
@defer.inlineCallbacks
def _acquireTicket(self):
if self.debug:
log.msg("P4:acquireTicket()")
# TODO: check first if the ticket is still valid?
initialStdin = self.p4passwd + "\n"
yield self._dovccmd(['login'], initialStdin=initialStdin)
@defer.inlineCallbacks
def get_sync_revision(self, revision=None):
revision = f"@{revision}" if revision else "#head"
if self.debug:
log.msg("P4: get_sync_revision() retrieve client actual revision at %s", revision)
changes_command_args = ['-ztag', 'changes', '-m1', f"//{self.p4client}/...{revision}"]
command = self._buildVCCommand(changes_command_args)
cmd = remotecommand.RemoteShellCommand(
self.workdir,
command,
env=self.env,
timeout=self.timeout,
logEnviron=self.logEnviron,
collectStdout=True,
)
cmd.useLog(self.stdio_log, False)
yield self.runCommand(cmd)
stdout = cmd.stdout.splitlines(keepends=False)
# pylint: disable=wrong-spelling-in-comment
# Example output from p4 -ztag changes -m1
# ... change 212798
# ... time 1694770219
# ... user user@user-unix-bldng2
# ... client UserClient
# ... status submitted
# ... changeType public
# ... path //Depot/Path/...
# ... desc change to pickup build
change_identifier = "... change "
revision = next(
(
line[len(change_identifier) :]
for line in stdout
if line.startswith(change_identifier)
),
None,
)
try:
int(revision)
except ValueError as error:
log.msg(
"p4.get_sync_revision unable to parse output of %s: %s",
['p4', *changes_command_args],
stdout,
)
raise buildstep.BuildStepFailed() from error
return revision
@defer.inlineCallbacks
def purge(self, ignore_ignores):
"""Delete everything that shown up on status."""
command = ['sync', '#none']
if ignore_ignores:
command.append('--no-ignore')
yield self._dovccmd(command, collectStdout=True)
# FIXME: do the following comments need addressing?
# add deferred to rm tree
# then add defer to sync to revision
    @defer.inlineCallbacks
    def checkP4(self):
        """Return True when the p4 binary responds to ``p4 -V`` on the worker."""
        cmd = remotecommand.RemoteShellCommand(
            self.workdir, [self.p4bin, '-V'], env=self.env, logEnviron=self.logEnviron
        )
        cmd.useLog(self.stdio_log, False)
        yield self.runCommand(cmd)
        return cmd.rc == 0
def computeSourceRevision(self, changes):
if not changes or None in [c.revision for c in changes]:
return None
lastChange = max(int(c.revision) for c in changes)
return lastChange
| 16,492 | Python | .py | 384 | 32.713542 | 100 | 0.601497 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,890 | bzr.py | buildbot_buildbot/master/buildbot/steps/source/bzr.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import os
from twisted.internet import defer
from twisted.internet import reactor
from twisted.python import log
from buildbot.interfaces import WorkerSetupError
from buildbot.process import buildstep
from buildbot.process import remotecommand
from buildbot.process import results
from buildbot.steps.source.base import Source
class Bzr(Source):
    """Check out a source tree from a Bazaar (bzr) repository.

    Exactly one of *repourl* and *baseURL* must be given; with *baseURL*,
    the branch name is appended to form the repository URL.  Supports
    'incremental' mode and 'full' mode with the methods 'clean', 'fresh',
    'clobber' and 'copy'.
    """

    name = 'bzr'

    renderables = ['repourl', 'baseURL']

    def __init__(
        self,
        repourl=None,
        baseURL=None,
        mode='incremental',
        method=None,
        defaultBranch=None,
        **kwargs,
    ):
        self.repourl = repourl
        self.baseURL = baseURL
        self.branch = defaultBranch
        self.mode = mode
        self.method = method
        super().__init__(**kwargs)
        if repourl and baseURL:
            raise ValueError("you must provide exactly one of repourl and baseURL")
        if repourl is None and baseURL is None:
            raise ValueError("you must provide at least one of repourl and baseURL")
        if baseURL is not None and defaultBranch is None:
            raise ValueError("you must provide defaultBranch with baseURL")
        if not self._hasAttrGroupMember('mode', self.mode):
            raise ValueError(f"mode {self.mode} is not one of {self._listAttrGroupMembers('mode')}")
        if self.mode == 'full':
            assert self.method in ['clean', 'fresh', 'clobber', 'copy', None]

    @defer.inlineCallbacks
    def run_vc(self, branch, revision, patch):
        """Entry point: check bzr availability, clean up any previously
        applied patch, run the configured mode, apply *patch*, and record
        the resulting revision as got_revision."""
        if branch:
            self.branch = branch
        self.revision = revision
        self.method = self._getMethod()
        self.stdio_log = yield self.addLogForRemoteCommands("stdio")
        if self.repourl is None:
            self.repourl = os.path.join(self.baseURL, self.branch)
        installed = yield self.checkBzr()
        if not installed:
            raise WorkerSetupError("bzr is not installed on worker")
        patched = yield self.sourcedirIsPatched()
        if patched:
            # Remove the leftovers of a previously applied patch.
            yield self._dovccmd(['clean-tree', '--ignored', '--force'])
        yield self._getAttrGroupMember('mode', self.mode)()
        if patch:
            yield self.patch(patch)
        yield self.parseGotRevision()
        return results.SUCCESS

    @defer.inlineCallbacks
    def mode_incremental(self):
        """Update an existing checkout in place; fall back to a full
        checkout when no bzr working tree is present."""
        updatable = yield self._sourcedirIsUpdatable()
        if updatable:
            command = ['update']
            if self.revision:
                command.extend(['-r', self.revision])
            yield self._dovccmd(command)
        else:
            yield self._doFull()

    @defer.inlineCallbacks
    def mode_full(self):
        """Run the configured full-mode method (clobber/copy/clean/fresh)."""
        if self.method == 'clobber':
            yield self.clobber()
            return
        elif self.method == 'copy':
            self.workdir = 'source'
            yield self.copy()
            return
        # BUGFIX: _sourcedirIsUpdatable() returns a Deferred, which is always
        # truthy; without the yield the 'no repo present' branch below was
        # unreachable (mode_incremental already yielded correctly).
        updatable = yield self._sourcedirIsUpdatable()
        if not updatable:
            log.msg("No bzr repo present, making full checkout")
            yield self._doFull()
        elif self.method == 'clean':
            yield self.clean()
        elif self.method == 'fresh':
            yield self.fresh()
        else:
            raise ValueError("Unknown method, check your configuration")

    @defer.inlineCallbacks
    def _clobber(self):
        """Remove the working directory on the worker; raise on failure."""
        cmd = remotecommand.RemoteCommand(
            'rmdir',
            {
                'dir': self.workdir,
                'logEnviron': self.logEnviron,
            },
        )
        cmd.useLog(self.stdio_log, False)
        yield self.runCommand(cmd)
        if cmd.rc != 0:
            raise RuntimeError("Failed to delete directory")

    @defer.inlineCallbacks
    def clobber(self):
        """Delete the working directory, then perform a full checkout."""
        yield self._clobber()
        yield self._doFull()

    @defer.inlineCallbacks
    def copy(self):
        """Refresh the pristine checkout in 'source', then copy it to
        'build'.  mode_full() has already pointed self.workdir at 'source'."""
        cmd = remotecommand.RemoteCommand(
            'rmdir',
            {
                'dir': 'build',
                'logEnviron': self.logEnviron,
            },
        )
        cmd.useLog(self.stdio_log, False)
        yield self.runCommand(cmd)
        yield self.mode_incremental()
        cmd = remotecommand.RemoteCommand(
            'cpdir',
            {
                'fromdir': 'source',
                'todir': 'build',
                'logEnviron': self.logEnviron,
            },
        )
        cmd.useLog(self.stdio_log, False)
        yield self.runCommand(cmd)

    def clean(self):
        """Remove ignored files, then update to the requested revision."""
        d = self._dovccmd(['clean-tree', '--ignored', '--force'])
        command = ['update']
        if self.revision:
            command.extend(['-r', self.revision])
        d.addCallback(lambda _: self._dovccmd(command))
        return d

    def fresh(self):
        """Remove all unversioned files (including ignored ones), then
        update to the requested revision."""
        d = self._dovccmd(['clean-tree', '--force'])
        command = ['update']
        if self.revision:
            command.extend(['-r', self.revision])
        d.addCallback(lambda _: self._dovccmd(command))
        return d

    @defer.inlineCallbacks
    def _doFull(self):
        """Check out self.repourl into the working directory, retrying as
        configured by self.retry (delay, remaining-attempts)."""
        command = ['checkout', self.repourl, '.']
        if self.revision:
            command.extend(['-r', self.revision])
        if self.retry:
            abandonOnFailure = self.retry[1] <= 0
        else:
            abandonOnFailure = True
        res = yield self._dovccmd(command, abandonOnFailure=abandonOnFailure)
        if self.retry:
            if self.stopped or res == 0:
                return res
            delay, repeats = self.retry
            if repeats > 0:
                log.msg(f"Checkout failed, trying {repeats} more times after {delay} seconds")
                self.retry = (delay, repeats - 1)
                # Clobber and retry after the configured delay.
                df = defer.Deferred()
                df.addCallback(lambda _: self._clobber())
                df.addCallback(lambda _: self._doFull())
                reactor.callLater(delay, df.callback, None)
                res = yield df
        return res

    def _sourcedirIsUpdatable(self):
        """Return a Deferred firing True when workdir contains a .bzr tree."""
        return self.pathExists(self.build.path_module.join(self.workdir, '.bzr'))

    def computeSourceRevision(self, changes):
        """Use the highest revision number among *changes*."""
        if not changes:
            return None
        lastChange = max(int(c.revision) for c in changes)
        return lastChange

    def _dovccmd(self, command, abandonOnFailure=True, collectStdout=False):
        """Run a bzr command on the worker.

        Returns a Deferred firing with collected stdout (when
        *collectStdout*) or the return code; raises BuildStepFailed on
        failure when *abandonOnFailure* is set.
        """
        cmd = remotecommand.RemoteShellCommand(
            self.workdir,
            ['bzr', *command],
            env=self.env,
            logEnviron=self.logEnviron,
            timeout=self.timeout,
            collectStdout=collectStdout,
        )
        cmd.useLog(self.stdio_log, False)
        d = self.runCommand(cmd)

        @d.addCallback
        def evaluateCommand(_):
            if abandonOnFailure and cmd.didFail():
                log.msg(f"Source step failed while running command {cmd}")
                raise buildstep.BuildStepFailed()
            if collectStdout:
                return cmd.stdout
            return cmd.rc

        return d

    def checkBzr(self):
        """Return a Deferred firing True when ``bzr --version`` succeeds."""
        d = self._dovccmd(['--version'])

        @d.addCallback
        def check(res):
            return res == 0

        return d

    def _getMethod(self):
        """Resolve the effective method: explicit value for full mode,
        'fresh' as the full-mode default, None for incremental mode."""
        if self.method is not None and self.mode != 'incremental':
            return self.method
        elif self.mode == 'incremental':
            return None
        elif self.method is None and self.mode == 'full':
            return 'fresh'
        return None

    @defer.inlineCallbacks
    def parseGotRevision(self):
        """Query the checked-out tree's revno and store it as got_revision."""
        stdout = yield self._dovccmd(
            ["version-info", "--custom", "--template='{revno}"], collectStdout=True
        )
        revision = stdout.strip("'")
        try:
            int(revision)
        except ValueError as e:
            log.msg("Invalid revision number")
            raise buildstep.BuildStepFailed() from e
        # BUGFIX: this message previously said "Git" — copy-paste from the
        # git step; this is the Bzr step.
        log.msg(f"Got Bzr revision {revision}")
        self.updateSourceProperty('got_revision', revision)
| 8,617 | Python | .py | 230 | 27.865217 | 100 | 0.600408 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,891 | wamp.py | buildbot_buildbot/master/buildbot/mq/wamp.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import json
from autobahn.wamp.exception import TransportLost
from autobahn.wamp.types import PublishOptions
from autobahn.wamp.types import SubscribeOptions
from twisted.internet import defer
from twisted.python import log
from buildbot.mq import base
from buildbot.util import service
from buildbot.util import toJson
class WampMQ(service.ReconfigurableServiceMixin, base.MQBase):
    """MQ implementation that relays buildbot messages over a WAMP router."""

    NAMESPACE = "org.buildbot.mq"

    def produce(self, routingKey, data):
        """Publish *data* under *routingKey*; publish failures are logged,
        never raised to the caller."""
        self._produce(routingKey, data).addErrback(
            log.err, "Problem while producing message on topic " + repr(routingKey)
        )

    @classmethod
    def messageTopic(cls, routingKey):
        """Map a routing-key tuple to a dotted WAMP topic under NAMESPACE."""
        # None components (wildcards) become empty strings before joining.
        parts = ["" if key is None else key for key in routingKey]
        return f"{cls.NAMESPACE}.{'.'.join(parts)}"

    @classmethod
    def routingKeyFromMessageTopic(cls, topic):
        """Inverse of messageTopic(): strip the prefix and split on dots."""
        stripped = topic[len(WampMQ.NAMESPACE) + 1 :]
        return tuple(stripped.split("."))

    def _produce(self, routingKey, data):
        # Round-trip through JSON so only serializable data goes on the wire.
        payload = json.loads(json.dumps(data, default=toJson))
        return self.master.wamp.publish(
            self.messageTopic(routingKey), payload, options=PublishOptions(exclude_me=False)
        )

    def startConsuming(self, callback, _filter, persistent_name=None):
        """Subscribe *callback* to messages matching *_filter*; persistent
        queues are not supported over WAMP and only produce a logged error."""
        if persistent_name is not None:
            log.err(f'wampmq: persistent queues are not persisted: {persistent_name} {_filter}')
        qr = QueueRef(self, callback)
        self._startConsuming(qr, callback, _filter)
        return defer.succeed(qr)

    def _startConsuming(self, qr, callback, _filter, persistent_name=None):
        return qr.subscribe(self.master.wamp, self, _filter)
class QueueRef(base.QueueRef):
    # Consumer handle backed by a WAMP subscription.

    def __init__(self, mq, callback):
        super().__init__(callback)
        # Unregistration handle returned by the WAMP subscribe; None until
        # subscribe() completes or after stopConsuming().
        self.unreg = None
        self.mq = mq

    @defer.inlineCallbacks
    def subscribe(self, connector_service, wamp_service, _filter):
        """Subscribe wampInvoke() to the topic derived from *_filter*.

        A None element in the filter selects wildcard matching.  If the
        consumer was stopped while the subscription was in flight, undo it
        immediately.
        """
        self.filter = _filter
        self.emulated = False
        # details_arg makes the router pass EventDetails as the 'details'
        # keyword argument to wampInvoke().
        options = {"details_arg": 'details'}
        if None in _filter:
            options["match"] = "wildcard"
        options = SubscribeOptions(**options)
        _filter = WampMQ.messageTopic(_filter)
        self.unreg = yield connector_service.subscribe(self.wampInvoke, _filter, options=options)
        if self.callback is None:
            # stopConsuming() was called while we were subscribing.
            yield self.stopConsuming()

    def wampInvoke(self, msg, details):
        """Dispatch one incoming WAMP event to the consumer callback."""
        if details.topic is not None:
            # in the case of a wildcard, wamp router sends the topic
            topic = WampMQ.routingKeyFromMessageTopic(details.topic)
        else:
            # in the case of an exact match, then we can use our own topic
            topic = self.filter
        self.mq.invokeQref(self, topic, msg)

    @defer.inlineCallbacks
    def stopConsuming(self):
        """Drop the callback and unsubscribe from the router.

        TransportLost is expected when the connection is already gone;
        other unsubscribe errors are logged but not raised.
        """
        self.callback = None
        if self.unreg is not None:
            unreg = self.unreg
            self.unreg = None
            try:
                yield unreg.unsubscribe()
            except TransportLost:
                pass
            except Exception as e:
                log.err(e, 'When unsubscribing MQ connection ' + str(unreg))
| 4,069 | Python | .py | 89 | 38.561798 | 97 | 0.686364 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,892 | connector.py | buildbot_buildbot/master/buildbot/mq/connector.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import annotations
from twisted.internet import defer
from twisted.python.reflect import namedObject
from buildbot.util import service
class MQConnector(service.ReconfigurableServiceMixin, service.AsyncMultiService):
    """Parent service that instantiates the configured MQ implementation
    ('simple' or 'wamp') and exposes its API on the master as ``master.mq``."""

    # Known implementations and the config keys each accepts; consulted by
    # MasterConfig during config validation.
    classes = {
        'simple': {
            'class': "buildbot.mq.simple.SimpleMQ",
            'keys': set(['debug']),
        },
        'wamp': {
            'class': "buildbot.mq.wamp.WampMQ",
            'keys': set(["router_url", "realm", "wamp_debug_level"]),
        },
    }
    name: str | None = 'mq'  # type: ignore[assignment]

    def __init__(self):
        super().__init__()
        # Both filled in by setup().
        self.impl = None
        self.impl_type = None

    @defer.inlineCallbacks
    def setup(self):
        """Instantiate the configured implementation, attach it as a child
        service, and adopt its public methods."""
        assert not self.impl
        # imports are done locally so that we don't try to import
        # implementation-specific modules unless they're required.
        typ = self.master.config.mq['type']
        assert typ in self.classes  # this is checked by MasterConfig
        self.impl_type = typ
        impl_class = namedObject(self.classes[typ]['class'])
        self.impl = impl_class()
        # set up the impl as a child service
        yield self.impl.setServiceParent(self)
        # configure it (early)
        self.impl.reconfigServiceWithBuildbotConfig(self.master.config)
        # copy the methods onto this object for ease of access
        self.produce = self.impl.produce
        self.startConsuming = self.impl.startConsuming
        self.waitUntilEvent = self.impl.waitUntilEvent

    def reconfigServiceWithBuildbotConfig(self, new_config):
        # double-check -- the master ensures this in config checks
        assert self.impl_type == new_config.mq['type']
        return super().reconfigServiceWithBuildbotConfig(new_config)

    def produce(self, routing_key, data):
        """Placeholder; replaced by the implementation's method in setup()."""
        raise NotImplementedError

    def startConsuming(self, callback, filter, persistent_name=None):
        """Placeholder; replaced by the implementation's method in setup()."""
        raise NotImplementedError
| 2,917 | Python | .py | 64 | 39.03125 | 81 | 0.694856 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,893 | simple.py | buildbot_buildbot/master/buildbot/mq/simple.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import pprint
from twisted.internet import defer
from twisted.python import log
from buildbot.mq import base
from buildbot.util import service
from buildbot.util import tuplematch
class SimpleMQ(service.ReconfigurableServiceMixin, base.MQBase):
    """In-process MQ: delivers produced messages directly to the matching
    consumers registered on this master."""

    def __init__(self):
        super().__init__()
        self.qrefs = []
        self.persistent_qrefs = {}
        self.debug = False

    def reconfigServiceWithBuildbotConfig(self, new_config):
        self.debug = new_config.mq.get('debug', False)
        return super().reconfigServiceWithBuildbotConfig(new_config)

    def produce(self, routingKey, data):
        """Deliver (routingKey, data) to every consumer whose filter matches."""
        if self.debug:
            log.msg(f"MSG: {routingKey}\n{pprint.pformat(data)}")
        matching = (
            qref for qref in self.qrefs if tuplematch.matchTuple(routingKey, qref.filter)
        )
        for qref in matching:
            self.invokeQref(qref, routingKey, data)

    def startConsuming(self, callback, filter, persistent_name=None):
        """Register *callback* for messages matching *filter*.

        With *persistent_name*, the queue survives stopConsuming() and
        buffers messages until consumption restarts.
        """
        if any(not isinstance(k, str) and k is not None for k in filter):
            raise AssertionError(f"{filter} is not a filter")
        if not persistent_name:
            qref = QueueRef(self, callback, filter)
            self.qrefs.append(qref)
            return defer.succeed(qref)
        qref = self.persistent_qrefs.get(persistent_name)
        if qref is not None:
            # Resume the existing persistent queue with the new callback.
            qref.startConsuming(callback)
        else:
            qref = PersistentQueueRef(self, callback, filter)
            self.qrefs.append(qref)
            self.persistent_qrefs[persistent_name] = qref
        return defer.succeed(qref)
class QueueRef(base.QueueRef):
    """Consumer handle for SimpleMQ; deregisters itself on stopConsuming()."""

    __slots__ = ['mq', 'filter']

    def __init__(self, mq, callback, filter):
        super().__init__(callback)
        self.mq = mq
        self.filter = filter

    def stopConsuming(self):
        self.callback = None
        # Tolerate being called more than once.
        if self in self.mq.qrefs:
            self.mq.qrefs.remove(self)
class PersistentQueueRef(QueueRef):
    """QueueRef that survives stopConsuming(): while inactive it buffers
    incoming messages and replays them when consumption restarts."""

    __slots__ = ['active', 'queue']

    def __init__(self, mq, callback, filter):
        super().__init__(mq, callback, filter)
        # Consumption starts live — a callback was supplied at creation.
        # BUGFIX: 'active' was declared in __slots__ but never initialized,
        # so reading it before startConsuming()/stopConsuming() raised
        # AttributeError.
        self.active = True
        self.queue = []

    def startConsuming(self, callback):
        """Resume delivery to *callback*, replaying any buffered messages."""
        self.callback = callback
        self.active = True
        # invoke for every message that was missed
        queue = self.queue
        self.queue = []
        for routingKey, data in queue:
            self.invoke(routingKey, data)

    def stopConsuming(self):
        """Pause delivery; subsequent messages are buffered in self.queue."""
        self.callback = self.addToQueue
        self.active = False

    def addToQueue(self, routingKey, data):
        self.queue.append((routingKey, data))
| 3,303 | Python | .py | 80 | 33.8375 | 79 | 0.665626 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,894 | base.py | buildbot_buildbot/master/buildbot/mq/base.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from twisted.internet import defer
from twisted.python import failure
from twisted.python import log
from buildbot.util import deferwaiter
from buildbot.util import service
class MQBase(service.AsyncService):
    # Common behavior for MQ implementations: tracks in-flight consumer
    # invocations so the service can drain them before stopping.
    name = 'mq-implementation'

    def __init__(self):
        super().__init__()
        self._deferwaiter = deferwaiter.DeferWaiter()

    @defer.inlineCallbacks
    def stopService(self):
        """Wait for outstanding consumer callbacks before shutting down."""
        yield self._deferwaiter.wait()
        yield super().stopService()

    @defer.inlineCallbacks
    def waitUntilEvent(self, filter, check_callback):
        """Wait for one message matching *filter*, unless *check_callback*
        already reports the awaited condition as satisfied.

        Returns the (key, value) of the received message, or None when the
        check short-circuited the wait.  The consumer is subscribed before
        the check to avoid missing an event in between.
        """
        d = defer.Deferred()
        buildCompleteConsumer = yield self.startConsuming(
            lambda key, value: d.callback((key, value)), filter
        )
        check = yield check_callback()
        # we only wait if the check callback return true
        if not check:
            res = yield d
        else:
            res = None
        yield buildCompleteConsumer.stopConsuming()
        return res

    def invokeQref(self, qref, routingKey, data):
        # Track the (possibly deferred) callback so stopService can drain it.
        self._deferwaiter.add(qref.invoke(routingKey, data))
class QueueRef:
    """Handle for a registered consumer; invoke() delivers one message to
    its callback, logging (never raising) callback errors."""

    __slots__ = ['callback']

    def __init__(self, callback):
        self.callback = callback

    def invoke(self, routing_key, data):
        # Potentially returns a Deferred
        if not self.callback:
            return None
        try:
            result = self.callback(routing_key, data)
        except Exception:
            log.err(failure.Failure(), f'while invoking {self.callback!r}')
            return None
        if isinstance(result, defer.Deferred):
            result.addErrback(log.err, f'while invoking {self.callback!r}')
        return result

    def stopConsuming(self):
        # This method may return a Deferred.
        # subclasses should set self.callback to None in this method.
        raise NotImplementedError
| 2,544 | Python | .py | 64 | 33.328125 | 79 | 0.689655 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,895 | local.py | buildbot_buildbot/master/buildbot/worker/local.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Portions Copyright Buildbot Team Members
import os
from twisted.internet import defer
from buildbot.config import error
from buildbot.worker.base import Worker
class LocalWorker(Worker):
    # A worker that runs in-process on the master, using the
    # buildbot-worker package's LocalWorker bot.

    def checkConfig(self, name, workdir=None, **kwargs):
        """Validate configuration; no password is needed for a local worker."""
        kwargs['password'] = None
        super().checkConfig(name, **kwargs)
        self.LocalWorkerFactory = None
        try:
            # importing here to avoid dependency on buildbot worker package
            from buildbot_worker.bot import LocalWorker as RemoteLocalWorker

            self.LocalWorkerFactory = RemoteLocalWorker
        except ImportError:
            error(
                "LocalWorker needs the buildbot-worker package installed "
                "(pip install buildbot-worker)"
            )
        self.remote_worker = None

    @defer.inlineCallbacks
    def reconfigService(self, name, workdir=None, **kwargs):
        """Create (first reconfig) or update the in-process worker.

        The worker's basedir defaults to <master-basedir>/workers/<name>
        and is created on demand.
        """
        kwargs['password'] = None
        yield super().reconfigService(name, **kwargs)
        if workdir is None:
            workdir = name
        workdir = os.path.abspath(os.path.join(self.master.basedir, "workers", workdir))
        if not os.path.isdir(workdir):
            os.makedirs(workdir)
        if self.remote_worker is None:
            # create the actual worker as a child service
            # we only create at reconfig, to avoid polluting memory in case of
            # reconfig
            self.remote_worker = self.LocalWorkerFactory(name, workdir)
            yield self.remote_worker.setServiceParent(self)
        else:
            # The case of a reconfig, we forward the parameters
            self.remote_worker.bot.basedir = workdir
| 2,337 | Python | .py | 51 | 38.45098 | 88 | 0.694774 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,896 | marathon.py | buildbot_buildbot/master/buildbot/worker/marathon.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from twisted.internet import defer
from twisted.logger import Logger
from buildbot import util
from buildbot.interfaces import LatentWorkerFailedToSubstantiate
from buildbot.util.httpclientservice import HTTPSession
from buildbot.util.latent import CompatibleLatentWorkerMixin
from buildbot.worker.docker import DockerBaseWorker
log = Logger()
class MarathonLatentWorker(CompatibleLatentWorkerMixin, DockerBaseWorker):
    """Marathon is a distributed docker container launcher for Mesos"""

    instance = None
    image = None
    _http = None

    def checkConfig(
        self,
        name,
        marathon_url,
        image,
        marathon_auth=None,
        marathon_extra_config=None,
        marathon_app_prefix="buildbot-worker/",
        masterFQDN=None,
        **kwargs,
    ):
        # Only the docker-base arguments are validated here; the
        # marathon-specific ones are consumed in reconfigService().
        super().checkConfig(name, image=image, masterFQDN=masterFQDN, **kwargs)

    @defer.inlineCallbacks
    def reconfigService(
        self,
        name,
        marathon_url,
        image,
        marathon_auth=None,
        marathon_extra_config=None,
        marathon_app_prefix="buildbot-worker/",
        masterFQDN=None,
        **kwargs,
    ):
        """Configure the worker and the HTTP session to the Marathon API."""
        # Set build_wait_timeout to 0s if not explicitly set: Starting a
        # container is almost immediate, we can afford doing so for each build.
        if 'build_wait_timeout' not in kwargs:
            kwargs['build_wait_timeout'] = 0
        yield super().reconfigService(name, image=image, masterFQDN=masterFQDN, **kwargs)
        self._http = HTTPSession(self.master.httpservice, marathon_url, auth=marathon_auth)
        if marathon_extra_config is None:
            marathon_extra_config = {}
        self.marathon_extra_config = marathon_extra_config
        self.marathon_app_prefix = marathon_app_prefix

    def getApplicationId(self):
        # Marathon application id, unique per worker under the prefix.
        return self.marathon_app_prefix + self.getContainerName()

    def renderWorkerProps(self, build):
        # Rendered per build so the image and extra config may use build
        # properties (renderables).
        return build.render((self.image, self.marathon_extra_config))

    @defer.inlineCallbacks
    def start_instance(self, build):
        """Create the Marathon app for this build, replacing any stale one.

        Raises LatentWorkerFailedToSubstantiate when the API does not
        answer 201 Created.
        """
        yield self.stop_instance(reportFailure=False)
        image, marathon_extra_config = yield self.renderWorkerPropsOnStart(build)
        marathon_config = {
            "container": {
                "docker": {
                    "image": image,
                    "network": "BRIDGE",
                },
                "type": "DOCKER",
            },
            "id": self.getApplicationId(),
            "instances": 1,
            "env": self.createEnvironment(),
        }
        # User-supplied marathon_extra_config is merged over the defaults.
        util.dictionary_merge(marathon_config, marathon_extra_config)
        res = yield self._http.post("/v2/apps", json=marathon_config)
        res_json = yield res.json()
        if res.code != 201:
            raise LatentWorkerFailedToSubstantiate(
                f"Unable to create Marathon app: {self.getApplicationId()} "
                f"{res.code}: {res_json['message']} {res_json}"
            )
        self.instance = res_json
        return True

    @defer.inlineCallbacks
    def stop_instance(self, fast=False, reportFailure=True):
        """Delete the Marathon app; failures are logged, not raised."""
        res = yield self._http.delete(f"/v2/apps/{self.getApplicationId()}")
        self.instance = None
        self.resetWorkerPropsOnStop()
        if res.code != 200 and reportFailure:
            res_json = yield res.json()
            # the error is not documented :-(
            log.warn(
                "Unable to delete Marathon app: {id} {code}: {message} {details}",
                id=self.getApplicationId(),
                code=res.code,
                message=res_json.get('message'),
                details=res_json,
            )
| 4,362 | Python | .py | 106 | 32.811321 | 91 | 0.655262 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,897 | upcloud.py | buildbot_buildbot/master/buildbot/worker/upcloud.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
# -*- Coding: utf-8 -*-
import hashlib
import socket
from twisted.internet import defer
from twisted.python import log
from buildbot import config
from buildbot import util
from buildbot.interfaces import LatentWorkerFailedToSubstantiate
from buildbot.util.httpclientservice import HTTPSession
from buildbot.worker import AbstractLatentWorker
DEFAULT_ZONE = "de-fra1"
DEFAULT_PLAN = "1xCPU-1GB"
DEFAULT_BASE_URL = "https://api.upcloud.com/1.3"
DEFAULT_OS_DISK_SIZE = 10
DEFAULT_CORE_NUMBER = 1
DEFAULT_MEMORY_AMOUNT = 512
class UpcloudLatentWorker(AbstractLatentWorker):
    """Latent worker backed by an UpCloud server.

    For every substantiation a new server is cloned from the configured
    storage template, and the server (including its storages) is destroyed
    again when the worker shuts down.
    """

    instance = None

    def checkConfig(
        self,
        name,
        password=None,
        api_username=None,
        api_password=None,
        image=None,
        hostconfig=None,
        base_url=DEFAULT_BASE_URL,
        masterFQDN=None,
        **kwargs,
    ):
        if image is None or api_username is None or api_password is None:
            config.error(
                "UpcloudLatentWorker: You need to specify at least"
                " an image name, zone, api_username and api_password"
            )
        AbstractLatentWorker.checkConfig(self, name, password, **kwargs)

    @defer.inlineCallbacks
    def reconfigService(
        self,
        name,
        password=None,
        zone=None,
        api_username=None,
        api_password=None,
        image=None,
        hostconfig=None,
        base_url=DEFAULT_BASE_URL,
        masterFQDN=None,
        **kwargs,
    ):
        # Worker password is generated when not supplied explicitly.
        if password is None:
            password = self.getRandomPass()
        if masterFQDN is None:
            masterFQDN = socket.getfqdn()
        self.masterFQDN = masterFQDN
        self.image = image
        if hostconfig is None:
            hostconfig = {}
        self.hostconfig = hostconfig
        self.client = yield HTTPSession(
            self.master.httpservice,
            base_url,
            auth=(api_username, api_password),
            debug=kwargs.get('debug', False),
        )
        # Short hash of the master name distinguishes containers created by
        # different masters (see getContainerName).
        masterName = util.unicode2bytes(self.master.name)
        self.masterhash = hashlib.sha1(masterName).hexdigest()[:6]
        yield AbstractLatentWorker.reconfigService(self, name, password, **kwargs)

    @defer.inlineCallbacks
    def _resolve_image(self, image):
        """Return the UUID of the storage template titled *image*, or None."""
        # get templates
        result = yield self.client.get("/storage/template")
        uuid = None
        if result.code == 200:
            templates = yield result.json()
            for template in templates["storages"]["storage"]:
                if image == template["title"]:
                    uuid = template["uuid"]
                    break
        return uuid

    def getContainerName(self):
        return (f'buildbot-{self.workername}-{self.masterhash}').replace("_", "-")

    @defer.inlineCallbacks
    def start_instance(self, build):
        """Clone a server from the template and wait until it is started.

        Raises LatentWorkerFailedToSubstantiate when the template cannot be
        resolved or server creation fails.
        """
        if self.instance is not None:
            raise ValueError('instance active')
        # convert image to UUID
        image, hostconfig = yield build.render([self.image, self.hostconfig])
        image_uuid = yield self._resolve_image(image)
        if image_uuid is None:
            log.msg(
                f"{self.__class__.__name__} {self.workername}: Instance creation failed: "
                f"Cannot find template {image}"
            )
            raise LatentWorkerFailedToSubstantiate(self.getContainerName(), 'resolving image')
        # compose json
        req = {
            "server": {
                "zone": hostconfig.get('zone', DEFAULT_ZONE),
                "title": self.getContainerName(),
                "hostname": hostconfig.get('hostname', self.name),
                "user_data": hostconfig.get('user_data', ""),
                "login_user": {
                    "username": "root",
                    "ssh_keys": {
                        "ssh_key": hostconfig.get('ssh_keys', []),
                    },
                },
                "password_delivery": "none",
                "storage_devices": {
                    "storage_device": [
                        {
                            "action": "clone",
                            "storage": image_uuid,
                            "title": self.getContainerName(),
                            "size": hostconfig.get("os_disk_size", DEFAULT_OS_DISK_SIZE),
                            "tier": "maxiops",
                        }
                    ],
                },
            }
        }
        req["server"]["plan"] = hostconfig.get("plan", DEFAULT_PLAN)
        if req["server"]["plan"] == "custom":
            req["server"]["core_number"] = hostconfig.get("core_number", DEFAULT_CORE_NUMBER)
            req["server"]["memory_amount"] = hostconfig.get("memory_amount", DEFAULT_MEMORY_AMOUNT)
        # request instance
        result = yield self.client.post("/server", json=req)
        if result.code // 100 != 2:
            reason = yield result.content()
            log.msg(
                f"{self.__class__.__name__} {self.workername}: Instance creation failed: "
                f"{result.code} {reason}"
            )
            self.failed_to_start(req['server']['hostname'], 'starting')
        instance = yield result.json()
        self.instance = instance["server"]
        self.instance["Id"] = self.instance["uuid"].split("-")[-1]

        # wait until server is actually up
        while (yield self._state()) not in ["started"]:
            yield util.asyncSleep(1, reactor=self.master.reactor)
        result = yield self.client.get(f'/server/{self.instance["uuid"]}')
        instance = yield result.json()
        log.msg(
            f'{self.__class__.__name__} {self.workername}: Instance {self.instance["Id"]} '
            f'created (root password {self.instance["password"]})'
        )
        # include root password as worker property
        self.properties.setProperty("root_password", self.instance['password'], "Worker")
        return [self.instance["Id"], image]

    @defer.inlineCallbacks
    def _state(self):
        """Return the server state string, or "absent" if it no longer exists."""
        result = yield self.client.get(f'/server/{self.instance["uuid"]}')
        if result.code == 404:
            return "absent"
        else:
            server = yield result.json()
            return server["server"]["state"]

    @defer.inlineCallbacks
    def stop_instance(self, fast=False):
        """Hard-stop the server, wait for it to stop, then delete it."""
        if self.instance is None:
            # be gentle. Something may just be trying to alert us that an
            # instance never attached, and it's because, somehow, we never
            # started.
            return
        log.msg(
            f'{self.__class__.__name__} {self.workername}: Stopping instance '
            f'{self.instance["Id"]}...'
        )
        result = yield self.client.post(
            f'/server/{self.instance["uuid"]}/stop',
            json={"stop_server": {"stop_type": "hard", "timeout": "1"}},
        )
        if result.code // 100 != 2:
            reason = yield result.content()
            # _state() returns a Deferred and must be yielded; interpolating
            # the call directly would embed a Deferred repr in the message.
            state = yield self._state()
            reason = (
                f'{self.__class__.__name__} {self.workername} failed to stop instance '
                f'{self.instance["Id"]} ({state}): {reason.decode()}'
            )
            self.instance = None
            raise RuntimeError(reason)
        while (yield self._state()) not in ["stopped", "absent"]:
            yield util.asyncSleep(1, reactor=self.master.reactor)
        # destroy it
        result = yield self.client.delete(f'/server/{self.instance["uuid"]}?storages=1')
        if result.code // 100 != 2:
            reason = yield result.content()
            # Same as above: resolve the state before formatting the message.
            state = yield self._state()
            reason = (
                f'{self.__class__.__name__} {self.workername} failed to delete instance '
                f'{self.instance["Id"]} ({state}): {reason.decode()}'
            )
            self.instance = None
            raise RuntimeError(reason)
| 8,519 | Python | .py | 207 | 30.700483 | 99 | 0.578738 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,898 | libvirt.py | buildbot_buildbot/master/buildbot/worker/libvirt.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Portions Copyright Buildbot Team Members
# Portions Copyright 2010 Isotoma Limited
import os
import socket
from twisted.internet import defer
from twisted.python import log
from buildbot import config
from buildbot.interfaces import LatentWorkerFailedToSubstantiate
from buildbot.util import runprocess
from buildbot.util.queue import ConnectableThreadQueue
from buildbot.worker import AbstractLatentWorker
try:
import libvirt
except ImportError:
libvirt = None
def handle_connect_close(conn, reason, opaque):
    """libvirt close-callback: tear down the owning queue's connection.

    *opaque* is the object passed to ``registerCloseCallback``; *conn* and
    *reason* are ignored here.
    """
    opaque.close_connection()
class ThreadWithQueue(ConnectableThreadQueue):
    """Work queue owning a single libvirt connection for one URI."""

    def __init__(self, pool, uri, *args, **kwargs):
        # currently used only for testing
        self.pool = pool
        self.uri = uri
        super().__init__(*args, **kwargs)

    def on_close_connection(self, conn):
        self.close_connection()

    def close_connection(self):
        # Let the base class forget the connection first, then close the
        # libvirt handle itself.
        handle = self.conn
        super().close_connection()
        handle.close()

    def libvirt_open(self):
        return libvirt.open(self.uri)

    def create_connection(self):
        """Open a libvirt connection; returns None on failure so the queue
        can retry later."""
        try:
            log.msg(f"Connecting to {self.uri}")
            connection = self.libvirt_open()
            connection.registerCloseCallback(handle_connect_close, self)
            log.msg(f"Connected to {self.uri}")
            return connection
        except Exception as e:
            log.err(f"Error connecting to {self.uri}: {e}, will retry later")
            return None
class ServerThreadPool:
    """Maps libvirt URIs to dedicated worker threads with request queues."""

    ThreadClass = ThreadWithQueue

    def __init__(self):
        self.threads = {}

    def do(self, uri, func, *args, **kwargs):
        """Schedule ``func(conn, ...)`` on the thread owning *uri*.

        Creates the thread on first use. Returns a Deferred.
        """
        if uri not in self.threads:
            self.threads[uri] = self.ThreadClass(self, uri)

        def logging_func(conn, *args, **kwargs):
            try:
                return func(conn, *args, **kwargs)
            except Exception as e:
                log.err(f"libvirt: Exception on {uri}: {e}")
                raise

        return self.threads[uri].execute_in_thread(logging_func, *args, **kwargs)

    def is_connected(self, uri):
        thread = self.threads.get(uri)
        return thread is not None and thread.conn is not None

    def is_connecting(self, uri):
        thread = self.threads.get(uri)
        return thread.connecting if thread is not None else False

    @defer.inlineCallbacks
    def get_or_create_connection(self, uri):
        if uri not in self.threads:
            yield self.do(uri, lambda: None)
        return self.threads[uri].conn

    def reset_connection(self, uri):
        thread = self.threads.get(uri)
        if thread is None:
            log.err(f'libvirt.ServerThreadPool: Unknown connection {uri}')
        else:
            thread.close_connection()
# Module-level pool shared by every LibVirtWorker in this process.
# A module is effectively a singleton class, so this is OK
threadpool = ServerThreadPool()
class Connection:
    """Lightweight holder for a libvirt connection URI."""

    def __init__(self, uri):
        self.uri = uri
class LibVirtWorker(AbstractLatentWorker):
    """Latent worker that boots a libvirt-managed virtual machine on demand.

    Worker name, password and the master FQDN are handed to the guest via
    libvirt domain metadata (``metadata``/``ns``/``metakey``) when no explicit
    domain XML is given.
    """

    pool = threadpool
    # Metadata template through which the guest learns its credentials.
    metadata = '<auth username="{}" password="{}" master="{}"/>'
    ns = 'http://buildbot.net/'
    metakey = 'buildbot'

    def __init__(
        self,
        name,
        password,
        hd_image=None,
        base_image=None,
        uri="system:///",
        xml=None,
        masterFQDN=None,
        **kwargs,
    ):
        super().__init__(name, password, **kwargs)
        if not libvirt:
            config.error("The python module 'libvirt' is needed to use a LibVirtWorker")

        self.uri = uri
        self.image = hd_image
        self.base_image = base_image
        self.xml = xml
        if masterFQDN:
            self.masterFQDN = masterFQDN
        else:
            self.masterFQDN = socket.getfqdn()

        self.cheap_copy = True
        self.graceful_shutdown = False

    def _pool_do(self, func):
        # Run func(conn) on the thread owning this URI's libvirt connection.
        return self.pool.do(self.uri, func)

    @defer.inlineCallbacks
    def _get_domain(self):
        """Look up this worker's domain by name.

        On a libvirt error the pooled connection is reset (it may be stale)
        and the original error is re-raised.
        """
        try:
            domain = yield self._pool_do(lambda conn: conn.lookupByName(self.workername))
            return domain
        except libvirt.libvirtError as e:
            log.err(f'LibVirtWorker: got error when accessing domain: {e}')
            try:
                self.pool.reset_connection(self.uri)
            except Exception as e1:
                log.err(f'LibVirtWorker: got error when resetting connection: {e1}')
            raise e

    @defer.inlineCallbacks
    def _get_domain_id(self):
        """Return the domain's numeric ID, or -1 when no domain is found."""
        domain = yield self._get_domain()
        if domain is None:
            return -1
        domain_id = yield self._pool_do(lambda conn: domain.ID())
        return domain_id

    @defer.inlineCallbacks
    def _prepare_base_image(self):
        """
        I am a private method for creating (possibly cheap) copies of a
        base_image for start_instance to boot.
        """
        if not self.base_image:
            return
        if self.cheap_copy:
            # qcow2 copy-on-write overlay backed by the base image
            clone_cmd = [
                'qemu-img',
                'create',
                '-o',
                'backing_fmt=qcow2',
                '-b',
                self.base_image,
                '-f',
                'qcow2',
                self.image,
            ]
        else:
            clone_cmd = ['cp', self.base_image, self.image]

        # NOTE: the original log message had a stray trailing apostrophe.
        log.msg(f"Cloning base image: {clone_cmd}")
        try:
            rc = yield runprocess.run_process(
                self.master.reactor, clone_cmd, collect_stdout=False, collect_stderr=False
            )
            if rc != 0:
                raise LatentWorkerFailedToSubstantiate(f'Failed to clone image (rc={rc})')
        except Exception as e:
            log.err(f"Cloning failed: {e}")
            raise

    @defer.inlineCallbacks
    def start_instance(self, build):
        """
        I start a new instance of a VM.

        If a base_image is specified, I will make a clone of that; otherwise I
        will use the image directly.

        If I'm not given a libvirt domain definition XML, I will look for my
        name in the list of defined virtual machines and start that.
        """
        try:
            domain_id = yield self._get_domain_id()
            if domain_id != -1:
                raise LatentWorkerFailedToSubstantiate(
                    f"{self}: Cannot start_instance as it's already active"
                )
        except Exception as e:
            raise LatentWorkerFailedToSubstantiate(
                f'{self}: Got error while retrieving domain ID: {e}'
            ) from e

        yield self._prepare_base_image()

        try:
            if self.xml:
                yield self._pool_do(lambda conn: conn.createXML(self.xml, 0))
            else:
                domain = yield self._get_domain()
                # Attach credentials to the (pre-defined) domain before boot.
                yield self._pool_do(
                    lambda conn: domain.setMetadata(
                        libvirt.VIR_DOMAIN_METADATA_ELEMENT,
                        self.metadata.format(self.workername, self.password, self.masterFQDN),
                        self.metakey,
                        self.ns,
                        libvirt.VIR_DOMAIN_AFFECT_CONFIG,
                    )
                )
                yield self._pool_do(lambda conn: domain.create())
        except Exception as e:
            raise LatentWorkerFailedToSubstantiate(
                f'{self}: Got error while starting VM: {e}'
            ) from e

        return True

    @defer.inlineCallbacks
    def stop_instance(self, fast=False):
        """
        I attempt to stop a running VM.
        I make sure any connection to the worker is removed.
        If the VM was using a cloned image, I remove the clone.
        When everything is tidied up, I ask that bbot looks for work to do.
        """
        domain_id = yield self._get_domain_id()
        if domain_id == -1:
            log.msg(f"{self}: Domain is unexpectedly not running")
            return
        domain = yield self._get_domain()
        if self.graceful_shutdown and not fast:
            log.msg(f"Graceful shutdown chosen for {self.workername}")
            try:
                yield self._pool_do(lambda conn: domain.shutdown())
            except Exception as e:
                log.msg(f'{self}: Graceful shutdown failed ({e}). Force destroying domain')
                # Don't re-throw to stop propagating shutdown error if destroy was successful.
                yield self._pool_do(lambda conn: domain.destroy())
        else:
            yield self._pool_do(lambda conn: domain.destroy())

        if self.base_image:
            # The per-worker clone is disposable; only remove it when it was
            # derived from a base image.
            log.msg(f'{self}: Removing image {self.image}')
            os.remove(self.image)
| 9,263 | Python | .py | 239 | 28.916318 | 94 | 0.600825 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
4,899 | manager.py | buildbot_buildbot/master/buildbot/worker/manager.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import annotations
from twisted.internet import defer
from twisted.python import log
from buildbot.process.measured_service import MeasuredBuildbotServiceManager
from buildbot.util import misc
from buildbot.worker.protocols import msgpack as bbmsgpack
from buildbot.worker.protocols import pb as bbpb
class WorkerRegistration:
    """Tracks one worker's registration with the master's protocol listeners."""

    __slots__ = ['master', 'worker', 'pbReg', 'msgpack_reg']

    def __init__(self, master, worker):
        self.master = master
        self.worker = worker
        self.pbReg = None
        self.msgpack_reg = None

    def __repr__(self):
        return f"<{self.__class__.__name__} for {self.worker.workername!r}>"

    @defer.inlineCallbacks
    def unregister(self):
        """Remove any protocol registrations, then drop this registration."""
        worker = self.worker
        # update with portStr=None to remove any registration in place
        if self.pbReg is not None:
            yield self.master.workers.pb.updateRegistration(
                worker.workername, worker.password, None
            )
        if self.msgpack_reg is not None:
            yield self.master.workers.msgpack.updateRegistration(
                worker.workername, worker.password, None
            )
        yield self.master.workers._unregister(self)

    @defer.inlineCallbacks
    def update(self, worker_config, global_config):
        # For most protocols, there's nothing to do, but for PB we must
        # update the registration in case the port or password has changed.
        protocols = global_config.protocols
        if 'pb' in protocols:
            self.pbReg = yield self.master.workers.pb.updateRegistration(
                worker_config.workername,
                worker_config.password,
                protocols['pb']['port'],
            )
        if 'msgpack_experimental_v7' in protocols:
            self.msgpack_reg = yield self.master.workers.msgpack.updateRegistration(
                worker_config.workername,
                worker_config.password,
                protocols['msgpack_experimental_v7']['port'],
            )

    def getPBPort(self):
        return self.pbReg.getPort()

    def get_msgpack_port(self):
        return self.msgpack_reg.getPort()
class WorkerManager(MeasuredBuildbotServiceManager):
    """Owns worker registrations, protocol listeners and live connections."""

    name: str | None = "WorkerManager"  # type: ignore[assignment]

    managed_services_name = "workers"
    config_attr = "workers"

    # seconds to wait for an existing connection to answer a ping before the
    # new (duplicate) connection wins
    PING_TIMEOUT = 10
    reconfig_priority = 127

    def __init__(self, master):
        super().__init__()

        self.pb = bbpb.Listener(master)
        self.msgpack = bbmsgpack.Listener(master)

        # WorkerRegistration instances keyed by worker name
        self.registrations = {}

        # connection objects keyed by worker name
        self.connections = {}

    @property
    def workers(self):
        # self.workers contains a ready Worker instance for each
        # potential worker, i.e. all the ones listed in the config file.
        # If the worker is connected, self.workers[workername].worker will
        # contain a RemoteReference to their Bot instance. If it is not
        # connected, that attribute will hold None.
        # workers attribute is actually just an alias to multiService's
        # namedService
        return self.namedServices

    def getWorkerByName(self, workerName):
        return self.registrations[workerName].worker

    def register(self, worker):
        # TODO: doc that reg.update must be called, too
        workerName = worker.workername
        reg = WorkerRegistration(self.master, worker)
        self.registrations[workerName] = reg
        return defer.succeed(reg)

    def _unregister(self, registration):
        del self.registrations[registration.worker.workername]

    @defer.inlineCallbacks
    def newConnection(self, conn, workerName):
        """Accept a new worker connection, arbitrating duplicates.

        When a connection for *workerName* already exists, it is pinged: a
        live old connection wins (RuntimeError is raised to reject the new
        one); a dead or unresponsive one is dropped in favour of *conn*.
        Raises whatever the initial handshake with the worker raises.
        """
        if workerName in self.connections:
            log.msg(
                f"Got duplication connection from '{workerName}'" " starting arbitration procedure"
            )
            old_conn = self.connections[workerName]
            try:
                yield misc.cancelAfter(
                    self.PING_TIMEOUT,
                    old_conn.remotePrint("master got a duplicate connection"),
                    self.master.reactor,
                )
                # if we get here then old connection is still alive, and new
                # should be rejected
                raise RuntimeError("rejecting duplicate worker")
            except defer.CancelledError:
                old_conn.loseConnection()
                log.msg(
                    f"Connected worker '{workerName}' ping timed out after {self.PING_TIMEOUT} "
                    "seconds"
                )
            except RuntimeError:
                raise
            except Exception as e:
                old_conn.loseConnection()
                log.msg(f"Got error while trying to ping connected worker {workerName}:{e}")

            log.msg(f"Old connection for '{workerName}' was lost, accepting new")

        try:
            yield conn.remotePrint(message="attached")
            info = yield conn.remoteGetWorkerInfo()
            log.msg(f"Got workerinfo from '{workerName}'")
        except Exception as e:
            # (fixed) the f-string already interpolates its values; the old
            # trailing .format(workerName, e) call was a redundant no-op
            log.msg(f"Failed to communicate with worker '{workerName}'\n{e}")
            raise

        conn.info = info
        self.connections[workerName] = conn

        def remove():
            del self.connections[workerName]

        conn.notifyOnDisconnect(remove)

        # accept the connection
        return True
| 6,170 | Python | .py | 135 | 36.407407 | 99 | 0.657233 | buildbot/buildbot | 5,232 | 1,616 | 728 | GPL-2.0 | 9/5/2024, 5:09:21 PM (Europe/Amsterdam) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.