bh@99: # Copyright (C) 2007, 2008 by Intevation GmbH bh@0: # Authors: bh@0: # Bernhard Herzog bh@0: # bh@0: # This program is free software under the GPL (>=v2) bh@0: # Read the file COPYING coming with the software for details. bh@0: bh@0: """Classes to automatically build debian packages from subversion checkouts""" bh@0: bh@0: import os bh@0: import time bh@0: import re bh@0: import logging bh@0: import shutil bh@16: import datetime bh@113: import new bh@0: bh@0: import util bh@0: import subversion bh@0: import run bh@16: import status bh@128: import debian bh@45: from cmdexpand import cmdexpand bh@112: from builder import PBuilder bh@0: bh@135: def _fromparent(attr): bh@135: """Creates a property that delegates its value to self.parent.""" bh@135: def get(self): bh@135: return getattr(self.parent, attr) bh@135: return property(get) bh@135: bh@0: bh@0: class SourcePackager(object): bh@0: bh@4: # Derived classes must supply the package basename bh@4: pkg_basename = None bh@4: bh@135: track = _fromparent("track") bh@135: revision = _fromparent("revision") bh@135: status = _fromparent("status") bh@136: log_dir = _fromparent("log_dir") bh@135: work_dir = _fromparent("work_dir") bh@135: src_dir = _fromparent("src_dir") bh@135: bh@135: def __init__(self, parent): bh@135: self.parent = parent bh@4: assert(self.pkg_basename) bh@0: bh@0: def determine_package_version(self, directory): bh@4: """Returns the version number of the new package as a string bh@4: bh@4: The directory parameter is the name of the directory containing bh@4: the newly exported sources. The sources were exported with the bh@4: export_sources method. bh@4: bh@4: The default implementation simply returns the revision converted bh@4: to a string. bh@4: """ bh@4: return str(self.revision) bh@0: bh@0: def export_sources(self): bh@4: """Export the sources from the subversion working directory bh@4: bh@4: This method first exports the sources to a temporary directory bh@4: and then renames the directory. The new name is of the form bh@4: bh@4: - bh@4: bh@4: Where pkg_basename is the value of self.pkg_basename and version bh@4: is the return value of the determine_package_version() method. bh@4: """ bh@0: temp_dir = os.path.join(self.work_dir, "temp") bh@53: self.track.export_sources(temp_dir) bh@0: bh@0: pkgbaseversion = self.determine_package_version(temp_dir) bh@4: pkgbasedir = os.path.join(self.work_dir, bh@4: self.pkg_basename + "-" + pkgbaseversion) bh@0: bh@0: os.rename(temp_dir, pkgbasedir) bh@0: return pkgbaseversion, pkgbasedir bh@0: bh@0: bh@0: def update_version_numbers(self, pkgbasedir): bh@4: """Updates the version numbers in the code in pkgbasedir. bh@4: bh@4: The default implementation does nothing. Derived classes should bh@4: override this method if necessary. bh@4: """ bh@0: bh@207: def create_tarball(self, tarballname, workdir, basedir, compression="gz"): bh@4: """Creates a new tarball. bh@4: bh@4: Parameters: bh@4: bh@4: tarballname -- the filename of the new tarball bh@4: workdir -- The directory into which to change before running tar. bh@4: (actually this is done with GNUI tar's -C option) bh@4: basedir -- The basedirectory of the files that are packaged bh@4: into the tarfile. This should be a relative bh@4: filename directly in workdir. bh@207: compression -- The compression method to use as a string. bh@207: Suported are 'gz' for gzip compression (the bh@207: default) and 'bz2' for bzip2. bh@4: """ bh@0: logging.info("Creating tarball %r", tarballname) bh@207: if compression == "gz": bh@207: compression_flag = "z" bh@207: elif compression == "bz2": bh@207: compression_flag = "j" bh@207: else: bh@207: raise ValueError("Unknown compression method %r" % compression) bh@207: bh@207: run.call(cmdexpand("tar c -$compression_flag -f $tarballname" bh@207: " -C $workdir $basedir", **locals())) bh@0: bh@0: def copy_debian_directory(self, pkgbasedir, pkgbaseversion, changemsg): bh@4: """Copies the debian directory and updates the copy's changelog bh@4: bh@4: Parameter: bh@4: pkgbasedir -- The directory holding the unpacked source package bh@4: pkgbaseversion -- The version to update the changelog to bh@4: changemsg -- The message for the changelog bh@4: bh@4: When determining the actual version for the new package, this bh@4: function looks at the previous version in the changelog. If it bh@4: has a prefix separated from the version number by a colon this bh@4: prefix is prepended to the pkgbaseversion parameter. Debian bh@4: uses such prefixes for the kde packages. bh@4: """ bh@0: debian_dir = os.path.join(pkgbasedir, "debian") bh@0: changelog = os.path.join(debian_dir, "changelog") bh@0: bh@53: self.track.copy_debian_directory(debian_dir) bh@0: bh@0: logging.info("Updating %r", changelog) bh@0: oldversion = util.debian_changelog_version(changelog) bh@0: if ":" in oldversion: bh@0: oldversionprefix = oldversion.split(":")[0] + ":" bh@0: else: bh@0: oldversionprefix = "" bh@93: debrev = self.track.debrevision_prefix + "1" bh@45: run.call(cmdexpand("debchange -c $changelog" bh@93: " -v ${oldversionprefix}${pkgbaseversion}-${debrev}" bh@45: " $changemsg", **locals()), bh@53: env=self.track.debian_environment()) bh@0: bh@0: bh@0: def create_source_package(self, pkgbasedir, origtargz): bh@4: """Creates a new source package from pkgbasedir and origtargz""" bh@194: util.ensure_directory(self.log_dir) bh@194: dpkg_source_log = os.path.join(self.log_dir, "dpkg_source.txt") bh@194: logging.info("Creating new source package; logging to %s", bh@194: dpkg_source_log) bh@45: run.call(cmdexpand("dpkg-source -b $directory $tarball", bh@45: directory=os.path.basename(pkgbasedir), bh@45: tarball=os.path.basename(origtargz)), bh@0: cwd=os.path.dirname(pkgbasedir), bh@194: logfile=dpkg_source_log, bh@53: env=self.track.debian_environment()) bh@0: bh@4: def move_source_package(self, pkgbasename): bh@4: """Moves the new source package from the work_dir to the src_dir""" bh@4: logging.info("Moving source package to %r", self.src_dir) bh@4: util.ensure_directory(self.src_dir) bh@4: for filename in [filename for filename in os.listdir(self.work_dir) bh@4: if filename.startswith(pkgbasename)]: bh@4: os.rename(os.path.join(self.work_dir, filename), bh@4: os.path.join(self.src_dir, filename)) bh@4: bh@179: def sign_package(self): bh@179: """Signs the .dsc file created buy the instance""" bh@179: self.track.sign_file(util.listdir_abs(self.src_dir, "*.dsc")[0]) bh@179: bh@0: def package(self): bh@4: """Creates a source package from a subversion checkout. bh@4: bh@4: After setting up the working directory, this method calls the bh@4: do_package method to do the actual packaging. Afterwards the bh@4: work directory is removed. bh@4: """ bh@0: util.ensure_directory(self.work_dir) bh@0: try: bh@41: self.status.creating_source_package() bh@4: self.do_package() bh@179: self.sign_package() bh@41: self.status.source_package_created() bh@0: finally: bh@0: logging.info("Removing workdir %r", self.work_dir) bh@0: shutil.rmtree(self.work_dir) bh@0: bh@4: def do_package(self): bh@4: """Does the work of creating a source package bh@4: This method must be overriden by derived classes. bh@4: bh@4: The method should do the work in self.work_dir. When the bh@4: package is done, the source package files should be in bh@4: self.src_dir. bh@4: """ bh@4: raise NotImplementedError bh@4: bh@0: bh@0: class BinaryPackager(object): bh@0: bh@135: track = _fromparent("track") bh@135: status = _fromparent("status") bh@136: log_dir = _fromparent("log_dir") bh@135: binary_dir = _fromparent("binary_dir") bh@135: bh@135: def __init__(self, parent, dsc_file, logfile): bh@135: self.parent = parent bh@0: self.dsc_file = dsc_file bh@0: self.logfile = logfile bh@0: bh@0: def package(self): bh@41: self.status.creating_binary_package() bh@0: util.ensure_directory(self.binary_dir) bh@149: util.ensure_directory(self.log_dir) bh@54: logging.info("Building binary package; logging to %r", self.logfile) bh@112: self.track.builder.build(self.dsc_file, self.binary_dir, self.logfile) bh@179: self.sign_package() bh@41: self.status.binary_package_created() bh@0: bh@179: def sign_package(self): bh@179: """Signs the .changes file created buy the instance""" bh@179: self.track.sign_file(util.listdir_abs(self.binary_dir, "*.changes")[0]) bh@179: bh@0: bh@0: class RevisionPackager(object): bh@0: bh@4: source_packager_cls = SourcePackager bh@4: binary_packager_cls = BinaryPackager bh@4: bh@53: def __init__(self, track, revision): bh@53: self.track = track bh@0: self.revision = revision bh@53: self.base_dir = self.track.pkg_dir_for_revision(self.revision, 1) bh@36: self.status = status.RevisionStatus(os.path.join(self.base_dir, bh@36: "status")) bh@0: bh@172: log_dir = util.filenameproperty("log") bh@172: work_dir = util.filenameproperty("work") bh@172: binary_dir = util.filenameproperty("binary") bh@172: src_dir = util.filenameproperty("src") bh@172: build_log = util.filenameproperty("build_log.txt", dir_attr="log_dir") bh@0: bh@0: def find_dsc_file(self): bh@0: for filename in os.listdir(self.src_dir): bh@0: if filename.endswith(".dsc"): bh@0: return os.path.join(self.src_dir, filename) bh@0: return None bh@0: bh@18: def has_build_log(self): bh@18: return os.path.exists(self.build_log) bh@18: bh@140: def list_log_files(self): bh@140: """Returns a list describing the logfiles available for the revision. bh@140: Each list item is a tuple of the form (TITLE, FILENAME) where bh@140: TITLE is a string with a title usable in e. g. a web-page, and bh@140: FILENAME is the absolute filename of the log file. bh@140: """ bh@140: files = [] bh@140: if self.has_build_log(): bh@140: files.append(("build log", self.build_log)) bh@140: return files bh@140: bh@88: def list_source_files(self): bh@88: """Returns a list with the names of the files of the source package. bh@88: The implementation assumes that all files in self.src_dir belong bh@88: to the source package. bh@88: """ bh@88: return sorted(util.listdir_abs(self.src_dir)) bh@88: bh@88: def list_binary_files(self): bh@88: """Returns a list with the names of the files of the binary packages. bh@88: The implementation assumes that all files in self.binary_dir belong bh@88: to the binary packages. bh@88: """ bh@88: return sorted(util.listdir_abs(self.binary_dir)) bh@88: bh@0: def package(self): bh@0: try: bh@16: util.ensure_directory(self.work_dir) bh@16: self.status.start = datetime.datetime.utcnow() bh@135: src_packager = self.source_packager_cls(self) bh@0: src_packager.package() bh@0: bh@0: dsc_file = self.find_dsc_file() bh@0: if dsc_file is None: bh@0: raise RuntimeError("Cannot find dsc File in %r" % self.src_dir) bh@0: bh@135: bin_packager = self.binary_packager_cls(self, dsc_file, bh@18: self.build_log) bh@0: bin_packager.package() bh@16: self.status.stop = datetime.datetime.utcnow() bh@0: except: bh@41: self.status.error() bh@16: self.status.stop = datetime.datetime.utcnow() bh@99: # set the notification status last to avoid race conditions. bh@99: # The pending notification is for now the only situation bh@99: # where another process might modify the status file (the bh@99: # listpendingnotifications program will set it to bh@99: # "notification_sent") bh@99: self.status.notification_pending() bh@0: raise bh@0: bh@0: def remove_package_dir(self): bh@0: logging.info("Removing pkgdir %r", self.base_dir) bh@0: shutil.rmtree(self.base_dir) bh@0: bh@0: bh@52: class PackageTrack(object): bh@0: bh@4: revision_packager_cls = RevisionPackager bh@4: bh@4: svn_external_subdirs = [] bh@4: bh@4: extra_config_desc = [] bh@4: bh@47: def __init__(self, name, base_dir, svn_url, root_cmd, pbuilderrc, deb_email, bh@93: deb_fullname, packager_class="treepkg.packager", bh@167: debrevision_prefix="treepkg", handle_dependencies=False, bh@191: signing_key_id="", do_build=True): bh@0: self.name = name bh@0: self.base_dir = base_dir bh@0: self.svn_url = svn_url bh@176: self.builder = PBuilder(pbuilderrc, root_cmd, bh@176: release_signing_keyid=signing_key_id) bh@0: self.deb_email = deb_email bh@0: self.deb_fullname = deb_fullname bh@93: self.debrevision_prefix = debrevision_prefix bh@167: self.signing_key_id = signing_key_id bh@191: self.do_build = do_build bh@128: self.handle_dependencies = handle_dependencies bh@128: self.dependencies = None bh@0: self.pkg_dir_template = "%(revision)d-%(increment)d" bh@0: self.pkg_dir_regex \ bh@0: = re.compile(r"(?P[0-9]+)-(?P[0-9]+)$") bh@0: bh@172: checkout_dir = util.filenameproperty("checkout") bh@172: debian_dir = util.filenameproperty("debian") bh@172: pkg_dir = util.filenameproperty("pkg") bh@0: bh@106: def init_treepkg(self): bh@106: print "Initializing", self.name bh@106: if not os.path.exists(self.base_dir): bh@106: print "creating %s" % (self.base_dir,) bh@106: util.ensure_directory(self.base_dir) bh@106: if not os.path.exists(self.debian_dir): bh@106: print ("TODO: the debian directory %s still has to be created" bh@106: % (self.debian_dir,)) bh@106: bh@128: def determine_dependencies(self): bh@128: if self.dependencies is not None: bh@128: return bh@128: bh@128: requires = () bh@128: provides = () bh@131: # only try to parse the control file if the debian directory bh@131: # exists. If the debian directory doesn't exist yet, the tree bh@131: # packager is likely still being configured and this code may be bh@131: # run indirectly from e. g. bin/inittreepkg.py in which case the bh@131: # init_treepkg method will report the missing debian bh@131: if self.handle_dependencies and os.path.exists(self.debian_dir): bh@128: control = debian.DebianControlFile(os.path.join(self.debian_dir, bh@128: "control")) bh@128: requires = control.build_depends bh@128: provides = (pkg[0] for pkg in control.packages) bh@128: self.dependencies = (set(requires), set(provides)) bh@128: logging.debug("Track %s: build depends: %s", self.name, bh@128: " ".join(self.dependencies[0])) bh@128: logging.debug("Track %s: provides: %s", self.name, bh@128: " ".join(self.dependencies[1])) bh@128: bh@128: def dependencies_required(self): bh@128: """Returns a list of required packages""" bh@128: self.determine_dependencies() bh@128: return self.dependencies[0] bh@128: bh@128: def dependencies_provided(self): bh@128: """Returns a list of provided packages""" bh@128: self.determine_dependencies() bh@128: return self.dependencies[1] bh@128: bh@0: def pkg_dir_for_revision(self, revision, increment): bh@0: return os.path.join(self.pkg_dir, bh@0: self.pkg_dir_template % locals()) bh@0: bh@0: def last_changed_revision(self): bh@4: revisions = [] bh@4: for directory in [self.checkout_dir] + self.svn_external_subdirs: bh@4: directory = os.path.join(self.checkout_dir, directory) bh@4: revisions.append(subversion.last_changed_revision(directory)) bh@4: return max(revisions) bh@0: bh@11: def get_revision_numbers(self): bh@11: """Returns a list of the numbers of the packaged revisions""" bh@11: revisions = [] bh@11: if os.path.exists(self.pkg_dir): bh@11: for filename in os.listdir(self.pkg_dir): bh@11: match = self.pkg_dir_regex.match(filename) bh@11: if match: bh@11: revisions.append(int(match.group("revision"))) bh@85: revisions.sort() bh@11: return revisions bh@11: bh@0: def last_packaged_revision(self): bh@0: """Returns the revision number of the highest packaged revision. bh@0: bh@0: If the revision cannot be determined because no already packaged bh@0: revisions can be found, the function returns -1. bh@0: """ bh@11: return max([-1] + self.get_revision_numbers()) bh@0: bh@80: def update_checkout(self, revision=None): bh@0: """Updates the working copy of self.svn_url in self.checkout_dir. bh@0: bh@0: If self.checkout_dir doesn't exist yet, self.svn_url is checked bh@80: out into that directory. The value of the revision parameter is bh@80: passed through to subversion.update. bh@0: """ bh@0: localdir = self.checkout_dir bh@0: if os.path.exists(localdir): bh@0: logging.info("Updating the working copy in %r", localdir) bh@80: subversion.update(localdir, revision=revision) bh@0: else: bh@0: logging.info("The working copy in %r doesn't exist yet." bh@26: " Checking out from %r", localdir, bh@0: self.svn_url) bh@0: subversion.checkout(self.svn_url, localdir) bh@0: bh@0: def export_sources(self, to_dir): bh@0: logging.info("Exporting sources for tarball to %r", to_dir) bh@0: subversion.export(self.checkout_dir, to_dir) bh@0: # some versions of svn (notably version 1.4.2 shipped with etch) bh@202: # do not export externals such as the admin subdirectory. We bh@202: # may have to do that in an extra step. bh@4: for subdir in self.svn_external_subdirs: bh@4: absdir = os.path.join(to_dir, subdir) bh@4: if not os.path.isdir(absdir): bh@4: subversion.export(os.path.join(self.checkout_dir, subdir), bh@4: absdir) bh@0: bh@0: def copy_debian_directory(self, to_dir): bh@0: logging.info("Copying debian directory to %r", to_dir) bh@0: shutil.copytree(self.debian_dir, to_dir) bh@0: bh@0: def debian_environment(self): bh@0: """Returns the environment variables for the debian commands""" bh@0: env = os.environ.copy() bh@0: env["DEBFULLNAME"] = self.deb_fullname bh@0: env["DEBEMAIL"] = self.deb_email bh@0: return env bh@0: bh@190: def package_if_updated(self, revision=None, do_svn_update=True): bh@190: """Returns a new packager if the working copy has not been packaged yet. bh@190: If do_svn_update is true -- the default -- update the working bh@190: copy to the the revision specified with the revision parameter bh@190: or if revision is None, the latest revision in the repository.""" bh@191: if not self.do_build: bh@191: return None bh@190: if do_svn_update: bh@190: self.update_checkout(revision=revision) bh@0: current_revision = self.last_changed_revision() bh@0: logging.info("New revision is %d", current_revision) bh@98: if current_revision not in self.get_revision_numbers(): bh@98: logging.info("Revision %d has not been packaged yet", bh@98: current_revision) bh@4: return self.revision_packager_cls(self, current_revision) bh@0: else: bh@98: logging.info("Revision %d has already been packaged.", bh@98: current_revision) bh@0: bh@14: def get_revisions(self): bh@14: """Returns RevisionPackager objects for each packaged revision""" bh@14: return [self.revision_packager_cls(self, revision) bh@14: for revision in self.get_revision_numbers()] bh@14: bh@179: def sign_file(self, filename): bh@179: """Signs a file using the debian.sign_file function. bh@179: The file is signed with the key indicated by the track's bh@179: signing_key_id attribute. If that is empty, the file is not bh@179: signed. bh@179: """ bh@179: if self.signing_key_id: bh@179: logging.info("Signing %r with key %r", filename, bh@179: self.signing_key_id) bh@179: debian.sign_file(filename, self.signing_key_id) bh@179: bh@0: bh@113: def import_packager_module(packager_class): bh@113: """Import the packager module named by packager_class. bh@113: bh@113: The packager_class must be the full absolute module name for the bh@125: packager. The function tries to find or create a suitable bh@125: PackageTrack class from this module using the following rules: bh@125: bh@125: - If the module contains a class called PackageTrack, use that. bh@125: bh@125: - Otherwise create one using the module's RevisionPackager class, bh@125: creating RevisionPackager if necessary. bh@125: bh@125: - If RevisionPackager needs to be created, it uses the module's bh@125: SourcePackager as source_packager_cls and if present also the bh@125: module's BinaryPackager as binary_packager_cls. If the module bh@125: does not have a BinaryPackager, the default BinaryPackager is bh@125: used. bh@113: """ bh@113: module = util.import_dotted_name(packager_class) bh@113: if not hasattr(module, "PackageTrack"): bh@125: if not hasattr(module, "RevisionPackager"): bh@125: binary_packager = getattr(module, "BinaryPackager", BinaryPackager) bh@125: module.RevisionPackager \ bh@125: = new.classobj("RevisionPackager", (RevisionPackager,), bh@125: dict(source_packager_cls=module.SourcePackager, bh@125: binary_packager_cls=binary_packager)) bh@113: module.PackageTrack \ bh@113: = new.classobj("PackageTrack", (PackageTrack,), bh@113: dict(revision_packager_cls=module.RevisionPackager)) bh@113: return module bh@113: bh@52: def create_package_track(packager_class, **kw): bh@113: module = import_packager_module(packager_class) bh@52: return module.PackageTrack(**kw) bh@4: bh@0: bh@128: class CyclicDependencyError(Exception): bh@128: bh@128: """Exception thrown when a cycle is detected in the track dependencies""" bh@128: bh@128: def __init__(self, tracks): bh@128: Exception.__init__(self, bh@128: "Cyclic dependencies between" " tracks (%s)" bh@128: % ", ".join([track.name for track in tracks])) bh@128: bh@128: bh@7: class PackagerGroup(object): bh@0: bh@91: def __init__(self, package_tracks, check_interval, revision=None, bh@190: instructions_file=None, do_svn_update=True): bh@52: self.package_tracks = package_tracks bh@0: self.check_interval = check_interval bh@80: self.revision = revision bh@190: self.do_svn_update = do_svn_update bh@91: self.instructions_file = instructions_file bh@91: self.instructions_file_removed = False bh@128: self.sort_tracks() bh@128: bh@128: def sort_tracks(self): bh@128: """Sorts tracks for dependency handling""" bh@128: todo = self.package_tracks[:] bh@128: sorted = [] bh@128: seen = set() bh@128: bh@128: # dependencies that can be solved by one of the tracks bh@128: known = set() bh@128: for track in todo: bh@128: known |= track.dependencies_provided() bh@128: bh@128: while todo: bh@128: todo_again = [] bh@128: for track in todo: bh@128: if not track.handle_dependencies: bh@128: sorted.append(track) bh@128: continue bh@128: bh@128: unmet = (track.dependencies_required() & known) - seen bh@128: if unmet: bh@128: todo_again.append(track) bh@128: else: bh@128: sorted.append(track) bh@128: seen |= track.dependencies_provided() bh@128: if todo_again == todo: bh@128: raise CyclicDependencyError(todo) bh@128: todo = todo_again bh@128: bh@128: self.package_tracks = sorted bh@128: self.needed_binaries = set() bh@128: for track in self.package_tracks: bh@128: self.needed_binaries |= track.dependencies_required() bh@128: self.needed_binaries &= known bh@128: bh@128: logging.info("sorted track order: %s", bh@128: " ".join(track.name for track in sorted)) bh@128: logging.info("binary packages needed as build dependencies: %s", bh@128: " ".join(self.needed_binaries)) bh@128: bh@0: bh@0: def run(self): bh@7: """Runs the packager group indefinitely""" thomas@78: logging.info("Starting in periodic check mode." thomas@78: " Will check every %d seconds", self.check_interval) bh@0: last_check = -1 bh@0: while 1: bh@0: now = time.time() bh@0: if now > last_check + self.check_interval: bh@91: if self.check_package_tracks(): bh@91: break bh@0: last_check = now bh@0: next_check = now + self.check_interval bh@0: to_sleep = next_check - time.time() bh@0: if to_sleep > 0: bh@0: logging.info("Next check at %s", bh@0: time.strftime("%Y-%m-%d %H:%M:%S", bh@0: time.localtime(next_check))) bh@0: time.sleep(to_sleep) bh@0: else: bh@0: logging.info("Next check now") bh@91: if self.should_stop(): bh@91: logging.info("Received stop instruction. Stopping.") bh@91: return bh@0: bh@52: def check_package_tracks(self): bh@52: logging.info("Checking package tracks") bh@91: self.clear_instruction() bh@128: repeat = True bh@128: while repeat: bh@128: repeat = False bh@128: for track in self.package_tracks: bh@128: try: bh@190: packager = track.package_if_updated(revision=self.revision, bh@190: do_svn_update=self.do_svn_update) bh@128: if packager: bh@128: packager.package() bh@128: repeat = self.install_dependencies(track, packager) bh@128: if self.should_stop(): bh@128: logging.info("Received stop instruction. Stopping.") bh@128: return True bh@128: except: bh@128: logging.exception("An error occurred while" bh@128: " checking packager track %r", track.name) bh@128: if repeat: bh@128: logging.info("Built binaries needed by other tracks." bh@128: " Starting over to ensure all dependencies" bh@128: " are met") bh@128: break bh@128: bh@52: logging.info("Checked all package tracks") bh@14: bh@128: bh@128: def install_dependencies(self, track, packager): bh@128: """Add the binaries built by packager to the builder, if necessary. bh@128: It is necessary if any track depends on the packages. The bh@128: method simply installs all binary files built by the packger bh@128: instead of only those which are immediately required by a track. bh@128: This is done because tracks usually depend directly only on the bh@128: -dev packages which usually require another binary package built bh@128: at the same time. bh@128: """ bh@128: if (track.handle_dependencies bh@128: and track.dependencies_provided() & self.needed_binaries): bh@128: # FIXME: this basically assumes that all tracks use the same bh@128: # builder. This is true for now, but it is possible to bh@128: # configure treepkg with different builders for different bh@128: # tracks and we really should be installing the newly built bh@128: # binaries into the builder of the tracks which depends on bh@128: # them bh@128: binaries = packager.list_binary_files() bh@128: track.builder.add_binaries_to_extra_pkg(binaries) bh@128: return True bh@128: return False bh@128: bh@128: bh@52: def get_package_tracks(self): bh@52: return self.package_tracks bh@91: bh@91: def read_instruction(self): bh@91: if not self.instructions_file: bh@91: return "" bh@91: try: bh@91: f = open(self.instructions_file) bh@91: except (IOError, OSError): bh@91: return "" bh@91: try: bh@91: return f.read().strip() bh@91: finally: bh@91: f.close() bh@91: self.clear_instruction() bh@91: bh@91: def clear_instruction(self, force=False): bh@91: if self.instructions_file and (not self.instructions_file_removed bh@91: or force): bh@91: util.writefile(self.instructions_file, "") bh@91: self.instructions_file_removed = True bh@91: bh@91: def should_stop(self): bh@91: return self.read_instruction() == "stop"