Mercurial > treepkg > treepkg
view treepkg/packager.py @ 232:e3cda08d2619
When checking whether to build a new revision, also update the rules
working copy, use its revision too when comparing the current revision
with the already built revisions and use it when building a new revision.
Update the tests for this.
Part of this change is that now, the default revision for the rules is 0
not 1 as previously. To avoid extra builds, existing treepkg instances
will have to be updated so that the directories end in -0 not -1. There
will be a tool to help with this.
author | Bernhard Herzog <bh@intevation.de> |
---|---|
date | Tue, 13 Jan 2009 15:01:22 +0000 |
parents | 7dbf0a3443bb |
children | eaa696629a91 |
line wrap: on
line source
# Copyright (C) 2007, 2008, 2009 by Intevation GmbH # Authors: # Bernhard Herzog <bh@intevation.de> # # This program is free software under the GPL (>=v2) # Read the file COPYING coming with the software for details. """Classes to automatically build debian packages from subversion checkouts""" import os import time import re import logging import shutil import datetime import new import util from subversion import SvnRepository, SvnWorkingCopy, ManualWorkingCopy import run import status import debian from cmdexpand import cmdexpand from builder import PBuilder def _fromparent(attr): """Creates a property that delegates its value to self.parent.<attr>""" def get(self): return getattr(self.parent, attr) return property(get) class SourcePackager(object): # Derived classes must supply the package basename pkg_basename = None track = _fromparent("track") revision = _fromparent("revision") status = _fromparent("status") log_dir = _fromparent("log_dir") work_dir = _fromparent("work_dir") src_dir = _fromparent("src_dir") def __init__(self, parent): self.parent = parent assert(self.pkg_basename) def determine_package_version(self, directory): """Returns the version number of the new package as a string The directory parameter is the name of the directory containing the newly exported sources. The sources were exported with the export_sources method. The default implementation simply returns the revision converted to a string. """ return str(self.revision) def export_sources(self): """Export the sources from the subversion working directory This method first exports the sources to a temporary directory and then renames the directory. The new name is of the form <pkg_basename>-<version> Where pkg_basename is the value of self.pkg_basename and version is the return value of the determine_package_version() method. 
""" temp_dir = os.path.join(self.work_dir, "temp") self.track.export_sources(temp_dir) pkgbaseversion = self.determine_package_version(temp_dir) pkgbasedir = os.path.join(self.work_dir, self.pkg_basename + "-" + pkgbaseversion) os.rename(temp_dir, pkgbasedir) return pkgbaseversion, pkgbasedir def update_version_numbers(self, pkgbasedir): """Updates the version numbers in the code in pkgbasedir. The default implementation does nothing. Derived classes should override this method if necessary. """ def create_tarball(self, tarballname, workdir, basedir, compression="gz"): """Creates a new tarball. Parameters: tarballname -- the filename of the new tarball workdir -- The directory into which to change before running tar. (actually this is done with GNUI tar's -C option) basedir -- The basedirectory of the files that are packaged into the tarfile. This should be a relative filename directly in workdir. compression -- The compression method to use as a string. Suported are 'gz' for gzip compression (the default) and 'bz2' for bzip2. """ logging.info("Creating tarball %r", tarballname) if compression == "gz": compression_flag = "z" elif compression == "bz2": compression_flag = "j" else: raise ValueError("Unknown compression method %r" % compression) run.call(cmdexpand("tar c -$compression_flag -f $tarballname" " -C $workdir $basedir", **locals())) def copy_debian_directory(self, pkgbasedir, pkgbaseversion, changemsg): """Copies the debian directory and updates the copy's changelog Parameter: pkgbasedir -- The directory holding the unpacked source package pkgbaseversion -- The version to update the changelog to changemsg -- The message for the changelog When determining the actual version for the new package, this function looks at the previous version in the changelog. If it has a prefix separated from the version number by a colon this prefix is prepended to the pkgbaseversion parameter. Debian uses such prefixes for the kde packages. 
""" debian_dir = os.path.join(pkgbasedir, "debian") changelog = os.path.join(debian_dir, "changelog") self.track.copy_debian_directory(debian_dir) logging.info("Updating %r", changelog) oldversion = util.debian_changelog_version(changelog) if ":" in oldversion: oldversionprefix = oldversion.split(":")[0] + ":" else: oldversionprefix = "" debrev = self.track.debrevision_prefix + "1" run.call(cmdexpand("debchange -c $changelog" " -v ${oldversionprefix}${pkgbaseversion}-${debrev}" " $changemsg", **locals()), env=self.track.debian_environment()) def create_source_package(self, pkgbasedir, origtargz): """Creates a new source package from pkgbasedir and origtargz""" util.ensure_directory(self.log_dir) dpkg_source_log = os.path.join(self.log_dir, "dpkg_source.txt") logging.info("Creating new source package; logging to %s", dpkg_source_log) run.call(cmdexpand("dpkg-source -b $directory $tarball", directory=os.path.basename(pkgbasedir), tarball=os.path.basename(origtargz)), cwd=os.path.dirname(pkgbasedir), logfile=dpkg_source_log, env=self.track.debian_environment()) def move_source_package(self, pkgbasename): """Moves the new source package from the work_dir to the src_dir""" logging.info("Moving source package to %r", self.src_dir) util.ensure_directory(self.src_dir) for filename in [filename for filename in os.listdir(self.work_dir) if filename.startswith(pkgbasename)]: os.rename(os.path.join(self.work_dir, filename), os.path.join(self.src_dir, filename)) def sign_package(self): """Signs the .dsc file created buy the instance""" self.track.sign_file(util.listdir_abs(self.src_dir, "*.dsc")[0]) def package(self): """Creates a source package from a subversion checkout. After setting up the working directory, this method calls the do_package method to do the actual packaging. Afterwards the work directory is removed. 
""" util.ensure_directory(self.work_dir) try: self.status.creating_source_package() self.do_package() self.sign_package() self.status.source_package_created() finally: logging.info("Removing workdir %r", self.work_dir) shutil.rmtree(self.work_dir) def do_package(self): """Does the work of creating a source package This method must be overriden by derived classes. The method should do the work in self.work_dir. When the package is done, the source package files should be in self.src_dir. """ raise NotImplementedError class BinaryPackager(object): track = _fromparent("track") status = _fromparent("status") log_dir = _fromparent("log_dir") binary_dir = _fromparent("binary_dir") def __init__(self, parent, dsc_file, logfile): self.parent = parent self.dsc_file = dsc_file self.logfile = logfile def package(self): self.status.creating_binary_package() util.ensure_directory(self.binary_dir) util.ensure_directory(self.log_dir) logging.info("Building binary package; logging to %r", self.logfile) self.track.builder.build(self.dsc_file, self.binary_dir, self.logfile) self.sign_package() self.status.binary_package_created() def sign_package(self): """Signs the .changes file created buy the instance""" self.track.sign_file(util.listdir_abs(self.binary_dir, "*.changes")[0]) class RevisionPackager(object): source_packager_cls = SourcePackager binary_packager_cls = BinaryPackager def __init__(self, track, revision, rules_revision): self.track = track self.revision = revision self.rules_revision = rules_revision self.base_dir = self.track.pkg_dir_for_revision(self.revision, rules_revision) self.status = status.RevisionStatus(os.path.join(self.base_dir, "status")) log_dir = util.filenameproperty("log") work_dir = util.filenameproperty("work") binary_dir = util.filenameproperty("binary") src_dir = util.filenameproperty("src") build_log = util.filenameproperty("build_log.txt", dir_attr="log_dir") def find_dsc_file(self): for filename in os.listdir(self.src_dir): if 
filename.endswith(".dsc"): return os.path.join(self.src_dir, filename) return None def has_build_log(self): return os.path.exists(self.build_log) def list_log_files(self): """Returns a list describing the logfiles available for the revision. Each list item is a tuple of the form (TITLE, FILENAME) where TITLE is a string with a title usable in e. g. a web-page, and FILENAME is the absolute filename of the log file. """ files = [] if self.has_build_log(): files.append(("build log", self.build_log)) return files def list_source_files(self): """Returns a list with the names of the files of the source package. The implementation assumes that all files in self.src_dir belong to the source package. """ return sorted(util.listdir_abs(self.src_dir)) def list_binary_files(self): """Returns a list with the names of the files of the binary packages. The implementation assumes that all files in self.binary_dir belong to the binary packages. """ return sorted(util.listdir_abs(self.binary_dir)) def package(self): try: util.ensure_directory(self.work_dir) self.status.start = datetime.datetime.utcnow() src_packager = self.source_packager_cls(self) src_packager.package() dsc_file = self.find_dsc_file() if dsc_file is None: raise RuntimeError("Cannot find dsc File in %r" % self.src_dir) bin_packager = self.binary_packager_cls(self, dsc_file, self.build_log) bin_packager.package() self.status.stop = datetime.datetime.utcnow() except: self.status.error() self.status.stop = datetime.datetime.utcnow() # set the notification status last to avoid race conditions. 
# The pending notification is for now the only situation # where another process might modify the status file (the # listpendingnotifications program will set it to # "notification_sent") self.status.notification_pending() raise def remove_package_dir(self): logging.info("Removing pkgdir %r", self.base_dir) shutil.rmtree(self.base_dir) class PackageTrack(object): revision_packager_cls = RevisionPackager svn_external_subdirs = [] extra_config_desc = [] def __init__(self, name, base_dir, svn_url, root_cmd, pbuilderrc, deb_email, deb_fullname, packager_class="treepkg.packager", debrevision_prefix="treepkg", handle_dependencies=False, signing_key_id="", do_build=True): self.name = name self.base_dir = base_dir self.builder = PBuilder(pbuilderrc, root_cmd, release_signing_keyid=signing_key_id) self.deb_email = deb_email self.deb_fullname = deb_fullname self.debrevision_prefix = debrevision_prefix self.signing_key_id = signing_key_id self.do_build = do_build self.handle_dependencies = handle_dependencies self.dependencies = None self.pkg_dir_template = "%(revision)d-%(rules_revision)d" self.pkg_dir_regex = re.compile(r"(?P<revision>[0-9]+)" r"-(?P<rules_revision>[0-9]+)$") repo = SvnRepository(svn_url, self.svn_external_subdirs) self.working_copy = SvnWorkingCopy(repo, self.checkout_dir, logger=logging) self.rules_working_copy = ManualWorkingCopy(self.debian_dir) checkout_dir = util.filenameproperty("checkout") debian_dir = util.filenameproperty("debian") pkg_dir = util.filenameproperty("pkg") def init_treepkg(self): print "Initializing", self.name if not os.path.exists(self.base_dir): print "creating %s" % (self.base_dir,) util.ensure_directory(self.base_dir) if not os.path.exists(self.debian_dir): print ("TODO: the debian directory %s still has to be created" % (self.debian_dir,)) def determine_dependencies(self): if self.dependencies is not None: return requires = () provides = () # only try to parse the control file if the debian directory # exists. 
If the debian directory doesn't exist yet, the tree # packager is likely still being configured and this code may be # run indirectly from e. g. bin/inittreepkg.py in which case the # init_treepkg method will report the missing debian if self.handle_dependencies and os.path.exists(self.debian_dir): control = debian.DebianControlFile(os.path.join(self.debian_dir, "control")) requires = control.build_depends provides = (pkg[0] for pkg in control.packages) self.dependencies = (set(requires), set(provides)) logging.debug("Track %s: build depends: %s", self.name, " ".join(self.dependencies[0])) logging.debug("Track %s: provides: %s", self.name, " ".join(self.dependencies[1])) def dependencies_required(self): """Returns a list of required packages""" self.determine_dependencies() return self.dependencies[0] def dependencies_provided(self): """Returns a list of provided packages""" self.determine_dependencies() return self.dependencies[1] def pkg_dir_for_revision(self, revision, rules_revision): return os.path.join(self.pkg_dir, self.pkg_dir_template % locals()) def last_changed_revision(self): return self.working_copy.last_changed_revision() def get_revision_numbers(self): """Returns a list of the numbers of the packaged revisions""" revisions = [] if os.path.exists(self.pkg_dir): for filename in os.listdir(self.pkg_dir): match = self.pkg_dir_regex.match(filename) if match: revisions.append((int(match.group("revision")), int(match.group("rules_revision")))) revisions.sort() return revisions def update_checkout(self, revision=None): """Updates the working copy. If the checkout_dir doesn't exist yet, a new checkout is made into that directory. The value of the revision parameter is passed through to the update method. 
""" self.working_copy.update_or_checkout(revision=revision) def export_sources(self, to_dir): logging.info("Exporting sources for tarball to %r", to_dir) self.working_copy.export(to_dir) def copy_debian_directory(self, to_dir): logging.info("Copying debian directory to %r", to_dir) self.rules_working_copy.export(to_dir) def debian_environment(self): """Returns the environment variables for the debian commands""" env = os.environ.copy() env["DEBFULLNAME"] = self.deb_fullname env["DEBEMAIL"] = self.deb_email return env def package_if_updated(self, revision=None, do_svn_update=True): """Returns a new packager if the working copy has not been packaged yet. If do_svn_update is true -- the default -- update the working copy to the the revision specified with the revision parameter or if revision is None, the latest revision in the repository.""" if not self.do_build: return None if do_svn_update: self.update_checkout(revision=revision) # TODO: what should happen with the debian checkout, if a # revision for the source checkoute was given? self.rules_working_copy.update_or_checkout() current_revision = (self.last_changed_revision(), self.rules_working_copy.last_changed_revision()) logging.info("New revision is %s", current_revision) if (current_revision, 1) not in self.get_revision_numbers(): logging.info("Revision %s has not been packaged yet", current_revision) return self.revision_packager_cls(self, *current_revision) else: logging.info("Revision %s has already been packaged.", current_revision) def get_revisions(self): """Returns RevisionPackager objects for each packaged revision""" return [self.revision_packager_cls(self, revision, rules_revision) for revision, rules_revision in self.get_revision_numbers()] def sign_file(self, filename): """Signs a file using the debian.sign_file function. The file is signed with the key indicated by the track's signing_key_id attribute. If that is empty, the file is not signed. 
""" if self.signing_key_id: logging.info("Signing %r with key %r", filename, self.signing_key_id) debian.sign_file(filename, self.signing_key_id) def import_packager_module(packager_class): """Import the packager module named by packager_class. The packager_class must be the full absolute module name for the packager. The function tries to find or create a suitable PackageTrack class from this module using the following rules: - If the module contains a class called PackageTrack, use that. - Otherwise create one using the module's RevisionPackager class, creating RevisionPackager if necessary. - If RevisionPackager needs to be created, it uses the module's SourcePackager as source_packager_cls and if present also the module's BinaryPackager as binary_packager_cls. If the module does not have a BinaryPackager, the default BinaryPackager is used. """ module = util.import_dotted_name(packager_class) if not hasattr(module, "PackageTrack"): if not hasattr(module, "RevisionPackager"): binary_packager = getattr(module, "BinaryPackager", BinaryPackager) module.RevisionPackager \ = new.classobj("RevisionPackager", (RevisionPackager,), dict(source_packager_cls=module.SourcePackager, binary_packager_cls=binary_packager)) module.PackageTrack \ = new.classobj("PackageTrack", (PackageTrack,), dict(revision_packager_cls=module.RevisionPackager)) return module def create_package_track(packager_class, **kw): module = import_packager_module(packager_class) return module.PackageTrack(**kw) class CyclicDependencyError(Exception): """Exception thrown when a cycle is detected in the track dependencies""" def __init__(self, tracks): Exception.__init__(self, "Cyclic dependencies between" " tracks (%s)" % ", ".join([track.name for track in tracks])) class PackagerGroup(object): def __init__(self, package_tracks, check_interval, revision=None, instructions_file=None, do_svn_update=True): self.package_tracks = package_tracks self.check_interval = check_interval self.revision = revision 
self.do_svn_update = do_svn_update self.instructions_file = instructions_file self.instructions_file_removed = False self.sort_tracks() def sort_tracks(self): """Sorts tracks for dependency handling""" todo = self.package_tracks[:] sorted = [] seen = set() # dependencies that can be solved by one of the tracks known = set() for track in todo: known |= track.dependencies_provided() while todo: todo_again = [] for track in todo: if not track.handle_dependencies: sorted.append(track) continue unmet = (track.dependencies_required() & known) - seen if unmet: todo_again.append(track) else: sorted.append(track) seen |= track.dependencies_provided() if todo_again == todo: raise CyclicDependencyError(todo) todo = todo_again self.package_tracks = sorted self.needed_binaries = set() for track in self.package_tracks: self.needed_binaries |= track.dependencies_required() self.needed_binaries &= known logging.info("sorted track order: %s", " ".join(track.name for track in sorted)) logging.info("binary packages needed as build dependencies: %s", " ".join(self.needed_binaries)) def run(self): """Runs the packager group indefinitely""" logging.info("Starting in periodic check mode." " Will check every %d seconds", self.check_interval) last_check = -1 while 1: now = time.time() if now > last_check + self.check_interval: if self.check_package_tracks(): break last_check = now next_check = now + self.check_interval to_sleep = next_check - time.time() if to_sleep > 0: logging.info("Next check at %s", time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(next_check))) time.sleep(to_sleep) else: logging.info("Next check now") if self.should_stop(): logging.info("Received stop instruction. 
Stopping.") return def check_package_tracks(self): logging.info("Checking package tracks") self.clear_instruction() repeat = True while repeat: repeat = False for track in self.package_tracks: try: packager = track.package_if_updated(revision=self.revision, do_svn_update=self.do_svn_update) if packager: packager.package() repeat = self.install_dependencies(track, packager) if self.should_stop(): logging.info("Received stop instruction. Stopping.") return True except: logging.exception("An error occurred while" " checking packager track %r", track.name) if repeat: logging.info("Built binaries needed by other tracks." " Starting over to ensure all dependencies" " are met") break logging.info("Checked all package tracks") def install_dependencies(self, track, packager): """Add the binaries built by packager to the builder, if necessary. It is necessary if any track depends on the packages. The method simply installs all binary files built by the packger instead of only those which are immediately required by a track. This is done because tracks usually depend directly only on the -dev packages which usually require another binary package built at the same time. """ if (track.handle_dependencies and track.dependencies_provided() & self.needed_binaries): # FIXME: this basically assumes that all tracks use the same # builder. 
This is true for now, but it is possible to # configure treepkg with different builders for different # tracks and we really should be installing the newly built # binaries into the builder of the tracks which depends on # them binaries = packager.list_binary_files() track.builder.add_binaries_to_extra_pkg(binaries) return True return False def get_package_tracks(self): return self.package_tracks def read_instruction(self): if not self.instructions_file: return "" try: f = open(self.instructions_file) except (IOError, OSError): return "" try: return f.read().strip() finally: f.close() self.clear_instruction() def clear_instruction(self, force=False): if self.instructions_file and (not self.instructions_file_removed or force): util.writefile(self.instructions_file, "") self.instructions_file_removed = True def should_stop(self): return self.read_instruction() == "stop"