view treepkg/packager.py @ 551:d2b294e4ede7

add short_rules_revision template
author Bjoern Ricks <bricks@intevation.de>
date Tue, 08 Mar 2011 14:02:19 +0000
parents 6b6cd977785c
children 1af20baa532f
# Copyright (C) 2007-2010 by Intevation GmbH
# Authors:
# Bernhard Herzog <bh@intevation.de>
# Bjoern Ricks    <bjoern.ricks@intevation.de>
# Andre Heinecke  <andre.heinecke@intevation.de>
#
# This program is free software under the GPL (>=v2)
# Read the file COPYING coming with the software for details.

"""Classes to automatically build debian packages from subversion checkouts"""

import os
import os.path
import time
import re
import logging
import shutil
import datetime
import new
import sys

import util
from subversion import SvnRepository, SvnWorkingCopy, ManualWorkingCopy
from git import GitRepository, GitWorkingCopy
import run
import status
import debian
from cmdexpand import cmdexpand
from builder import PBuilder
from sbuilder import SbdmockBuilder

def _fromparent(attr):
    """Creates a property that delegates its value to self.parent.<attr>"""
    def get(self):
        return getattr(self.parent, attr)
    return property(get)



class PackagerError(Exception):

    """Base class for Packager specific errors raised by TreePKG"""

class SourcePackager(object):

    pkg_basename = property(lambda self: self.track.pkg_basename)
    changemsg_template = property(lambda self:
                                  self.track.changelog_msg_template)
    track = _fromparent("track")
    revision = _fromparent("revision")
    pkg_revision = _fromparent("pkg_revision")
    status = _fromparent("status")
    log_dir = _fromparent("log_dir")
    work_dir = _fromparent("work_dir")
    src_dir = _fromparent("src_dir")

    def __init__(self, parent):
        self.parent = parent
        self.rules_revision = self.parent.rules_revision
        # TODO short revision should be determined with scm working copy
        self.short_revision = self.revision
        if len(self.short_revision) > 7:
            self.short_revision = self.short_revision[:7]
        localtime = time.localtime()
        self.pkg_date = time.strftime("%Y%m%d", localtime)
        self.pkg_time = time.strftime("%H%M", localtime)

    def determine_upstream_version(self, directory=None):
        """
            Tries to parse the upstream version from a source directory
            and returns it as a string.
        """

        if not directory:
            directory = self.track.checkout_dir
        # TODO: it should be possible to select which files should be searched
        # for upstream_version

        #if os.path.isfile(os.path.join(directory, "CMakeList.txt")):
        #    return util.extract_cmakefile_version(os.path.join(directory,
        #                                         "CMakeList.txt"))
        if os.path.isfile(os.path.join(directory, "configure.ac")):
            return util.extract_configureac_version(os.path.join(directory,
                                                    "configure.ac"))
        changelog = os.path.join(self.track.debian_dir, "changelog")
        if os.path.isfile(changelog):
            debian_version = util.debian_changelog_version(
                                changelog)
            # upstream version is debian version without epoch and
            # debian revision
            if ":" in debian_version:
                debian_version = debian_version.split(":")[1]
            if "-" in debian_version:
                debian_version = debian_version.split("-")[0]
            upstream_version = debian_version
        else:
            upstream_version = "0"

        return upstream_version
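        # Illustrative example (hypothetical values): a debian/changelog entry
        # with the version "4:4.3.2-2treepkg1" yields the upstream version
        # "4.3.2" after stripping the epoch ("4:") and the Debian revision
        # ("-2treepkg1") as done above.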

    def determine_package_version(self, directory, additionals=None):
        """Returns the resolved version template of the package as a string

        The directory parameter is the name of the directory containing
        the newly exported sources.  The sources were exported with the
        export_sources method.

        The additionals parameter may contain a dictionary with additional
        variables used in the version template.

        Default variables that can be resolved are:
             revision - The revision of the package
             short_revision - The first seven characters of the revision
             rules_revision - The revision of the packaging rules
             pkg_date - The current date in the form: YYYYMMDD
             pkg_time - The current time in the form: HHMM
             pkg_revision - The number of times a new package has
                            been created from this track.
             upstream_version - The version parsed from the sources or
                                package descriptions by
                                determine_upstream_version. Default: "0"
        """
        revision = self.revision
        rules_revision = self.rules_revision
        pkg_revision = self.pkg_revision
        short_revision = self.short_revision
        pkg_date = self.pkg_date
        pkg_time = self.pkg_time
        upstream_version = self.determine_upstream_version(directory)
        version_dict = locals().copy()
        if additionals:
            version_dict.update(additionals)
        return self.track.version_template % version_dict

    def export_sources(self):
        """Export the sources from the subversion working directory

        This method first exports the sources to a temporary directory
        and then renames the directory.  The new name is of the form

          <pkg_basename>-<version>

        Where pkg_basename is the value of self.pkg_basename and version
        is the return value of the determine_package_version() method.
        """
        temp_dir = os.path.join(self.work_dir, "temp")
        self.track.export_sources(temp_dir)

        pkgbaseversion = self.determine_package_version(temp_dir)
        pkgbasedir = os.path.join(self.work_dir,
                                  self.pkg_basename + "-" + pkgbaseversion)

        os.rename(temp_dir, pkgbasedir)
        return pkgbaseversion, pkgbasedir


    def prepare_sources_for_tarball(self, pkgbasedir, pkgbaseversion):
        """Prepare the exported sources prior to creating the tarball.

        The default implementation does nothing.  Derived classes should
        override this method if necessary to e.g. update the version
        numbers in the code.
        """

    def create_tarball(self, tarballname, workdir, basedir, compression="gz"):
        """Creates a new tarball.

        Parameters:

          tarballname -- the filename of the new tarball
          workdir -- The directory into which to change before running tar.
                     (actually this is done with GNU tar's -C option)
          basedir -- The basedirectory of the files that are packaged
                     into the tarfile.  This should be a relative
                     filename directly in workdir.
          compression -- The compression method to use as a string.
                         Supported are 'gz' for gzip compression (the
                         default) and 'bz2' for bzip2.
        """
        logging.info("Creating tarball %r", tarballname)
        if compression == "gz":
            compression_flag = "z"
        elif compression == "bz2":
            compression_flag = "j"
        else:
            raise ValueError("Unknown compression method %r" % compression)

        run.call(cmdexpand("tar c -$compression_flag -f $tarballname"
                           " -C $workdir $basedir", **locals()))

    def copy_debian_directory(self, pkgbasedir, pkgbaseversion, changemsg):
        """Copies the debian directory and updates the copy's changelog

        Parameters:
          pkgbasedir -- The directory holding the unpacked source package
          pkgbaseversion -- The version to update the changelog to
          changemsg -- The message for the changelog

        When determining the actual version for the new package, this
        function looks at the previous version in the changelog.  If it
        has a prefix (a Debian epoch) separated from the version number by
        a colon, this prefix is prepended to the pkgbaseversion parameter.
        Debian uses such epochs for the KDE packages.
        """
        debian_dir = os.path.join(pkgbasedir, "debian")
        changelog = os.path.join(debian_dir, "changelog")

        self.track.copy_debian_directory(debian_dir)

        logging.info("Updating %r", changelog)
        oldversion = util.debian_changelog_version(changelog)
        if ":" in oldversion:
            oldversionprefix = oldversion.split(":")[0] + ":"
        else:
            oldversionprefix = ""
        debrev = self.pkg_revision
        run.call(cmdexpand("debchange -c  $changelog"
                           " -v ${oldversionprefix}${pkgbaseversion}-${debrev}"
                           " $changemsg", **locals()),
                 env=self.track.debian_environment())


    def create_source_package(self, pkgbasedir, origtargz):
        """Creates a new source package from pkgbasedir and origtargz"""
        util.ensure_directory(self.log_dir)
        dpkg_source_log = os.path.join(self.log_dir, "dpkg_source.txt")
        logging.info("Creating new source package; logging to %s",
                     dpkg_source_log)
        
        format = self.get_debian_source_format(pkgbasedir)
        if format == "1.0":
            run.call(cmdexpand("dpkg-source -b $directory $tarball",
                           directory=os.path.basename(pkgbasedir),
                           tarball=os.path.basename(origtargz)),
                 cwd=os.path.dirname(pkgbasedir),
                 logfile=dpkg_source_log,
                 env=self.track.debian_environment())
        elif format == "3.0 (quilt)":
            run.call(cmdexpand("dpkg-source -b $directory",
                           directory=os.path.basename(pkgbasedir)),
                 cwd=os.path.dirname(pkgbasedir),
                 logfile=dpkg_source_log,
                 env=self.track.debian_environment())
        else:
            raise RuntimeError("debian source format %s is not supported by treepkg" % format)

    def get_debian_source_format(self, pkgbasedir):
        """Returns the debian source format of the package.

        Reads debian/source/format in pkgbasedir and returns its first
        line.  If the file does not exist, "1.0" is assumed.
        """
        formatfile = os.path.join(pkgbasedir, "debian", "source", "format")
        if not os.path.exists(formatfile):
            return "1.0"
        format_file = open(formatfile, "r")
        try:
            return format_file.readline().strip()
        finally:
            format_file.close()
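        # Illustrative example: a debian/source/format file containing
        # "3.0 (quilt)" makes create_source_package above use the
        # single-argument dpkg-source call; without the file, the "1.0"
        # format with the .orig tarball is used.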

    def move_source_package(self, pkgbasename):
        """Moves the new source package from the work_dir to the src_dir"""
        logging.info("Moving source package to %r", self.src_dir)
        util.ensure_directory(self.src_dir)
        for filename in [filename for filename in os.listdir(self.work_dir)
                                  if filename.startswith(pkgbasename)]:
            os.rename(os.path.join(self.work_dir, filename),
                      os.path.join(self.src_dir, filename))

    def sign_package(self):
        """Signs the .dsc file created buy the instance"""
        src_files = util.listdir_abs(self.src_dir, "*.dsc")
        if not src_files:
            raise RuntimeError("Could not find .dsc file in source"
                  " directory %s" % self.src_dir)
        self.track.sign_file(src_files[0])

    def package(self):
        """Creates a source package from a subversion checkout.

        After setting up the working directory, this method calls the
        do_package method to do the actual packaging.  Afterwards the
        work directory is removed.
        """
        util.ensure_directory(self.work_dir)
        try:
            self.status.creating_source_package()
            self.do_package()
            self.sign_package()
            self.status.source_package_created()
        finally:
            logging.info("Removing workdir %r", self.work_dir)
            shutil.rmtree(self.work_dir)

    def do_package(self):
        """Does the work of creating a source package."""
        pkgbaseversion, pkgbasedir = self.export_sources()

        pkgbasename = self.pkg_basename + "_" + pkgbaseversion
        origtargz = os.path.join(self.work_dir,
                                 pkgbasename + ".orig.tar.gz")

        self.prepare_sources_for_tarball(pkgbasedir, pkgbaseversion)

        self.create_tarball(origtargz, self.work_dir,
                            os.path.basename(pkgbasedir))

        changemsg = self.get_change_msg()
        self.copy_debian_directory(pkgbasedir, pkgbaseversion,
                                   changemsg)

        self.create_source_package(pkgbasedir, origtargz)
        self.move_source_package(pkgbasename)

    def get_change_msg(self):
        return self.changemsg_template % dict(revision=self.revision, 
                pkg_date=self.pkg_date, pkg_time=self.pkg_time,
                rules_revision=self.rules_revision)
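        # Illustrative example: with the default changelog_msg_template
        # "Update to r%(revision)s" and a (hypothetical) revision "4711",
        # the changelog message becomes "Update to r4711".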

class BinaryPackager(object):

    track = _fromparent("track")
    status = _fromparent("status")
    log_dir = _fromparent("log_dir")
    binary_dir = _fromparent("binary_dir")

    def __init__(self, parent, dsc_file, logfile):
        self.parent = parent
        self.dsc_file = dsc_file
        self.logfile = logfile

    def package(self):
        self.status.creating_binary_package()
        util.ensure_directory(self.binary_dir)
        util.ensure_directory(self.log_dir)
        logging.info("Building binary package; logging to %r", self.logfile)
        self.track.builder.build(self.dsc_file, self.binary_dir, self.logfile,
                                 extra_env=self.track.debian_environment())
        self.sign_package()
        self.status.binary_package_created()

    def sign_package(self):
        """Signs the .changes file created by the instance"""
        changes_files = util.listdir_abs(self.binary_dir, "*.changes")
        if not changes_files:
            raise RuntimeError("Cannot find .changes file in %r"
                               % self.binary_dir)
        self.track.sign_file(changes_files[0])


class RevisionPackager(object):

    source_packager_cls = SourcePackager
    binary_packager_cls = BinaryPackager

    def __init__(self, track, revision, rules_revision, pkg_revision=None,
                 tag=""):
        self.track = track
        self.revision = revision
        self.rules_revision = rules_revision
        self.short_rules_revision = rules_revision

        # fixme: short_rules_revision should be determined by scm
        if len(self.short_rules_revision) > 7:
            self.short_rules_revision = self.short_rules_revision[:7]

        if pkg_revision is None:
            pkg_revision = (self.track.pkg_revision_template
                            % dict(pkg_revision=1,
                                   rules_revision=rules_revision,
                                   short_rules_revision=self.short_rules_revision))
        self.pkg_revision = pkg_revision
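        # Illustrative example (hypothetical template): a pkg_revision_template
        # of "treepkg%(pkg_revision)s.%(short_rules_revision)s" with the rules
        # revision "d2b294e4ede7" would resolve to "treepkg1.d2b294e".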

        self.base_dir = self.track.pkg_dir_for_revision(self.revision,
                                                        rules_revision)
        self.status = status.RevisionStatus(os.path.join(self.base_dir,
                                                         "status"),
                                                         self.after_setattr)
        if tag:
            util.ensure_directory(self.base_dir)
            self.status.tags = tag

    log_dir = util.filenameproperty("log")
    work_dir = util.filenameproperty("work")
    binary_dir = util.filenameproperty("binary")
    src_dir = util.filenameproperty("src")
    build_log = util.filenameproperty("build_log.txt", dir_attr="log_dir")

    def after_setattr(self, status, attr):
        '''
        Execute a hook set in the status_hook configuration attribute
        every time the status changes.
        '''
        if not self.track.status_hook: return
        logging.info("Executing status hook: %s" % self.track.status_hook )
        status_env = {
                       "TREEPKG_TRACK" : self.track.name,
                       "TREEPKG_BASE_DIR" : self.base_dir,
                       "TREEPKG_STATE" : attr,
                       "TREEPKG_STATENAME" : status.status.name
                      }
        run.call(cmdexpand(self.track.status_hook), extra_env=status_env)

    def find_dsc_file(self):
        for filename in os.listdir(self.src_dir):
            if filename.endswith(".dsc"):
                return os.path.join(self.src_dir, filename)
        return None

    def has_build_log(self):
        return os.path.exists(self.get_log_file())

    def get_log_title(self, f):
        if not os.path.isfile(f):
            return None
        title = os.path.basename(f)
        title = title.replace("_"," ")
        title = title[:title.find(".")]
        title = title.title()
        return title

    def get_log_file(self):
        if os.path.exists(self.build_log + ".gz"):
            return self.build_log + ".gz"
        return self.build_log

    def get_log_files(self, logs=None):
        files = []
        if os.path.isdir(self.log_dir):
            for f in os.listdir(self.log_dir):
                if logs is None or f in logs:
                    f = os.path.join(self.log_dir,f)
                    if os.path.isfile(f):
                        files.append((self.get_log_title(f),f))
        return files

    def list_log_files(self, logs):
        """Returns a list describing the logfiles available for the revision.
        Each list item is a tuple of the form (TITLE, FILENAME) where
        TITLE is the filename without directory and extension, with
        underscores replaced by spaces and each word capitalized, and
        FILENAME is the absolute filename of the log file.
        """
        return self.get_log_files(logs)

    def list_source_files(self):
        """Returns a list with the names of the files of the source package.
        The implementation assumes that all files in self.src_dir belong
        to the source package.
        """
        files = []
        if os.path.isdir(self.src_dir):
            files = sorted(util.listdir_abs(self.src_dir))
        return files

    def list_binary_files(self):
        """Returns a list with the names of the files of the binary packages.
        The implementation assumes that all files in self.binary_dir belong
        to the binary packages.
        """
        files = []
        if os.path.isdir(self.binary_dir):
            files = sorted(util.listdir_abs(self.binary_dir))
        return files

    def package(self):
        try:
            try:
                util.ensure_directory(self.work_dir)
                self.status.start = datetime.datetime.utcnow()
                src_packager = self.source_packager_cls(self)
                src_packager.package()

                dsc_file = self.find_dsc_file()
                if dsc_file is None:
                    raise RuntimeError("Cannot find dsc File in %r" % self.src_dir)

                bin_packager = self.binary_packager_cls(self, dsc_file, self.build_log)
                bin_packager.package()
            finally:
                util.compress_all_logs(self.log_dir)
                self.status.stop = datetime.datetime.utcnow()
        except:
            self.status.error()
            self.status.stop = datetime.datetime.utcnow()
            # set the notification status last to avoid race conditions.
            # The pending notification is for now the only situation
            # where another process might modify the status file (the
            # listpendingnotifications program will set it to
            # "notification_sent")
            self.status.notification_pending()
            raise

    def remove_package_dir(self):
        logging.info("Removing pkgdir %r", self.base_dir)
        shutil.rmtree(self.base_dir)


class PackageTrack(object):

    revision_packager_cls = RevisionPackager

    svn_external_subdirs = []

    extra_config_desc = []

    def __init__(self, name, base_dir, root_cmd, builderconfig, deb_email,
                 deb_fullname, url="", packager_class="treepkg.packager",
                 version_template="%(revision)s", builder_cls="PBuilder",
                 pkg_revision_template="treepkg%(pkg_revision)s",
                 handle_dependencies=False, signing_key_id="", do_build=True,
                 rules_url=None, deb_build_options="", pkg_basename="",
                 changelog_msg_template="Update to r%(revision)s",
                 svn_subset=(), svn_externals=(), branch="",
                 scm_type="svn", rules_scm_type="svn",
                 os="", status_hook="", svn_url=None):
        self.name = name

        # Convert the builder_cls option to a class
        if builder_cls.upper() == "SBDMOCKBUILDER" or \
            builder_cls.upper() == "SBDMOCK":
                builder_class = SbdmockBuilder
        elif builder_cls.upper() == "PBUILDER":
            builder_class = PBuilder
        else:
            # If the builder option is explicitly set with an unknown builder
            # a warning is printed.
            logging.warning("Track: %s Builder option %s could not be parsed \
                             defaulting to pbuilder" % (name, builder_cls))
            builder_class = PBuilder
        if not pkg_basename:
            pkg_basename = name
        self.pkg_basename = pkg_basename
        self.changelog_msg_template = changelog_msg_template
        self.base_dir = base_dir
        self.builder = builder_class(builderconfig, root_cmd,
                                release_signing_keyid=signing_key_id)
        self.deb_email = deb_email
        self.deb_fullname = deb_fullname
        self.deb_build_options = deb_build_options
        self.version_template = version_template
        self.pkg_revision_template = pkg_revision_template
        self.signing_key_id = signing_key_id
        self.do_build = do_build
        self.handle_dependencies = handle_dependencies
        self.dependencies = None
        self.os = os
        self.pkg_dir_template = "%(revision)s-%(rules_revision)s"
        self.pkg_dir_regex = re.compile(r"(?P<revision>[0-9a-f]+)"
                                        r"-(?P<rules_revision>[0-9a-f]+)$")
        self.status_hook = status_hook
        self.scm_type = scm_type
        self.rules_scm_type = rules_scm_type
        
        if svn_url:
            url = svn_url
            scm_type = "svn"
            logging.warning("Track: %s options contain svn_url which is " \
                    "deprecated. Please use url together with scm_type " \
                    "svn instead." % name)

        # use local debian dir if rules url is not set
        if not rules_url:
            rules_scm_type = "local"

        externals = svn_externals
        if not externals:
            externals = self.svn_external_subdirs
        if scm_type == "svn":
            repo = SvnRepository(url, externals, subset=svn_subset)
            self.working_copy = SvnWorkingCopy(repo, self.checkout_dir,
                                              logger=logging)
        elif scm_type == "git":
            repo = GitRepository(url, branch=branch)
            self.working_copy = GitWorkingCopy(repo, self.checkout_dir,
                                              logger=logging) 
        else:
            raise PackagerError("Unknown scm type \"%s\" for sources" %
                    scm_type)

        if rules_scm_type == "svn":
            repo = SvnRepository(rules_url)
            self.rules_working_copy = SvnWorkingCopy(repo, self.debian_dir,
                                                     logger=logging)
        elif rules_scm_type == "git":
            repo = GitRepository(rules_url)
            self.rules_working_copy = GitWorkingCopy(repo, self.debian_dir,
                                                     logger=logging)
        elif rules_scm_type == "local":
            self.rules_working_copy = ManualWorkingCopy(self.debian_dir)

        else:
            raise PackagerError("Unknown scm type \"%s\" for rules" %
                    scm_type)

    checkout_dir = util.filenameproperty("checkout")
    debian_dir = util.filenameproperty("debian")
    pkg_dir = util.filenameproperty("pkg")

    def init_treepkg(self):
        print "Initializing", self.name
        if not os.path.exists(self.base_dir):
            print "creating %s" % (self.base_dir,)
            util.ensure_directory(self.base_dir)
        # TODO: handle case where debian directory is in version control
        if not os.path.exists(self.debian_dir):
            print ("TODO: the debian directory %s still has to be created"
                   % (self.debian_dir,))

    def determine_dependencies(self):
        if self.dependencies is not None:
            return

        requires = ()
        provides = ()
        # only try to parse the control file if the debian directory
        # exists.  If the debian directory doesn't exist yet, the tree
        # packager is likely still being configured and this code may be
        # run indirectly from e. g. bin/inittreepkg.py in which case the
        # init_treepkg method will report the missing debian
        if self.handle_dependencies and os.path.exists(self.debian_dir):
            control = debian.DebianControlFile(os.path.join(self.debian_dir,
                                                            "control"))
            requires = control.build_depends
            provides = (pkg[0] for pkg in control.packages)
        self.dependencies = (set(requires), set(provides))
        logging.debug("Track %s: build depends: %s", self.name,
                      " ".join(self.dependencies[0]))
        logging.debug("Track %s: provides: %s", self.name,
                      " ".join(self.dependencies[1]))

    def dependencies_required(self):
        """Returns a list of required packages"""
        self.determine_dependencies()
        return self.dependencies[0]

    def dependencies_provided(self):
        """Returns a list of provided packages"""
        self.determine_dependencies()
        return self.dependencies[1]

    def pkg_dir_for_revision(self, revision, rules_revision):
        return os.path.join(self.pkg_dir,
                            self.pkg_dir_template % locals())

    def last_changed_revision(self):
        return self.working_copy.last_changed_revision()

    def get_revision_numbers(self):
        """Returns a list of the packaged revisions"""
        revisions = []
        if os.path.exists(self.pkg_dir):
            for filename in os.listdir(self.pkg_dir):
                match = self.pkg_dir_regex.match(filename)
                if match:
                    revisions.append((match.group("revision"),
                                      match.group("rules_revision")))
        return sorted(revisions)

    def update_checkout(self, revision=None):
        """Updates the working copy.

        If the checkout_dir doesn't exist yet, a new checkout is made
        into that directory.  The value of the revision parameter is
        passed through to the update method.
        """
        self.working_copy.update_or_checkout(revision=revision)

    def export_sources(self, to_dir):
        logging.info("Exporting sources for tarball to %r", to_dir)
        self.working_copy.export(to_dir)

    def copy_debian_directory(self, to_dir):
        logging.info("Copying debian directory to %r", to_dir)
        self.rules_working_copy.export(to_dir)

    def debian_environment(self):
        """Returns the environment variables for the debian commands"""
        env = os.environ.copy()
        env["DEBFULLNAME"] = self.deb_fullname
        env["DEBEMAIL"] = self.deb_email
        env["DEB_BUILD_OPTIONS"] = self.deb_build_options
        # cdbs requires DEB_BUILD_PARALLEL set to something non-empty,
        # otherwise it will ignore any parallel=<n> setting in
        # DEB_BUILD_OPTIONS.
        env["DEB_BUILD_PARALLEL"] = "true"
        return env
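        # Illustrative example (hypothetical configuration): with
        # deb_build_options set to "parallel=4", dpkg-buildpackage and cdbs
        # see DEB_BUILD_OPTIONS=parallel=4 and build with four parallel jobs.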

    def new_revsision_packager(self):
        """ Checks if a new revision is available and returns a new
        revision packager class. Don't override this method in a subclass.
        Use packager_for_new_revision() instead."""
        current_revision = (self.last_changed_revision(),
                            self.rules_working_copy.last_changed_revision())
        logging.info("New revision is %s", current_revision)
        if current_revision not in self.get_revision_numbers():
            logging.info("Revision %s has not been packaged yet",
                         current_revision)
            return self.revision_packager_cls(self, *current_revision)
        else:
            logging.info("Revision %s has already been packaged.",
                         current_revision)

    def packager_for_new_revision(self):
        return self.new_revsision_packager()

    def package_if_updated(self, revision=None, do_update=True):
        """Returns a new packager if the working copy has not been packaged yet.
        If do_update is true -- the default -- update the working
        copy to the revision specified with the revision parameter
        or if revision is None, the latest revision in the repository."""
        if not self.do_build:
            return None
        if do_update:
            self.update_checkout(revision=revision)
            # TODO: what should happen with the debian checkout, if a
            # revision for the source checkout was given?
            self.rules_working_copy.update_or_checkout()
        return self.packager_for_new_revision()

    def get_revisions(self):
        """Returns RevisionPackager objects for each packaged revision"""
        return [self.revision_packager_cls(self, revision, rules_revision)
                for revision, rules_revision in self.get_revision_numbers()]

    def sign_file(self, filename):
        """Signs a file using the debian.sign_file function.
        The file is signed with the key indicated by the track's
        signing_key_id attribute.  If that is empty, the file is not
        signed.
        """
        if self.signing_key_id:
            logging.info("Signing %r with key %r", filename,
                         self.signing_key_id)
            debian.sign_file(filename, self.signing_key_id)


def import_packager_module(packager_class):
    """Import the packager module named by packager_class.

    The packager_class must be the full absolute module name for the
    packager.  The function tries to find or create a suitable
    PackageTrack class from this module using the following rules:

      - If the module contains a class called PackageTrack, use that.

      - Otherwise create one using the module's RevisionPackager class,
        creating RevisionPackager if necessary.

      - If RevisionPackager needs to be created, it uses the module's
        SourcePackager as source_packager_cls and if present also the
        module's BinaryPackager as binary_packager_cls.  If the module
        does not have a BinaryPackager, the default BinaryPackager is
        used.
    """
    module = util.import_dotted_name(packager_class)
    if not hasattr(module, "PackageTrack"):
        if not hasattr(module, "RevisionPackager"):
            binary_packager = getattr(module, "BinaryPackager", BinaryPackager)
            module.RevisionPackager \
                = new.classobj("RevisionPackager", (RevisionPackager,),
                               dict(source_packager_cls=module.SourcePackager,
                                    binary_packager_cls=binary_packager))
        module.PackageTrack \
            = new.classobj("PackageTrack", (PackageTrack,),
                           dict(revision_packager_cls=module.RevisionPackager))
    return module

def create_package_track(packager_class, **kw):
    module = import_packager_module(packager_class)
    return module.PackageTrack(**kw)
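# Illustrative use (hypothetical values): treepkg normally instantiates its
# tracks from the configuration file; the effect is roughly equivalent to
#   track = create_package_track("treepkg.packager", name="mytrack",
#                                base_dir="/srv/treepkg/mytrack", url="...",
#                                root_cmd=..., builderconfig=...,
#                                deb_email="packager@example.com",
#                                deb_fullname="Tree Packager")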


class CyclicDependencyError(Exception):

    """Exception thrown when a cycle is detected in the track dependencies"""

    def __init__(self, tracks):
        Exception.__init__(self,
                           "Cyclic dependencies between" " tracks (%s)"
                           % ", ".join([track.name for track in tracks]))


class PackagerGroup(object):

    def __init__(self, package_tracks, check_interval, revision=None,
                 instructions_file=None, do_update=True,
                 stop_on_error=False, name="", treepkg_dir=None, 
                 tracks_dir=None):
        self.package_tracks = package_tracks
        self.check_interval = check_interval
        self.revision = revision
        self.do_update = do_update
        self.stop_on_error = stop_on_error
        self.instructions_file = instructions_file
        self.instructions_file_removed = False
        self.name = name
        self.treepkg_dir = treepkg_dir
        self.tracks_dir = tracks_dir
        self.sort_tracks()

    def sort_tracks(self):
        """Sorts tracks for dependency handling"""
        todo = self.package_tracks[:]
        sorted_tracks = []
        seen = set()

        # dependencies that can be solved by one of the tracks
        known = set()
        for track in todo:
            known |= track.dependencies_provided()

        while todo:
            todo_again = []
            for track in todo:
                if not track.handle_dependencies:
                    sorted_tracks.append(track)
                    continue

                unmet = (track.dependencies_required() & known) - seen
                if unmet:
                    todo_again.append(track)
                else:
                    sorted_tracks.append(track)
                    seen |= track.dependencies_provided()
            if todo_again == todo:
                raise CyclicDependencyError(todo)
            todo = todo_again

        self.package_tracks = sorted_tracks
        self.needed_binaries = set()
        for track in self.package_tracks:
            self.needed_binaries |= track.dependencies_required()
        self.needed_binaries &= known

        logging.info("sorted track order: %s",
                     " ".join(track.name for track in sorted_tracks))
        logging.info("binary packages needed as build dependencies: %s",
                     " ".join(self.needed_binaries))

    def run(self):
        """Runs the packager group indefinitely"""
        logging.info("Starting in periodic check mode."
                     "  Will check every %d seconds", self.check_interval)
        next_check = time.time()
        while 1:
            if self.should_stop():
                logging.info("Received stop instruction.  Stopping.")
                return

            this_check = time.time()
            if this_check >= next_check:
                logging.info("Next check is now")
                if self.check_package_tracks():
                    break
                last_check = this_check
                next_check = this_check + self.check_interval
            else:
                to_sleep = next_check - this_check
                logging.info("Next check at %s",
                             time.strftime("%Y-%m-%d %H:%M:%S",
                                           time.localtime(next_check)))
                time.sleep(to_sleep)

    def check_package_tracks(self):
        logging.info("Checking package tracks")
        self.clear_instruction()
        repeat = True
        while repeat:
            repeat = False
            for track in self.package_tracks:
                try:
                    packager = track.package_if_updated(revision=self.revision,
                                              do_update=self.do_update)
                    if packager:
                        packager.package()
                        repeat = self.install_dependencies(track, packager)
                except:
                    logging.exception("An error occurred while"
                                      " checking packager track %r", track.name)
                    if self.stop_on_error:
                        logging.info("Stopping because of errors.")
                        return True
                if self.should_stop():
                    logging.info("Received stop instruction.  Stopping.")
                    return True
                if repeat:
                    logging.info("Built binaries needed by other tracks."
                                 " Starting over to ensure all dependencies"
                                 " are met")
                    break

        logging.info("Checked all package tracks")


    def install_dependencies(self, track, packager):
        """Add the binaries built by packager to the builder, if necessary.
        It is necessary if any track depends on the packages.  The
        method simply installs all binary files built by the packager
        instead of only those which are immediately required by a track.
        This is done because tracks usually depend directly only on the
        -dev packages which usually require another binary package built
        at the same time.
        """
        if (track.handle_dependencies
            and track.dependencies_provided() & self.needed_binaries):
            # FIXME: this basically assumes that all tracks use the same
            # builder.  This is true for now, but it is possible to
            # configure treepkg with different builders for different
            # tracks and we really should be installing the newly built
            # binaries into the builder of the tracks which depends on
            # them
            binaries = packager.list_binary_files()
            track.builder.add_binaries_to_extra_pkg(binaries)
            return True
        return False


    def get_package_tracks(self):
        return self.package_tracks

    def read_instruction(self):
        if not self.instructions_file:
            return ""
        try:
            f = open(self.instructions_file)
        except (IOError, OSError):
            return ""
        try:
            return f.read().strip()
        finally:
            f.close()
            self.clear_instruction()

    def clear_instruction(self, force=False):
        if self.instructions_file and (not self.instructions_file_removed
                                       or force):
            util.writefile(self.instructions_file, "")
            self.instructions_file_removed = True

    def should_stop(self):
        return self.read_instruction() == "stop"