bricks@495: # Copyright (C) 2007-2010 by Intevation GmbH bh@0: # Authors: bh@0: # Bernhard Herzog bricks@495: # Bjoern Ricks bricks@495: # Andre Heinecke bh@0: # bh@0: # This program is free software under the GPL (>=v2) bh@0: # Read the file COPYING coming with the software for details. bh@0: bh@0: """Classes to automatically build debian packages from subversion checkouts""" bh@0: bh@0: import os bricks@437: import os.path bh@0: import time bh@0: import re bh@0: import logging bh@0: import shutil bh@16: import datetime bh@113: import new aheinecke@464: import sys bh@0: bh@0: import util bh@231: from subversion import SvnRepository, SvnWorkingCopy, ManualWorkingCopy aheinecke@321: from git import GitRepository, GitWorkingCopy bh@0: import run bh@16: import status bh@128: import debian bh@45: from cmdexpand import cmdexpand bh@112: from builder import PBuilder aheinecke@378: from sbuilder import SbdmockBuilder bh@0: bh@135: def _fromparent(attr): bh@135: """Creates a property that delegates its value to self.parent.""" bh@135: def get(self): bh@135: return getattr(self.parent, attr) bh@135: return property(get) bh@135: bh@0: bricks@511: bricks@511: class PackagerError(Exception): bricks@511: bricks@511: """Base class for Packager specific errors raised by TreePKG""" bricks@511: bh@0: class SourcePackager(object): bh@0: bh@299: pkg_basename = property(lambda self: self.track.pkg_basename) bh@300: changemsg_template = property(lambda self: bh@300: self.track.changelog_msg_template) bh@135: track = _fromparent("track") bh@135: revision = _fromparent("revision") bh@281: pkg_revision = _fromparent("pkg_revision") bh@135: status = _fromparent("status") bh@136: log_dir = _fromparent("log_dir") bh@135: work_dir = _fromparent("work_dir") bh@135: src_dir = _fromparent("src_dir") bh@135: bh@135: def __init__(self, parent): bh@135: self.parent = parent bricks@532: self.rules_revision = self.parent.rules_revision bricks@541: # TODO short revision should be determined with scm working copy bricks@532: self.short_revision = self.revision bricks@544: if len(self.short_revision) > 7: bricks@532: self.short_revision = self.short_revision[:7] bricks@532: localtime = time.localtime() bricks@532: self.pkg_date = time.strftime("%Y%m%d", localtime) bricks@532: self.pkg_time = time.strftime("%H%M", localtime) bh@0: bricks@495: def determine_upstream_version(self, directory=None): bricks@494: """ bricks@494: Tries to parse the upstream version from a source directory bricks@494: and returns it as a string. bricks@494: """ bricks@495: bricks@495: if not directory: bricks@495: directory = self.track.checkout_dir bricks@494: # TODO: it should be possible to select which files should be searched bricks@494: # for upstream_version bricks@494: bricks@494: #if os.path.isfile(os.path.join(directory, "CMakeList.txt")): bricks@494: # return util.extract_cmakefile_version(os.path.join(directory, bricks@494: # "CMakeList.txt")) bricks@494: if os.path.isfile(os.path.join(directory, "configure.ac")): bricks@494: return util.extract_configureac_version(os.path.join(directory, bricks@494: "configure.ac")) bricks@494: changelog = os.path.join(self.track.debian_dir, "changelog") bricks@494: if os.path.isfile(changelog): bricks@494: debian_version = util.debian_changelog_version( bricks@494: changelog) bricks@494: # upstream version is debian version without epoch and bricks@494: # debian revision bricks@494: if ":" in debian_version: bricks@494: debian_version = debian_version.split(":")[1] bricks@494: if "-" in debian_version: bricks@494: debian_version = debian_version.split("-")[0] bricks@494: upstream_version = debian_version bricks@494: else: bricks@494: upstream_version = "0" bricks@494: bricks@494: return upstream_version bricks@494: bricks@494: def determine_package_version(self, directory, additionals=None): bricks@494: """Returns the resolved version template of the package as a string bh@4: bh@4: The directory parameter is the name of the directory containing bh@4: the newly exported sources. The sources were exported with the bh@4: export_sources method. bh@4: bricks@494: The addionals parameter may contain a dictionary with additional bricks@494: variables used in the version template. bricks@494: bricks@494: Default variables that can be resolved are: bricks@494: revision - The revision of the package aheinecke@503: short_revision - The first seven characters of the revision bricks@494: rules_revision - The revision of the packaging rules bricks@494: pkg_date - The current date in the form: YYYYMMDD aheinecke@502: pkg_time - The current ime in the form: HHMM bricks@494: pkg_revision - The number of times a new package has bricks@494: been created from this track. bricks@494: upstream_version - The version parsed from the sources or bricks@494: package descriptions by bricks@494: determine_upstream_version. Default: "0" bh@4: """ aheinecke@450: revision = self.revision bricks@532: rules_revision = self.rules_revision bricks@532: pkg_revision = self.pkg_revision bricks@532: short_revision = self.short_revision bricks@532: pkg_date = self.pkg_date bricks@532: pkg_time = self.pkg_time bricks@495: upstream_version = self.determine_upstream_version(directory) bricks@494: version_dict = locals().copy() aheinecke@504: if additionals: bricks@494: version_dict.update(additionals) bricks@494: return self.track.version_template % version_dict bh@0: bh@0: def export_sources(self): bh@4: """Export the sources from the subversion working directory bh@4: bh@4: This method first exports the sources to a temporary directory bh@4: and then renames the directory. The new name is of the form bh@4: bh@4: - bh@4: bh@4: Where pkg_basename is the value of self.pkg_basename and version bh@4: is the return value of the determine_package_version() method. bh@4: """ bh@0: temp_dir = os.path.join(self.work_dir, "temp") bh@53: self.track.export_sources(temp_dir) bh@0: bh@0: pkgbaseversion = self.determine_package_version(temp_dir) bh@4: pkgbasedir = os.path.join(self.work_dir, bh@4: self.pkg_basename + "-" + pkgbaseversion) bh@0: bh@0: os.rename(temp_dir, pkgbasedir) bh@0: return pkgbaseversion, pkgbasedir bh@0: bh@0: bh@276: def prepare_sources_for_tarball(self, pkgbasedir, pkgbaseversion): bh@274: """Prepare the exported sources prior to creating the tarball. bh@4: bh@4: The default implementation does nothing. Derived classes should bh@274: override this method if necessary to e.g. update the version bh@274: numbers in the code. bh@4: """ bh@0: bh@207: def create_tarball(self, tarballname, workdir, basedir, compression="gz"): bh@4: """Creates a new tarball. bh@4: bh@4: Parameters: bh@4: bh@4: tarballname -- the filename of the new tarball bh@4: workdir -- The directory into which to change before running tar. bh@4: (actually this is done with GNUI tar's -C option) bh@4: basedir -- The basedirectory of the files that are packaged bh@4: into the tarfile. This should be a relative bh@4: filename directly in workdir. bh@207: compression -- The compression method to use as a string. bh@207: Suported are 'gz' for gzip compression (the bh@207: default) and 'bz2' for bzip2. bh@4: """ bh@0: logging.info("Creating tarball %r", tarballname) bh@207: if compression == "gz": bh@207: compression_flag = "z" bh@207: elif compression == "bz2": bh@207: compression_flag = "j" bh@207: else: bh@207: raise ValueError("Unknown compression method %r" % compression) bh@207: bh@207: run.call(cmdexpand("tar c -$compression_flag -f $tarballname" bh@207: " -C $workdir $basedir", **locals())) bh@0: bh@0: def copy_debian_directory(self, pkgbasedir, pkgbaseversion, changemsg): bh@4: """Copies the debian directory and updates the copy's changelog bh@4: bh@4: Parameter: bh@4: pkgbasedir -- The directory holding the unpacked source package bh@4: pkgbaseversion -- The version to update the changelog to bh@4: changemsg -- The message for the changelog bh@4: bh@4: When determining the actual version for the new package, this bh@4: function looks at the previous version in the changelog. If it bh@4: has a prefix separated from the version number by a colon this bh@4: prefix is prepended to the pkgbaseversion parameter. Debian bh@4: uses such prefixes for the kde packages. bh@4: """ bh@0: debian_dir = os.path.join(pkgbasedir, "debian") bh@0: changelog = os.path.join(debian_dir, "changelog") bh@0: bh@53: self.track.copy_debian_directory(debian_dir) bh@0: bh@0: logging.info("Updating %r", changelog) bh@0: oldversion = util.debian_changelog_version(changelog) bh@0: if ":" in oldversion: bh@0: oldversionprefix = oldversion.split(":")[0] + ":" bh@0: else: bh@0: oldversionprefix = "" bh@281: debrev = self.pkg_revision bh@45: run.call(cmdexpand("debchange -c $changelog" bh@93: " -v ${oldversionprefix}${pkgbaseversion}-${debrev}" bh@45: " $changemsg", **locals()), bh@53: env=self.track.debian_environment()) bh@0: bh@0: bh@0: def create_source_package(self, pkgbasedir, origtargz): bh@4: """Creates a new source package from pkgbasedir and origtargz""" bh@194: util.ensure_directory(self.log_dir) bh@194: dpkg_source_log = os.path.join(self.log_dir, "dpkg_source.txt") bh@194: logging.info("Creating new source package; logging to %s", bh@194: dpkg_source_log) bricks@367: bricks@367: format = self.get_debian_source_format(pkgbasedir) bricks@367: if format == "1.0": bricks@367: run.call(cmdexpand("dpkg-source -b $directory $tarball", bh@45: directory=os.path.basename(pkgbasedir), bh@45: tarball=os.path.basename(origtargz)), bh@0: cwd=os.path.dirname(pkgbasedir), bh@194: logfile=dpkg_source_log, bh@53: env=self.track.debian_environment()) bricks@367: elif format == "3.0 (quilt)": bricks@367: run.call(cmdexpand("dpkg-source -b $directory", bricks@367: directory=os.path.basename(pkgbasedir)), bricks@367: cwd=os.path.dirname(pkgbasedir), bricks@367: logfile=dpkg_source_log, bricks@367: env=self.track.debian_environment()) bricks@367: else: bricks@367: raise RuntimeError("debian source format %s is not supported by treepkg" % format) bricks@367: bricks@367: def get_debian_source_format(self, pkgbasedir): bricks@367: formatfile = os.path.join(pkgbasedir, "debian", "source", "format") bricks@367: if not os.path.exists(formatfile): bricks@367: return "1.0" bricks@367: else: bricks@367: file = open(formatfile, "r") bricks@367: line = file.readline() bricks@367: file.close() bricks@367: return line.strip() bh@0: bh@4: def move_source_package(self, pkgbasename): bh@4: """Moves the new source package from the work_dir to the src_dir""" bh@4: logging.info("Moving source package to %r", self.src_dir) bh@4: util.ensure_directory(self.src_dir) bh@4: for filename in [filename for filename in os.listdir(self.work_dir) bh@4: if filename.startswith(pkgbasename)]: bh@4: os.rename(os.path.join(self.work_dir, filename), bh@4: os.path.join(self.src_dir, filename)) bh@4: bh@179: def sign_package(self): bh@179: """Signs the .dsc file created buy the instance""" bricks@344: src_files = util.listdir_abs(self.src_dir, "*.dsc") bricks@344: if not src_files: bricks@344: raise RuntimeError("Could not find .dsc file in source" bricks@344: " directory %s" % self.src_dir) bricks@344: self.track.sign_file(src_files[0]) bh@179: bh@0: def package(self): bh@4: """Creates a source package from a subversion checkout. bh@4: bh@4: After setting up the working directory, this method calls the bh@4: do_package method to do the actual packaging. Afterwards the bh@4: work directory is removed. bh@4: """ bh@0: util.ensure_directory(self.work_dir) bh@0: try: bh@41: self.status.creating_source_package() bh@4: self.do_package() bh@179: self.sign_package() bh@41: self.status.source_package_created() bh@0: finally: bh@0: logging.info("Removing workdir %r", self.work_dir) bh@0: shutil.rmtree(self.work_dir) bh@0: bh@4: def do_package(self): bh@274: """Does the work of creating a source package.""" bh@274: pkgbaseversion, pkgbasedir = self.export_sources() bh@4: bh@274: pkgbasename = self.pkg_basename + "_" + pkgbaseversion bh@274: origtargz = os.path.join(self.work_dir, bh@274: pkgbasename + ".orig.tar.gz") bh@274: bh@276: self.prepare_sources_for_tarball(pkgbasedir, pkgbaseversion) bh@274: bh@274: self.create_tarball(origtargz, self.work_dir, bh@274: os.path.basename(pkgbasedir)) bh@274: bricks@344: changemsg = self.get_change_msg() bh@274: self.copy_debian_directory(pkgbasedir, pkgbaseversion, bh@274: changemsg) bh@274: bh@274: self.create_source_package(pkgbasedir, origtargz) bh@274: self.move_source_package(pkgbasename) bh@4: bricks@344: def get_change_msg(self): bricks@532: return self.changemsg_template % dict(revision=self.revision, bricks@532: pkg_date=self.pkg_date, pkg_time=self.pkg_time, bricks@532: rules_revision=self.rules_revision) bh@0: bh@0: class BinaryPackager(object): bh@0: bh@135: track = _fromparent("track") bh@135: status = _fromparent("status") bh@136: log_dir = _fromparent("log_dir") bh@135: binary_dir = _fromparent("binary_dir") bh@135: bh@135: def __init__(self, parent, dsc_file, logfile): bh@135: self.parent = parent bh@0: self.dsc_file = dsc_file bh@0: self.logfile = logfile bh@0: bh@0: def package(self): bh@41: self.status.creating_binary_package() bh@0: util.ensure_directory(self.binary_dir) bh@149: util.ensure_directory(self.log_dir) bh@54: logging.info("Building binary package; logging to %r", self.logfile) bh@297: self.track.builder.build(self.dsc_file, self.binary_dir, self.logfile, bh@297: extra_env=self.track.debian_environment()) bh@179: self.sign_package() bh@41: self.status.binary_package_created() bh@0: bh@179: def sign_package(self): bh@370: """Signs the .changes file created by the instance""" bricks@344: dirs = util.listdir_abs(self.binary_dir, "*.changes") bricks@344: if not dirs: bricks@344: raise RuntimeError("Cannot find changes File in %r" % self.binary_dir) bricks@344: self.track.sign_file(dirs[0]) bh@179: bh@0: bh@0: class RevisionPackager(object): bh@0: bh@4: source_packager_cls = SourcePackager bh@4: binary_packager_cls = BinaryPackager bh@4: bh@281: def __init__(self, track, revision, rules_revision, pkg_revision=None, bh@281: tag=""): bh@53: self.track = track bh@0: self.revision = revision bh@229: self.rules_revision = rules_revision bricks@551: self.short_rules_revision = rules_revision bricks@551: bricks@551: # fixme: short_rules_revision should be determined by scm bricks@551: if len(self.short_rules_revision) > 7: bricks@551: self.short_rules_revision = self.short_rules_revision[:7] bh@281: bh@281: if pkg_revision is None: bh@293: pkg_revision = (self.track.pkg_revision_template aheinecke@338: % dict(pkg_revision=1, bricks@551: rules_revision=rules_revision, bricks@551: short_rules_revision=short_rules_revision)) bh@281: self.pkg_revision = pkg_revision bh@281: bh@229: self.base_dir = self.track.pkg_dir_for_revision(self.revision, bh@229: rules_revision) bh@36: self.status = status.RevisionStatus(os.path.join(self.base_dir, aheinecke@464: "status"), aheinecke@464: self.after_setattr) bh@260: if tag: bh@260: util.ensure_directory(self.base_dir) bh@260: self.status.tags = tag bh@0: bh@172: log_dir = util.filenameproperty("log") bh@172: work_dir = util.filenameproperty("work") bh@172: binary_dir = util.filenameproperty("binary") bh@172: src_dir = util.filenameproperty("src") bh@172: build_log = util.filenameproperty("build_log.txt", dir_attr="log_dir") bh@0: aheinecke@464: def after_setattr(self, status, attr): aheinecke@464: ''' aheinecke@464: Execute a hook set in the source_hook configuration attribute aheinecke@464: every time the status changes. aheinecke@464: ''' aheinecke@464: if not self.track.status_hook: return aheinecke@464: logging.info("Executing status hook: %s" % self.track.status_hook ) aheinecke@464: status_env = { aheinecke@464: "TREEPKG_TRACK" : self.track.name, aheinecke@464: "TREEPKG_BASE_DIR" : self.base_dir, aheinecke@464: "TREEPKG_STATE" : attr, aheinecke@464: "TREEPKG_STATENAME" : status.status.name aheinecke@464: } aheinecke@464: run.call(cmdexpand(self.track.status_hook), extra_env=status_env) aheinecke@464: bh@0: def find_dsc_file(self): bh@0: for filename in os.listdir(self.src_dir): bh@0: if filename.endswith(".dsc"): bh@0: return os.path.join(self.src_dir, filename) bh@0: return None bh@0: bh@18: def has_build_log(self): aheinecke@336: return os.path.exists(self.get_log_file()) aheinecke@336: aheinecke@372: def get_log_title(self, f): aheinecke@372: if not os.path.isfile(f): aheinecke@372: return None aheinecke@372: title = os.path.basename(f) aheinecke@372: title = title.replace("_"," ") aheinecke@372: title = title[:title.find(".")] aheinecke@372: title = title.title() aheinecke@372: return title aheinecke@372: aheinecke@336: def get_log_file(self): aheinecke@336: if os.path.exists(self.build_log + ".gz"): aheinecke@336: return self.build_log + ".gz" aheinecke@336: return self.build_log bh@18: bricks@393: def get_log_files(self, logs=None): aheinecke@372: files = [] bricks@437: if os.path.isdir(self.log_dir): bricks@437: for f in os.listdir(self.log_dir): bricks@437: if logs is None or f in logs: bricks@437: f = os.path.join(self.log_dir,f) bricks@437: if os.path.isfile(f): bricks@437: files.append((self.get_log_title(f),f)) aheinecke@372: return files aheinecke@464: aheinecke@372: def list_log_files(self, logs): bh@140: """Returns a list describing the logfiles available for the revision. bh@140: Each list item is a tuple of the form (TITLE, FILENAME) where aheinecke@372: TITLE is a string with the filename without directory or ending in aheinecke@372: which _ is replaced with a whitespace and words are capitalized. bh@140: FILENAME is the absolute filename of the log file. bh@140: """ aheinecke@372: files = self.get_log_files(logs) aheinecke@372: if not files: aheinecke@372: return [] bh@140: return files bh@140: bh@88: def list_source_files(self): bh@88: """Returns a list with the names of the files of the source package. bh@88: The implementation assumes that all files in self.src_dir belong bh@88: to the source package. bh@88: """ bricks@437: files = [] bricks@437: if os.path.isdir(self.src_dir): bricks@437: files = sorted(util.listdir_abs(self.src_dir)) bricks@437: return files bh@88: bh@88: def list_binary_files(self): bh@88: """Returns a list with the names of the files of the binary packages. bh@88: The implementation assumes that all files in self.binary_dir belong bh@88: to the binary packages. bh@88: """ bricks@437: files = [] bricks@437: if os.path.isdir(self.binary_dir): bricks@437: files = sorted(util.listdir_abs(self.binary_dir)) bricks@437: return files bh@88: bh@0: def package(self): bh@0: try: aheinecke@462: try: aheinecke@462: util.ensure_directory(self.work_dir) aheinecke@462: self.status.start = datetime.datetime.utcnow() aheinecke@462: src_packager = self.source_packager_cls(self) aheinecke@462: src_packager.package() bh@0: aheinecke@462: dsc_file = self.find_dsc_file() aheinecke@462: if dsc_file is None: aheinecke@462: raise RuntimeError("Cannot find dsc File in %r" % self.src_dir) bh@0: aheinecke@462: bin_packager = self.binary_packager_cls(self, dsc_file, self.build_log) aheinecke@462: bin_packager.package() aheinecke@462: finally: aheinecke@462: util.compress_all_logs(self.log_dir) aheinecke@469: self.status.stop = datetime.datetime.utcnow() bh@0: except: bh@41: self.status.error() bh@16: self.status.stop = datetime.datetime.utcnow() bh@99: # set the notification status last to avoid race conditions. bh@99: # The pending notification is for now the only situation bh@99: # where another process might modify the status file (the bh@99: # listpendingnotifications program will set it to bh@99: # "notification_sent") bh@99: self.status.notification_pending() bh@0: raise bh@0: bh@0: def remove_package_dir(self): bh@0: logging.info("Removing pkgdir %r", self.base_dir) bh@0: shutil.rmtree(self.base_dir) bh@0: bh@0: bh@52: class PackageTrack(object): bh@0: bh@4: revision_packager_cls = RevisionPackager bh@4: bh@4: svn_external_subdirs = [] bh@4: bh@4: extra_config_desc = [] bh@4: bricks@344: def __init__(self, name, base_dir, root_cmd, builderconfig, deb_email, bricks@511: deb_fullname, url="", packager_class="treepkg.packager", aheinecke@378: version_template="%(revision)s", builder_cls="PBuilder", aheinecke@340: pkg_revision_template="treepkg%(pkg_revision)s", bh@293: handle_dependencies=False, signing_key_id="", do_build=True, bricks@511: rules_url=None, deb_build_options="", pkg_basename="", bh@304: changelog_msg_template="Update to r%(revision)s", bricks@511: svn_subset=(), svn_externals=(), branch="", bricks@511: scm_type="svn", rules_scm_type="svn", bricks@511: os="", status_hook="", svn_url=None): bh@0: self.name = name aheinecke@378: aheinecke@378: # Convert the builder_cls option to a class aheinecke@378: if builder_cls.upper() == "SBDMOCKBUILDER" or \ aheinecke@378: builder_cls.upper() == "SBDMOCK": aheinecke@378: builder_class = SbdmockBuilder aheinecke@378: elif builder_cls.upper() == "PBUILDER": aheinecke@378: builder_class = PBuilder aheinecke@378: else: aheinecke@378: # If the builder option is explicitly set with an unknown builder aheinecke@378: # a warning is printed. aheinecke@378: logging.warning("Track: %s Builder option %s could not be parsed \ aheinecke@378: defaulting to pbuilder" % (name, builder_cls)) aheinecke@378: builder_class = PBuilder bh@299: if not pkg_basename: bh@299: pkg_basename = name bh@299: self.pkg_basename = pkg_basename bh@300: self.changelog_msg_template = changelog_msg_template bh@0: self.base_dir = base_dir aheinecke@378: self.builder = builder_class(builderconfig, root_cmd, bh@176: release_signing_keyid=signing_key_id) bh@0: self.deb_email = deb_email bh@0: self.deb_fullname = deb_fullname bh@297: self.deb_build_options = deb_build_options bh@305: self.version_template = version_template bh@293: self.pkg_revision_template = pkg_revision_template bh@167: self.signing_key_id = signing_key_id bh@191: self.do_build = do_build bh@128: self.handle_dependencies = handle_dependencies bh@128: self.dependencies = None bricks@401: self.os = os aheinecke@328: self.pkg_dir_template = "%(revision)s-%(rules_revision)s" aheinecke@328: self.pkg_dir_regex = re.compile(r"(?P[0-9a-f]+)" aheinecke@328: r"-(?P[0-9a-f]+)$") aheinecke@464: self.status_hook = status_hook bricks@515: self.scm_type = scm_type bricks@515: self.rules_scm_type = rules_scm_type bricks@511: bricks@511: if svn_url: bricks@511: url = svn_url bricks@511: scm_type = "svn" bricks@511: logging.warning("Track: %s options contain svn_url which is " \ bricks@511: "deprecated. Please use url together with scm_type " \ bricks@511: "svn instead." % name) bricks@511: bricks@511: # use local debian dir if rules url is not set bricks@511: if not rules_url: bricks@511: rules_scm_type = "local" bricks@511: bh@306: externals = svn_externals bh@306: if not externals: bh@306: externals = self.svn_external_subdirs bricks@511: if scm_type == "svn": bricks@511: repo = SvnRepository(url, externals, subset=svn_subset) aheinecke@321: self.working_copy = SvnWorkingCopy(repo, self.checkout_dir, aheinecke@321: logger=logging) bricks@511: elif scm_type == "git": bricks@511: repo = GitRepository(url, branch=branch) aheinecke@321: self.working_copy = GitWorkingCopy(repo, self.checkout_dir, aheinecke@321: logger=logging) bricks@511: else: bricks@511: raise PackagerError("Unknown scm type \"%s\" for sources" % bricks@511: scm_type) bricks@511: bricks@511: if rules_scm_type == "svn": bricks@511: repo = SvnRepository(rules_url) bh@234: self.rules_working_copy = SvnWorkingCopy(repo, self.debian_dir, bh@234: logger=logging) bricks@511: elif rules_scm_type == "git": bricks@511: repo = GitRepository(rules_url) bricks@510: self.rules_working_copy = GitWorkingCopy(repo, self.debian_dir, bricks@548: logger=logging) bricks@511: elif rules_scm_type == "local": bricks@511: self.rules_working_copy = ManualWorkingCopy(self.debian_dir) bricks@511: bh@234: else: bricks@511: raise PackagerError("Unknown scm type \"%s\" for rules" % bricks@511: scm_type) bh@0: bh@172: checkout_dir = util.filenameproperty("checkout") bh@172: debian_dir = util.filenameproperty("debian") bh@172: pkg_dir = util.filenameproperty("pkg") bh@0: bh@106: def init_treepkg(self): bh@106: print "Initializing", self.name bh@106: if not os.path.exists(self.base_dir): bh@106: print "creating %s" % (self.base_dir,) bh@106: util.ensure_directory(self.base_dir) bh@234: # TODO: handle case where debian directory is in version control bh@106: if not os.path.exists(self.debian_dir): bh@106: print ("TODO: the debian directory %s still has to be created" bh@106: % (self.debian_dir,)) bh@106: bh@128: def determine_dependencies(self): bh@128: if self.dependencies is not None: bh@128: return bh@128: bh@128: requires = () bh@128: provides = () bh@131: # only try to parse the control file if the debian directory bh@131: # exists. If the debian directory doesn't exist yet, the tree bh@131: # packager is likely still being configured and this code may be bh@131: # run indirectly from e. g. bin/inittreepkg.py in which case the bh@131: # init_treepkg method will report the missing debian bh@131: if self.handle_dependencies and os.path.exists(self.debian_dir): bh@128: control = debian.DebianControlFile(os.path.join(self.debian_dir, bh@128: "control")) bh@128: requires = control.build_depends bh@128: provides = (pkg[0] for pkg in control.packages) bh@128: self.dependencies = (set(requires), set(provides)) bh@128: logging.debug("Track %s: build depends: %s", self.name, bh@128: " ".join(self.dependencies[0])) bh@128: logging.debug("Track %s: provides: %s", self.name, bh@128: " ".join(self.dependencies[1])) bh@128: bh@128: def dependencies_required(self): bh@128: """Returns a list of required packages""" bh@128: self.determine_dependencies() bh@128: return self.dependencies[0] bh@128: bh@128: def dependencies_provided(self): bh@128: """Returns a list of provided packages""" bh@128: self.determine_dependencies() bh@128: return self.dependencies[1] bh@128: bh@229: def pkg_dir_for_revision(self, revision, rules_revision): bh@0: return os.path.join(self.pkg_dir, bh@0: self.pkg_dir_template % locals()) bh@0: bh@0: def last_changed_revision(self): bh@224: return self.working_copy.last_changed_revision() bh@0: bh@11: def get_revision_numbers(self): aheinecke@328: """Returns a list of the packaged revisions""" bh@11: revisions = [] bh@11: if os.path.exists(self.pkg_dir): bh@11: for filename in os.listdir(self.pkg_dir): bh@11: match = self.pkg_dir_regex.match(filename) bh@11: if match: aheinecke@328: revisions.append((match.group("revision"), aheinecke@328: match.group("rules_revision"))) aheinecke@328: return sorted(revisions) bh@11: bh@80: def update_checkout(self, revision=None): bh@224: """Updates the working copy. bh@0: bh@224: If the checkout_dir doesn't exist yet, a new checkout is made bh@224: into that directory. The value of the revision parameter is bh@224: passed through to the update method. bh@0: """ bh@224: self.working_copy.update_or_checkout(revision=revision) bh@0: bh@0: def export_sources(self, to_dir): bh@0: logging.info("Exporting sources for tarball to %r", to_dir) bh@224: self.working_copy.export(to_dir) bh@0: bh@0: def copy_debian_directory(self, to_dir): bh@0: logging.info("Copying debian directory to %r", to_dir) bh@231: self.rules_working_copy.export(to_dir) bh@0: bh@0: def debian_environment(self): bh@0: """Returns the environment variables for the debian commands""" bh@0: env = os.environ.copy() bh@0: env["DEBFULLNAME"] = self.deb_fullname bh@0: env["DEBEMAIL"] = self.deb_email bh@297: env["DEB_BUILD_OPTIONS"] = self.deb_build_options bh@297: # cdbs requires DEB_BUILD_PARALLEL set to something non-empty, bh@297: # otherwise it will ignore any parallel= setting in bh@297: # DEB_BUILD_OPTIONS. bh@297: env["DEB_BUILD_PARALLEL"] = "true" bh@0: return env bh@0: bricks@499: def new_revsision_packager(self): bricks@499: """ Checks if a new revision is available and returns a new bricks@499: revision packager class. Don't override this method in a subclass. bricks@499: Use packager_for_new_revision() instead.""" bh@261: current_revision = (self.last_changed_revision(), bh@261: self.rules_working_copy.last_changed_revision()) bh@261: logging.info("New revision is %s", current_revision) bh@261: if current_revision not in self.get_revision_numbers(): bh@261: logging.info("Revision %s has not been packaged yet", bh@261: current_revision) bh@261: return self.revision_packager_cls(self, *current_revision) bh@261: else: bh@261: logging.info("Revision %s has already been packaged.", bh@261: current_revision) bh@261: bricks@499: def packager_for_new_revision(self): bricks@499: return self.new_revsision_packager() bricks@499: bricks@517: def package_if_updated(self, revision=None, do_update=True): bh@190: """Returns a new packager if the working copy has not been packaged yet. bricks@517: If do_update is true -- the default -- update the working bh@370: copy to the revision specified with the revision parameter bh@190: or if revision is None, the latest revision in the repository.""" bh@191: if not self.do_build: bh@191: return None bricks@517: if do_update: bh@190: self.update_checkout(revision=revision) bh@232: # TODO: what should happen with the debian checkout, if a bh@232: # revision for the source checkoute was given? bh@232: self.rules_working_copy.update_or_checkout() bh@261: return self.packager_for_new_revision() bh@0: bh@14: def get_revisions(self): bh@14: """Returns RevisionPackager objects for each packaged revision""" bh@229: return [self.revision_packager_cls(self, revision, rules_revision) bh@229: for revision, rules_revision in self.get_revision_numbers()] bh@14: bh@179: def sign_file(self, filename): bh@179: """Signs a file using the debian.sign_file function. bh@179: The file is signed with the key indicated by the track's bh@179: signing_key_id attribute. If that is empty, the file is not bh@179: signed. bh@179: """ bh@179: if self.signing_key_id: bh@179: logging.info("Signing %r with key %r", filename, bh@179: self.signing_key_id) bh@179: debian.sign_file(filename, self.signing_key_id) bh@179: bh@0: bh@113: def import_packager_module(packager_class): bh@113: """Import the packager module named by packager_class. bh@113: bh@113: The packager_class must be the full absolute module name for the bh@125: packager. The function tries to find or create a suitable bh@125: PackageTrack class from this module using the following rules: bh@125: bh@125: - If the module contains a class called PackageTrack, use that. bh@125: bh@125: - Otherwise create one using the module's RevisionPackager class, bh@125: creating RevisionPackager if necessary. bh@125: bh@125: - If RevisionPackager needs to be created, it uses the module's bh@125: SourcePackager as source_packager_cls and if present also the bh@125: module's BinaryPackager as binary_packager_cls. If the module bh@125: does not have a BinaryPackager, the default BinaryPackager is bh@125: used. bh@113: """ bh@113: module = util.import_dotted_name(packager_class) bh@113: if not hasattr(module, "PackageTrack"): bh@125: if not hasattr(module, "RevisionPackager"): bh@125: binary_packager = getattr(module, "BinaryPackager", BinaryPackager) bh@125: module.RevisionPackager \ bh@125: = new.classobj("RevisionPackager", (RevisionPackager,), bh@125: dict(source_packager_cls=module.SourcePackager, bh@125: binary_packager_cls=binary_packager)) bh@113: module.PackageTrack \ bh@113: = new.classobj("PackageTrack", (PackageTrack,), bh@113: dict(revision_packager_cls=module.RevisionPackager)) bh@113: return module bh@113: bh@52: def create_package_track(packager_class, **kw): bh@113: module = import_packager_module(packager_class) bh@52: return module.PackageTrack(**kw) bh@4: bh@0: bh@128: class CyclicDependencyError(Exception): bh@128: bh@128: """Exception thrown when a cycle is detected in the track dependencies""" bh@128: bh@128: def __init__(self, tracks): bh@128: Exception.__init__(self, bh@128: "Cyclic dependencies between" " tracks (%s)" bh@128: % ", ".join([track.name for track in tracks])) bh@128: bh@128: bh@7: class PackagerGroup(object): bh@0: bh@91: def __init__(self, package_tracks, check_interval, revision=None, bricks@517: instructions_file=None, do_update=True, bricks@389: stop_on_error=False, name="", treepkg_dir=None, bricks@389: tracks_dir=None): bh@52: self.package_tracks = package_tracks bh@0: self.check_interval = check_interval bh@80: self.revision = revision bricks@517: self.do_update = do_update bh@307: self.stop_on_error = stop_on_error bh@91: self.instructions_file = instructions_file bh@91: self.instructions_file_removed = False bricks@387: self.name = name bricks@389: self.treepkg_dir = treepkg_dir bricks@389: self.tracks_dir = tracks_dir bh@128: self.sort_tracks() bh@128: bh@128: def sort_tracks(self): bh@128: """Sorts tracks for dependency handling""" bh@128: todo = self.package_tracks[:] bh@128: sorted = [] bh@128: seen = set() bh@128: bh@128: # dependencies that can be solved by one of the tracks bh@128: known = set() bh@128: for track in todo: bh@128: known |= track.dependencies_provided() bh@128: bh@128: while todo: bh@128: todo_again = [] bh@128: for track in todo: bh@128: if not track.handle_dependencies: bh@128: sorted.append(track) bh@128: continue bh@128: bh@128: unmet = (track.dependencies_required() & known) - seen bh@128: if unmet: bh@128: todo_again.append(track) bh@128: else: bh@128: sorted.append(track) bh@128: seen |= track.dependencies_provided() bh@128: if todo_again == todo: bh@128: raise CyclicDependencyError(todo) bh@128: todo = todo_again bh@128: bh@128: self.package_tracks = sorted bh@128: self.needed_binaries = set() bh@128: for track in self.package_tracks: bh@128: self.needed_binaries |= track.dependencies_required() bh@128: self.needed_binaries &= known bh@128: bh@128: logging.info("sorted track order: %s", bh@128: " ".join(track.name for track in sorted)) bh@128: logging.info("binary packages needed as build dependencies: %s", bh@128: " ".join(self.needed_binaries)) bh@128: bh@0: def run(self): bh@7: """Runs the packager group indefinitely""" thomas@78: logging.info("Starting in periodic check mode." thomas@78: " Will check every %d seconds", self.check_interval) bh@312: next_check = time.time() bh@0: while 1: bh@312: if self.should_stop(): bh@312: logging.info("Received stop instruction. Stopping.") bh@312: return bh@312: bh@312: this_check = time.time() bh@312: if this_check >= next_check: bh@312: logging.info("Next check is now") bh@91: if self.check_package_tracks(): bh@91: break bh@312: last_check = this_check bh@312: next_check = this_check + self.check_interval bh@312: else: bh@312: to_sleep = next_check - this_check bh@312: logging.info("Next check at %s", bh@312: time.strftime("%Y-%m-%d %H:%M:%S", bh@312: time.localtime(next_check))) bh@312: time.sleep(to_sleep) bh@0: bh@52: def check_package_tracks(self): bh@52: logging.info("Checking package tracks") bh@91: self.clear_instruction() bh@128: repeat = True bh@128: while repeat: bh@128: repeat = False bh@128: for track in self.package_tracks: bh@128: try: bh@190: packager = track.package_if_updated(revision=self.revision, bricks@517: do_update=self.do_update) bh@128: if packager: bh@128: packager.package() bh@128: repeat = self.install_dependencies(track, packager) bh@128: except: bh@128: logging.exception("An error occurred while" bh@128: " checking packager track %r", track.name) bh@307: if self.stop_on_error: bh@310: logging.info("Stopping because of errors.") bh@310: return True bh@308: if self.should_stop(): bh@308: logging.info("Received stop instruction. Stopping.") bh@308: return True bh@128: if repeat: bh@128: logging.info("Built binaries needed by other tracks." bh@128: " Starting over to ensure all dependencies" bh@128: " are met") bh@128: break bh@128: bh@52: logging.info("Checked all package tracks") bh@14: bh@128: bh@128: def install_dependencies(self, track, packager): bh@128: """Add the binaries built by packager to the builder, if necessary. bh@128: It is necessary if any track depends on the packages. The bh@128: method simply installs all binary files built by the packger bh@128: instead of only those which are immediately required by a track. bh@128: This is done because tracks usually depend directly only on the bh@128: -dev packages which usually require another binary package built bh@128: at the same time. bh@128: """ bh@128: if (track.handle_dependencies bh@128: and track.dependencies_provided() & self.needed_binaries): bh@128: # FIXME: this basically assumes that all tracks use the same bh@128: # builder. This is true for now, but it is possible to bh@128: # configure treepkg with different builders for different bh@128: # tracks and we really should be installing the newly built bh@128: # binaries into the builder of the tracks which depends on bh@128: # them bh@128: binaries = packager.list_binary_files() bh@128: track.builder.add_binaries_to_extra_pkg(binaries) bh@128: return True bh@128: return False bh@128: bh@128: bh@52: def get_package_tracks(self): bh@52: return self.package_tracks bh@91: bh@91: def read_instruction(self): bh@91: if not self.instructions_file: bh@91: return "" bh@91: try: bh@91: f = open(self.instructions_file) bh@91: except (IOError, OSError): bh@91: return "" bh@91: try: bh@91: return f.read().strip() bh@91: finally: bh@91: f.close() bh@91: self.clear_instruction() bh@91: bh@91: def clear_instruction(self, force=False): bh@91: if self.instructions_file and (not self.instructions_file_removed bh@91: or force): bh@91: util.writefile(self.instructions_file, "") bh@91: self.instructions_file_removed = True bh@91: bh@91: def should_stop(self): bh@91: return self.read_instruction() == "stop"