view bin/publishdebianpackages.py @ 423:855829a4a2db treepkg-status

check if upload hook is empty
author Bjoern Ricks <bricks@intevation.de>
date Tue, 27 Jul 2010 16:28:22 +0000
parents 7780bde2ec68
children a2ba58ffbfbe
line wrap: on
line source
#! /usr/bin/python
# Copyright (C) 2007 - 2010 by Intevation GmbH
# Authors:
# Bernhard Herzog <bh@intevation.de>
# Bjoern Ricks    <bjoern.ricks@intevation.de>
#
# This program is free software under the GPL (>=v2)
# Read the file COPYING coming with the software for details.

"""Publishes selected packages created by treepkg"""

import os
import os.path
import re
import sys
import shlex

from optparse import OptionParser
from ConfigParser import SafeConfigParser

import treepkgcmd
from treepkg.readconfig import read_config_section, convert_bool
from treepkg.run import call, capture_output
from treepkg.cmdexpand import cmdexpand
from treepkg.publish import *
from treepkg.util import md5sum
from treepkg.info.status import TreepkgInfo
from treepkg.info.data import Package
from treepkg.info.data import CacheDb

# Matches strings that are empty or all-whitespace.  The pattern is
# anchored with $: a bare r'\s*' would match the zero-width prefix of
# *every* string, making any "is this blank?" test always true.
EMPTY = re.compile(r'\s*$')

# Declarative description of the [publishpackages] config section, handed
# to treepkg.readconfig.read_config_section.  Entries are either a plain
# key name (value kept as a raw string) or a (key, converter[, default])
# tuple whose converter is applied to the raw value.
# NOTE(review): entry order is presumably significant to
# read_config_section -- do not reorder.
config_desc = ["distribution", "section", "num_newest",
               "build_user", "build_host", "build_listpackages",
               "publish_user", "publish_host", 
               # shell-like word lists, e.g. "armel i386 source"
               ("architectures", shlex.split, "armel i386 source"),
               # the hook is split into an argument vector for call()
               ("after_upload_hook", shlex.split),
               ("publish_remove_old_packages", convert_bool),
               # directory values are normalized: trailing slashes stripped,
               # local cache paths additionally run through expand_filename
               ("publish_dir", remove_trailing_slashes),
               ("cachedb",
                lambda s: expand_filename(remove_trailing_slashes(s))),
               ("cachedir",
                lambda s: expand_filename(remove_trailing_slashes(s)))]


def read_config(filename):
    """Read the [publishpackages] section from the given config file.

    filename -- path of the configuration file.

    Exits the process with status 1 (after a message on stderr) when the
    file does not exist.  Returns whatever read_config_section yields for
    config_desc.

    Improvement: the py2-only 'print >>sys.stderr' statement is replaced
    by sys.stderr.write() with identical output bytes, so the module can
    at least be parsed by Python 3 tooling.
    """
    if not os.path.exists(filename):
        sys.stderr.write("Config file %s does not exist\n" % filename)
        sys.exit(1)
    parser = SafeConfigParser()
    parser.read([filename])
    return read_config_section(parser, "publishpackages", config_desc)

def parse_commandline():
    """Define the command line interface and return (options, args)."""
    opt_parser = OptionParser()
    opt_parser.add_option("--config-file",
                          help=("The configuration file."
                                " Default is publishpackages.cfg"))
    opt_parser.add_option("--dist",
                          help=("The debian distribution name to use on"
                                " the publishing system"))
    opt_parser.add_option("--section",
                          help=("The debian distribution section name to use on"
                                " the publishing system"))
    opt_parser.add_option("--track",
                          help=("The package track whose files are to be"
                                " published. If not given, files of all tracks"
                                " will be published"))
    opt_parser.add_option("--quiet", action="store_true",
                          help=("Do not print progress meters or other"
                                " informational output"))
    # defaults: config file next to the treepkg installation, verbose output
    opt_parser.set_defaults(config_file=os.path.join(treepkgcmd.topdir,
                                                     "publishpackages.cfg"),
                            quiet=False)
    return opt_parser.parse_args()

def get_treepkg_info(variables):
    """Run build_listpackages on the build system and parse its XML output.

    variables -- the publishpackages configuration dict; build_user and
    build_host select the (possibly remote) build account to query.
    Returns a TreepkgInfo instance.
    """
    remote_prefix = prefix_for_remote_command(variables["build_user"],
                                              variables["build_host"])
    command = cmdexpand("@runremote $build_listpackages"
                        " --newest=$num_newest"
                        " --only-successful",
                        runremote=remote_prefix, **variables)
    return TreepkgInfo.fromxml(capture_output(command))

def get_binary_arch(arch):
    """Map a plain architecture name to its Debian archive directory name.

    "armel" becomes "binary-armel"; None, "source" and names already
    starting with "binary" are returned unchanged.
    """
    if arch is None or arch == "source" or arch.startswith("binary"):
        return arch
    return "binary-" + arch

def check_package_is_new(packagename, destdir, packagemd5sum):
    """Return whether packagename has to be copied into destdir.

    True when destdir does not yet contain the file or when the existing
    copy's md5 checksum differs from packagemd5sum.
    """
    existing = os.path.join(destdir, packagename)
    if os.path.isfile(existing):
        return md5sum(existing) != packagemd5sum
    return True

def get_md5sum(packageinfo):
    """Extract the md5 checksum from a package info object.

    Returns the checksum of the first entry with type "md5" from
    packageinfo.checksums, or "" when packageinfo is falsy or carries
    no md5 entry.
    """
    if not packageinfo:
        return ""
    for entry in packageinfo.checksums:
        if entry.type == "md5":
            return entry.checksum
    return ""

def sort_trackname_arch(a, b):
    """Comparison function ordering packages by trackname, then by arch."""
    if a.trackname != b.trackname:
        return -1 if a.trackname < b.trackname else +1
    # same track: order by architecture (equivalent to cmp(a.arch, b.arch))
    return (a.arch > b.arch) - (a.arch < b.arch)

def copy_files_to_destdir(destdir, files, variables, quiet=False):
    """scp the given files from the build system into the local destdir.

    destdir is created (with parents) when missing.  If build_host is set,
    every filename is prefixed with user@host: so scp fetches it from the
    remote build system.  An empty files list only ensures destdir exists.
    """
    if not os.path.exists(destdir):
        os.makedirs(destdir)
    if not files:
        return
    extra_flags = ["-q"] if quiet else []
    if variables["build_host"]:
        remote_prefix = "%(build_user)s@%(build_host)s:" % variables
        files = [remote_prefix + name for name in files]
    # scp the packages to the cache dir
    call(cmdexpand("scp -p @scp_flags @files $cachedir/", files=files,
                   scp_flags=extra_flags, cachedir=destdir))

def remove_old_packages(cachedb, newpackages, quiet):
    """Purge cached package files that are not part of newpackages.

    cachedb -- the package cache database; asked which stored files are
        not in the new set, and told afterwards which entries to drop
    newpackages -- Package objects that form the current, wanted set
    quiet -- suppress per-file progress output when True

    Improvement: py2 'print x' statements rewritten as 'print(x)', which
    behaves identically under Python 2 and also parses under Python 3.
    """
    newfiles = [package.filename for package in newpackages]
    oldpackages = cachedb.get_old_packages(newfiles)
    for package in oldpackages:
        # the file may already be gone; only unlink what really exists
        if os.path.isfile(package.filename):
            if not quiet:
                print("removing file %s" % package.filename)
            os.remove(package.filename)
    cachedb.remove_packages(oldpackages)

def copy_packages_to_destdir(cachedb, dir, packages, variables, quiet = False):
    """Copy new package files into <dir>/<arch>/<trackname>/ directories.

    packages is sorted by (trackname, arch) so that all files sharing one
    destination directory form a consecutive run that is transferred with
    a single scp invocation.  Every package is registered in cachedb; a
    file is only scheduled for transfer when it is missing from the
    destination or its md5 checksum differs (see check_package_is_new).

    Fixes: an empty packages list used to raise IndexError on packages[0]
    and is now a no-op; 'print x' rewritten as the py2/py3-compatible
    'print(x)'.
    """
    if not packages:
        return
    packages.sort(cmp=sort_trackname_arch)
    package = packages[0]
    trackname = package.trackname
    arch = package.arch
    destdir = os.path.join(dir, arch, trackname)
    files = []
    for package in packages:
        cachedb.add_package(package)
        # a new (trackname, arch) group starts: flush the files collected
        # for the previous destination directory first
        if package.trackname != trackname or \
           package.arch != arch:
            copy_files_to_destdir(destdir, files, variables, quiet)
            trackname = package.trackname
            arch = package.arch
            destdir = os.path.join(dir, arch, trackname)
            files = []
        # add only to copy files list if the packages differ
        if check_package_is_new(package.name, destdir, package.md5sum):
            files.append(package.sourcepath)
            if not quiet:
                print("copy new file: %s" % package.name)
    # flush the final group
    copy_files_to_destdir(destdir, files, variables, quiet)
           
def copy_to_cachedir(variables, track, revision, quiet = False, architectures=None):
    """Fetch the newest packages from the build system into the local cache.

    variables -- the publishpackages configuration dict (cachedir, cachedb,
        build_user/build_host, ...)
    track, revision -- unused in this function.  NOTE(review): the loop
        below rebinds 'track', shadowing the parameter of the same name.
    quiet -- suppress informational output when True
    architectures -- optional list of architecture names (e.g. "armel",
        "source"); None or an empty list means "copy everything"

    Arch-"all" binary packages are duplicated into every selected binary
    architecture directory.  Afterwards files no longer produced by the
    build system are purged from the cache (remove_old_packages).
    """
    cachedir = variables["cachedir"]
    cachdebfilename = variables["cachedb"]
    if not quiet:
        print "using cachedb: %s" % cachdebfilename
    cachedb = CacheDb(cachdebfilename)
    newpackages = []
    treepkginfo = get_treepkg_info(variables)
    #allowedarchs = set([]) # contains all wanted architectures (incl. source)
    allarchs = set([]) # contains all present architectures (incl. source)
    binaryallpackages = []
    # change e.g. armel in binary-armel
    if not architectures is None:
        allowedarchs = set([get_binary_arch(a) for a in architectures])
    else:
        allowedarchs = set([])
    for track in treepkginfo.tracks:
        for rev in track.revisions:
            for packageinfo in rev.packages:
                arch = get_binary_arch(packageinfo.arch)
                if packageinfo.type == "binary":
                    # skip other files
                    if packageinfo.arch is None:
                        continue
                    # handle binary-all
                    if arch == "binary-all":
                        # add trackname for subdir name
                        packageinfo.trackname = track.name
                        binaryallpackages.append(packageinfo)
                        continue
                    allarchs.add(arch)
                elif packageinfo.type == "source":
                    # source packages live in a "source" dir, not binary-*
                    arch = packageinfo.type
                # only copy requested archs
                if len(allowedarchs) == 0 or \
                   arch in allowedarchs:
                    filename = os.path.join(cachedir, arch, track.name,
                                            packageinfo.name)
                    newpackage = Package(filename, track.name, packageinfo.name,
                                         packageinfo.path, arch,
                                         get_md5sum(packageinfo))
                    newpackages.append(newpackage)
    # copy binary-all packages
    # target archs for "all" packages: intersection of wanted and present
    # binary archs; when one of the sets is empty fall back to the other
    sourcearch = set(["source"])
    if len(allowedarchs) == 0:
        binallarchs = allarchs - sourcearch 
    elif len(allarchs) == 0:
        binallarchs = allowedarchs - sourcearch
    else:
        binallarchs = (allowedarchs & allarchs) - sourcearch
    for packageinfo in binaryallpackages:
        for arch in binallarchs:
            filename = os.path.join(cachedir, arch, packageinfo.trackname,
                                    packageinfo.name)
            newpackage = Package(filename, packageinfo.trackname, packageinfo.name,
                                 packageinfo.path, arch, get_md5sum(packageinfo))
            newpackages.append(newpackage)
    copy_packages_to_destdir(cachedb, cachedir, newpackages, variables, quiet)
    remove_old_packages(cachedb, newpackages, quiet)

def publish_packages(config_filename, track, revision, dist, section, quiet):
    """Publish packages: fill the local cache, upload, run the hook.

    config_filename -- path to the publishpackages configuration file
    track, revision -- forwarded to copy_to_cachedir
    dist, section -- override the configured distribution/section when
        not None (command line takes precedence)
    quiet -- suppress informational output when True
    """
    config = read_config(config_filename)

    # command-line values take precedence over the config file
    if dist is None:
        dist = config["distribution"]
    if section is None:
        section = config["section"]

    architectures = config["architectures"]
    copy_to_cachedir(config, track, revision, quiet, architectures)
    copy_to_publishdir(config, dist, section, None, quiet)

    # update apt archive.  after_upload_hook is an argument list produced
    # by shlex.split, so truthiness detects an empty/unset hook.  The old
    # check, EMPTY.match(config["after_upload_hook"]), raised TypeError on
    # a list and -- being an unanchored r'\s*' -- matched every string
    # anyway, so the hook could never run.
    if config["after_upload_hook"]:
        if not quiet:
            print("running after upload hook")
        call(config["after_upload_hook"])

def main():
    """Script entry point: parse the command line and publish packages."""
    options, args = parse_commandline()
    # the revision argument is reserved for future use cases
    revision = None
    publish_packages(options.config_file, options.track, revision,
                     options.dist, options.section, options.quiet)


if __name__ == "__main__":
    main()
This site is hosted by Intevation GmbH (Datenschutzerklärung und Impressum | Privacy Policy and Imprint)