Initial commit.

Importing tito code from Spacewalk as is.
Devan Goodwin 2009-07-18 15:17:14 -03:00
commit e87345d7b7
10 changed files with 2507 additions and 0 deletions

bin/bump-version.pl Normal file

@@ -0,0 +1,92 @@
#!/usr/bin/perl
use strict;
use warnings FATAL => 'all';
my $command = shift @ARGV;
if (not defined $command
or ($command ne 'bump-version' and $command ne 'bump-release')) {
usage();
}
my $specfile = 0;
if (@ARGV and $ARGV[0] eq '--specfile') {
$specfile = 1;
shift @ARGV;
}
if (not @ARGV) {
usage();
}
sub usage {
die "usage: $0 { bump-version | bump-release } [--specfile] file [ files ... ]\n";
}
my $newfile;
my @content;
while (<ARGV>) {
if ($specfile) {
if ($command eq 'bump-version') {
s/^(version:\s*)(.+)/ $1 . bump_version($2) /ei;
s/^(release:\s*)(.+)/ $1 . reset_release($2) /ei;
} else {
s/^(release:\s*)(.+)/ $1 . bump_version($2) /ei;
}
push @content, $_;
} else {
chomp;
my ($version, $release, $rest) = split /\s/, $_, 3;
if ($command eq 'bump-version') {
$version = bump_version($version);
$release = reset_release($release);
} else {
$release = bump_version($release);
}
if (defined $rest) {
$release .= ' ' . $rest;
}
push @content, "$version $release\n";
# slurp the rest of the file
while (not eof(ARGV)) {
push @content, scalar <ARGV>;
}
}
} continue {
if (eof(ARGV)) {
local *OUT;
undef $newfile;
if ($ARGV eq '-') {
*OUT = \*STDOUT;
} else {
$newfile = $ARGV . ".$$";
open OUT, "> $newfile" or die "Error writing [$newfile]: $!\n";
}
print OUT @content;
if (defined $newfile) {
close OUT;
rename $newfile, $ARGV;
}
}
}
sub bump_version {
local $_ = shift;
no warnings 'uninitialized';
s/^(.+\.)?([0-9]+)(\.|%|$)/$1 . ($2 + 1) . $3/e;
$_;
}
sub reset_release {
local $_ = shift;
s/(^|\.)([.0-9]+)(\.|%|$)/${1}1$3/;
$_;
}
END {
if (defined $newfile and -f $newfile) {
unlink $newfile;
}
}
1;
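
For reference, a minimal Python sketch (illustrative only, not part of this commit) of the substitution bump_version() applies: the last numeric component before a '.', a '%', or the end of the string is incremented, and everything after it (such as %{?dist}) is left alone.

import re

def bump_version(value):
    # Increment the last numeric component before a '.', '%', or end of string.
    return re.sub(r'^(.+\.)?([0-9]+)(\.|%|$)',
                  lambda m: (m.group(1) or "") + str(int(m.group(2)) + 1) + m.group(3),
                  value, count=1)

print(bump_version("0.4.0"))      # 0.4.1
print(bump_version("1%{?dist}"))  # 2%{?dist}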

bin/tar-fixup-stamp-comment.pl Executable file

@@ -0,0 +1,114 @@
#!/usr/bin/perl
use strict;
use warnings FATAL => 'all';
use IO::Handle ();
use constant RECORD_SIZE => 512;
use constant GIT_BLOCK_SIZE => RECORD_SIZE * 20;
my $stamp = shift;
if (not defined $stamp) {
die "Please specify stamp to put into the tar as the first parameter.\n";
}
my $stamp_octal = sprintf "%011o", $stamp;
my $comment = shift;
if (defined $comment) {
if (not $comment =~ /^[0-9a-f]{40}$/) {
die "The comment we will put into the tar should be SHA1 in hex (40 characters).\n";
}
}
my $chunk;
my $handle = \*STDIN;
my $read;
my $need_header = 1;
my $total_len = 0;
while ($read = $handle->sysread($chunk, RECORD_SIZE)) {
# print STDERR "read [$read]\n";
if ($read < RECORD_SIZE) {
my $rest = RECORD_SIZE - $read;
while (my $read = $handle->sysread($chunk, $rest, length($chunk))) {
# print STDERR " plus [$read]\n";
$rest -= $read;
}
}
if ($chunk eq "\0" x 512) {
# look for the second record full of zeroes
my $pad;
my $read = $handle->sysread($pad, RECORD_SIZE);
if ($read) {
if ($read < RECORD_SIZE) {
my $rest = RECORD_SIZE - $read;
while (my $read = $handle->sysread($pad, $rest, length($pad))) {
$rest -= $read;
}
}
}
if ($pad ne "\0" x 512) {
die "Failed to find second stop record.\n";
}
print $chunk;
print $pad;
$total_len += length($chunk) + length($pad);
print "\0" x (padded_record_size($total_len, GIT_BLOCK_SIZE) - $total_len);
exit;
}
my ($name, $data1, $size, $mtime, $checksum, $link, $name2, $data2) = unpack 'A100 A24 A12 A12 A8 A1 A100 a*', $chunk;
my $block_size = $size ? padded_record_size( oct $size ) : $size;
# print STDERR "[$name] [$size] [$mtime] [$checksum] [$link] [$name2] [$block_size]\n";
if ($need_header and $link ne 'g' and defined $comment) {
my $header = pack 'a100 a8 a8 a8 a12 a12 A8 a1 a100 a6 a2 a32 a32 a8 a8 a155 x12',
'pax_global_header', (sprintf "%07o", 0666), '0000000', '0000000',
'00000000064', $stamp_octal, '', 'g', '',
'ustar', '00', 'root', 'root', '0000000', '0000000', '';
substr($header, 148, 8) = sprintf("%07o\0", unpack("%16C*", $header));
print $header;
print pack "a512", "52 comment=$comment\n";
$need_header = 0;
$total_len += 2 * 512;
}
my $out = $chunk;
my $write_comment = 0;
if ($mtime) {
substr($out, 136, 12) = pack "a12", $stamp_octal;
substr($out, 148, 8) = pack "A8", "";
substr($out, 148, 8) = sprintf("%07o\0", unpack("%16C*", $out));
if ($link eq 'g' and oct $size == 52) {
$write_comment = 1;
}
}
print $out;
$total_len += length $out;
my $payload;
while (my $read = $handle->sysread( $payload, $block_size )) {
if (defined $comment and $write_comment) {
if ($read < 52) {
die "Would like to put SHA1 into header but did not read at least 52 bytes.\n";
}
if (not $payload =~ /^52 comment=/) {
die "The header payload is not [52 comment=].\n";
}
substr($payload, 0, 52) = "52 comment=$comment\n";
}
# print STDERR " payload [@{[ length $payload ]}]\n";
print $payload;
$total_len += length $payload;
$block_size -= $read;
last unless $block_size;
}
}
sub padded_record_size {
my $len = shift;
my $pad_size = shift || RECORD_SIZE;
my $out = int($len / $pad_size);
$out++ if $len % $pad_size;
return $out * $pad_size;
}
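
A minimal Python sketch (illustrative, not part of this commit) of the rounding padded_record_size() performs: a byte count is padded up to a whole number of 512-byte tar records, or of 10240-byte (512 * 20) blocks when flushing the final output.

RECORD_SIZE = 512
GIT_BLOCK_SIZE = RECORD_SIZE * 20

def padded_record_size(length, pad_size=RECORD_SIZE):
    # Round length up to the next whole multiple of pad_size.
    blocks, remainder = divmod(length, pad_size)
    if remainder:
        blocks += 1
    return blocks * pad_size

print(padded_record_size(100))                   # 512
print(padded_record_size(1024))                  # 1024
print(padded_record_size(1536, GIT_BLOCK_SIZE))  # 10240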

bin/test-setup-specfile.pl

@@ -0,0 +1,52 @@
#!/usr/bin/perl
use strict;
use warnings FATAL => 'all';
my ($IN, $SHA1, $DIR, $TAR_GZ) = @ARGV;
open IN, $IN or die "Error reading [$IN]\n";
my @lines = <IN>;
close IN;
my ($have_release, $have_source, $have_setup) = (0, 0, 0);
my $i = 0;
for (@lines) {
no warnings 'uninitialized';
if (s/^(Release:\s*)(.+?)(%{\?dist})?\s*\n$/$1$2.git.$SHA1$3\n/i) {
if ($have_release) {
die "Duplicate Release line found in [$IN] at line [$i]\n";
}
$have_release++;
}
if (defined $TAR_GZ and s/^(Source0?:\s*)(.+?)\n$/$1$TAR_GZ\n/i) {
if ($have_source) {
die "Duplicate Source (or Source0) line found in [$IN] at line [$i]\n";
}
$have_source++;
}
if (defined $DIR and /^%setup/) {
if (not s/\s+-n\s+\S+(\s*)/ -n $DIR$1/) {
s/\n/ -n $DIR\n/;
}
$have_setup++;
}
$i++;
}
if (not $have_release) {
die "The specfile [$IN] does not seem to have a Release: line we could use\n";
}
if (defined $TAR_GZ and not $have_source) {
die "The specfile [$IN] does not seem to have a Source: line we could use\n";
}
if (defined $DIR and not $have_setup) {
die "The specfile [$IN] does not seem to have a %setup line we could use\n";
}
my $OUT = "$IN.$SHA1";
open OUT, "> $OUT" or die "Error writing [$OUT]\n";
print OUT @lines;
close OUT;
rename $OUT, $IN;
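
As an illustration (this sketch is not part of the commit; the Release value and SHA1 below are made up), the Release-line rewrite the script performs, expressed in Python:

import re

line = "Release: 1%{?dist}\n"
sha1 = "0123456789abcdef0123456789abcdef01234567"  # hypothetical commit SHA1

release_re = re.compile(r'^(Release:\s*)(.+?)(%\{\?dist\})?\s*\n$', re.IGNORECASE)
rewritten = release_re.sub(
    lambda m: "%s%s.git.%s%s\n" % (m.group(1), m.group(2), sha1, m.group(3) or ""),
    line)
print(rewritten)  # Release: 1.git.0123456789abcdef0123456789abcdef01234567%{?dist}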

bin/tito Executable file

@@ -0,0 +1,26 @@
#!/usr/bin/python
#
# Copyright (c) 2008,2009 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
import sys
import os.path
# Python libraries are one level up from where this script lives in rel-eng:
sys.path.append(os.path.join(os.path.dirname(sys.argv[0]), "../lib/"))
from spacewalk.releng.cli import CLI
if __name__ == "__main__":
CLI().main()

src/spacewalk/releng/__init__.py Normal file

@@ -0,0 +1,19 @@
#
# Copyright (c) 2008 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
"""
Infrastructure for building Spacewalk and Satellite packages from git tags.
"""

src/spacewalk/releng/builder.py Normal file

@@ -0,0 +1,970 @@
#
# Copyright (c) 2009 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
""" Code for building Spacewalk/Satellite tarballs, srpms, and rpms. """
import os
import re
import sys
import string
import commands
from spacewalk.releng.common import *
DEFAULT_KOJI_OPTS = "build --nowait"
DEFAULT_CVS_BUILD_DIR = "cvswork"
# List of CVS files to protect when syncing git with a CVS module:
CVS_PROTECT_FILES = ('branch', 'CVS', '.cvsignore', 'Makefile', 'sources')
class Builder(object):
"""
Parent builder class.
Includes functionality for a standard Spacewalk package build. Packages
which require other unusual behavior can subclass this to inject the
desired behavior.
"""
def __init__(self, name=None, version=None, tag=None, build_dir=None,
pkg_config=None, global_config=None, user_config=None, dist=None,
test=False, offline=False):
self.git_root = find_git_root()
self.rel_eng_dir = os.path.join(self.git_root, "rel-eng")
self.project_name = name
self.build_tag = tag
self.build_version = version
self.dist = dist
self.test = test
self.global_config = global_config
self.user_config = user_config
self.offline=offline
self.no_cleanup = False
self.rpmbuild_basedir = build_dir
self.display_version = self._get_display_version()
self.git_commit_id = get_build_commit(tag=self.build_tag,
test=self.test)
self.project_name_and_sha1 = "%s-%s" % (self.project_name,
self.git_commit_id)
self.relative_project_dir = get_relative_project_dir(
project_name=self.project_name, commit=self.git_commit_id)
tgz_base = self._get_tgz_name_and_ver()
self.tgz_filename = tgz_base + ".tar.gz"
self.tgz_dir = tgz_base
temp_dir = "rpmbuild-%s" % self.project_name_and_sha1
self.rpmbuild_dir = os.path.join(self.rpmbuild_basedir, temp_dir)
if os.path.exists(self.rpmbuild_dir):
print("WARNING: rpmbuild directory already exists, removing...")
run_command("rm -rf %s" % self.rpmbuild_dir)
self.rpmbuild_sourcedir = os.path.join(self.rpmbuild_dir, "SOURCES")
self.rpmbuild_builddir = os.path.join(self.rpmbuild_dir, "BUILD")
# A copy of the git code from commit we're building:
self.rpmbuild_gitcopy = os.path.join(self.rpmbuild_sourcedir,
self.tgz_dir)
# Set to true if we've already created a tgz:
self.ran_tgz = False
# NOTE: These are defined later when/if we actually dump a copy of the
# project source at the tag we're building. Only then can we search for
# a spec file.
self.spec_file_name = None
self.spec_file = None
# List of full path to all sources for this package.
self.sources = []
# Set to path to srpm once we build one.
self.srpm_location = None
# Configure CVS variables if possible. Will check later that
# they're actually defined if the user requested CVS work be done.
if self.global_config.has_section("cvs"):
if self.global_config.has_option("cvs", "cvsroot"):
self.cvs_root = self.global_config.get("cvs", "cvsroot")
debug("cvs_root = %s" % self.cvs_root)
if self.global_config.has_option("cvs", "branches"):
self.cvs_branches = \
global_config.get("cvs", "branches").split(" ")
# TODO: if it looks like we need custom CVSROOT's for different users,
# allow setting of a property to lookup in ~/.spacewalk-build-rc to
# use instead. (if defined)
self.cvs_workdir = os.path.join(self.rpmbuild_basedir,
DEFAULT_CVS_BUILD_DIR)
debug("cvs_workdir = %s" % self.cvs_workdir)
self.cvs_package_workdir = os.path.join(self.cvs_workdir,
self.project_name)
# When syncing files with CVS, only copy files with these extensions:
self.cvs_copy_extensions = (".spec", ".patch")
def run(self, options):
"""
Perform the actions requested of the builder.
NOTE: this method may do nothing if the user requested no build actions
be performed. (i.e. only release tagging, etc)
"""
print("Building package [%s]" % (self.build_tag))
self.no_cleanup = options.no_cleanup
if options.tgz:
self.tgz()
if options.srpm:
self._srpm()
if options.rpm:
self._rpm()
if options.release:
self.release()
elif options.cvs_release:
self._cvs_release()
elif options.koji_release:
self._koji_release()
self.cleanup()
def tgz(self):
"""
Create the .tar.gz required to build this package.
Returns full path to the created tarball.
"""
self._setup_sources()
run_command("cp %s/%s %s/" % \
(self.rpmbuild_sourcedir, self.tgz_filename,
self.rpmbuild_basedir))
self.ran_tgz = True
full_path = os.path.join(self.rpmbuild_basedir, self.tgz_filename)
print "Wrote: %s" % full_path
self.sources.append(full_path)
return full_path
# TODO: reuse_cvs_checkout isn't needed here, should be cleaned up:
def _srpm(self, dist=None, reuse_cvs_checkout=False):
"""
Build a source RPM.
"""
self._create_build_dirs()
if not self.ran_tgz:
self.tgz()
if self.test:
self._setup_test_specfile()
debug("Creating srpm from spec file: %s" % self.spec_file)
define_dist = ""
if self.dist:
define_dist = "--define 'dist %s'" % self.dist
elif dist:
define_dist = "--define 'dist %s'" % dist
cmd = "rpmbuild %s %s --nodeps -bs %s" % \
(self._get_rpmbuild_dir_options(), define_dist, self.spec_file)
output = run_command(cmd)
print(output)
self.srpm_location = self._find_wrote_in_rpmbuild_output(output)[0]
def _rpm(self):
""" Build an RPM. """
self._create_build_dirs()
if not self.ran_tgz:
self.tgz()
if self.test:
self._setup_test_specfile()
define_dist = ""
if self.dist:
define_dist = "--define 'dist %s'" % self.dist
cmd = "rpmbuild %s %s --clean -ba %s" % \
(self._get_rpmbuild_dir_options(), define_dist, self.spec_file)
output = run_command(cmd)
print output
files_written = self._find_wrote_in_rpmbuild_output(output)
if len(files_written) < 2:
error_out("Error parsing rpmbuild output")
self.srpm_location = files_written[0]
def release(self):
"""
Release this package via configuration for this git repo and branch.
Check if CVS support is configured in rel-eng/global.build.py.props
and initiate CVS import/tag/build if so.
Check for configured Koji branches also, if found create srpms and
submit to those branches with proper disttag's.
"""
if self._can_build_in_cvs():
self._cvs_release()
if self._can_build_in_koji():
self._koji_release()
def _cvs_release(self):
"""
Sync spec file/patches with CVS, create tags, and submit to brew/koji.
"""
self._verify_cvs_module_not_already_checked_out()
print("Building release in CVS...")
commands.getoutput("mkdir -p %s" % self.cvs_workdir)
debug("cvs_branches = %s" % self.cvs_branches)
self._cvs_checkout_module()
self._cvs_verify_branches_exist()
# Get the list of all sources from the builder:
self.tgz()
self._cvs_sync_files()
# Important step here, ends up populating several important members
# on the builder object so some of the below lines will not work
# if moved above this one.
self._cvs_upload_sources()
self._cvs_user_confirm_commit()
self._cvs_make_tag()
self._cvs_make_build()
def _koji_release(self):
"""
Lookup autobuild Koji tags from global config, create srpms with
appropriate disttags, and submit builds to Koji.
"""
autobuild_tags = self.global_config.get("koji", "autobuild_tags")
print("Building release in Koji...")
debug("Koji tags: %s" % autobuild_tags)
koji_tags = autobuild_tags.strip().split(" ")
koji_opts = DEFAULT_KOJI_OPTS
if self.user_config.has_key('KOJI_OPTIONS'):
koji_opts = self.user_config['KOJI_OPTIONS']
for koji_tag in koji_tags:
# Lookup the disttag configured for this Koji tag:
disttag = self.global_config.get(koji_tag, "disttag")
if self.global_config.has_option(koji_tag, "whitelist"):
# whitelist implies only those packages can be built to the
# tag, regardless of whether blacklist is also defined.
if self.project_name not in self.global_config.get(koji_tag,
"whitelist").strip().split(" "):
print("WARNING: %s not specified in whitelist for %s" % (
self.project_name, koji_tag))
print(" Package *NOT* submitted to Koji.")
continue
elif self.global_config.has_option(koji_tag, "blacklist"):
if self.project_name in self.global_config.get(koji_tag,
"blacklist").strip().split(" "):
print("WARNING: %s specified in blacklist for %s" % (
self.project_name, koji_tag))
print(" Package *NOT* submitted to Koji.")
continue
# Getting tricky here, normally Builder's are only used to
# create one rpm and then exit. Here we're going to try
# to run multiple srpm builds:
self._srpm(dist=disttag, reuse_cvs_checkout=True)
self._submit_build("koji", koji_opts, koji_tag)
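# For illustration only (the tag names and values here are hypothetical):
# the koji configuration this method reads from global.build.py.props looks
# roughly like the following, using the autobuild_tags, disttag, whitelist
# and blacklist options consulted above:
#
#   [koji]
#   autobuild_tags = dist-5E-sw-candidate dist-f11-sw-candidate
#
#   [dist-5E-sw-candidate]
#   disttag = .el5
#   whitelist = spacewalk-java spacewalk-setup
#
#   [dist-f11-sw-candidate]
#   disttag = .fc11
#   blacklist = spacewalk-setup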
def _setup_sources(self):
"""
Create a copy of the git source for the project at the point in time
our build tag was created.
Created in the temporary rpmbuild SOURCES directory.
"""
self._create_build_dirs()
debug("Creating %s from git tag: %s..." % (self.tgz_filename,
self.git_commit_id))
create_tgz(self.git_root, self.tgz_dir, self.git_commit_id,
self.relative_project_dir, self.rel_eng_dir,
os.path.join(self.rpmbuild_sourcedir, self.tgz_filename))
# Extract the source so we can get at the spec file, etc.
debug("Copying git source to: %s" % self.rpmbuild_gitcopy)
run_command("cd %s/ && tar xzf %s" % (self.rpmbuild_sourcedir,
self.tgz_filename))
# NOTE: The spec file we actually use is the one exported by git
# archive into the temp build directory. This is done so we can
# modify the version/release on the fly when building test rpms
# that use a git SHA1 for their version.
self.spec_file_name = find_spec_file(in_dir=self.rpmbuild_gitcopy)
self.spec_file = os.path.join(self.rpmbuild_gitcopy, self.spec_file_name)
def _verify_cvs_module_not_already_checked_out(self):
""" Exit if CVS module appears to already be checked out. """
# Make sure the cvs checkout directory doesn't already exist:
cvs_co_dir = os.path.join(self.cvs_workdir, self.project_name)
if os.path.exists(cvs_co_dir):
error_out("CVS workdir exists, please remove and try again: %s"
% cvs_co_dir)
def _cvs_checkout_module(self):
print("Checking out cvs module [%s]" % self.project_name)
os.chdir(self.cvs_workdir)
run_command("cvs -d %s co %s" % (self.cvs_root, self.project_name))
def _cvs_verify_branches_exist(self):
""" Check that CVS checkout contains the branches we expect. """
os.chdir(self.cvs_package_workdir)
for branch in self.cvs_branches:
if not os.path.exists(os.path.join(self.cvs_workdir,
self.project_name, branch)):
error_out("%s CVS checkout is missing branch: %s" %
(self.project_name, branch))
def _cvs_upload_sources(self):
"""
Upload any tarballs to the CVS lookaside directory. (if necessary)
Uses the "make new-sources" target in common.
"""
if len(self.sources) == 0:
debug("No sources need to be uploaded.")
return
print("Uploading sources to dist-cvs lookaside:")
for branch in self.cvs_branches:
branch_dir = os.path.join(self.cvs_workdir, self.project_name,
branch)
os.chdir(branch_dir)
cmd = 'make new-sources FILES="%s"' % string.join(self.sources, " ")
debug(cmd)
output = run_command(cmd)
debug(output)
def _cvs_sync_files(self):
"""
Copy files from git into each CVS branch and add them. Extra files
found in CVS will then be deleted.
A list of CVS safe files is used to protect critical files both from
being overwritten by a git file of the same name, as well as being
deleted after.
"""
# Build the list of all files we will copy from git to CVS.
debug("Searching for git files to copy to CVS:")
# Include the spec file explicitly, in the case of SatelliteBuilder
# we modify and then use a spec file copy from a different location.
files_to_copy = [self.spec_file] # full paths
filenames_to_copy = [os.path.basename(self.spec_file)] # just filenames
for filename in os.listdir(self.rpmbuild_gitcopy):
full_filepath = os.path.join(self.rpmbuild_gitcopy, filename)
if os.path.isdir(full_filepath):
# skip it
continue
if filename in CVS_PROTECT_FILES:
debug(" skipping: %s (protected file)" % filename)
continue
elif filename.endswith(".spec"):
# Skip the spec file, we already copy this explicitly as it
# can come from a couple different locations depending on which
# builder is in use.
continue
# Check if file ends with something this builder subclass wants
# to copy:
copy_it = False
for extension in self.cvs_copy_extensions:
if filename.endswith(extension):
copy_it = True
continue
if copy_it:
debug(" copying: %s" % filename)
files_to_copy.append(full_filepath)
filenames_to_copy.append(filename)
for branch in self.cvs_branches:
branch_dir = os.path.join(self.cvs_workdir, self.project_name,
branch)
os.chdir(branch_dir)
print("Syncing files with CVS branch [%s]" % branch)
for copy_me in files_to_copy:
base_filename = os.path.basename(copy_me)
dest_path = os.path.join(branch_dir, base_filename)
# Check if file we're about to copy already exists in CVS so
# we know if we need to run 'cvs add' or not:
cvs_add = True
if os.path.exists(dest_path):
cvs_add = False
cmd = "cp %s %s" % (copy_me, dest_path)
run_command(cmd)
if cvs_add:
print(" added: %s" % base_filename)
commands.getstatusoutput("cvs add %s" % base_filename)
else:
print(" copied: %s" % base_filename)
# Now delete any extraneous files in the CVS branch.
for filename in os.listdir(branch_dir):
if filename not in CVS_PROTECT_FILES and \
filename not in filenames_to_copy:
print(" deleted: %s" % filename)
# Can't delete via full path, must not chdir:
run_command("cvs rm -Rf %s" % filename)
def _cvs_user_confirm_commit(self):
""" Prompt user if they wish to proceed with commit. """
print("")
text = "Running 'cvs diff -u' in: %s" % self.cvs_package_workdir
print("#" * len(text))
print(text)
print("#" * len(text))
print("")
os.chdir(self.cvs_package_workdir)
(status, output) = commands.getstatusoutput("cvs diff -u")
print(output)
print("")
print("##### Please review the above diff #####")
answer = raw_input("Do you wish to proceed with commit? [y/n] ")
if answer.lower() not in ['y', 'yes', 'ok', 'sure']:
print("Fine, you're on your own!")
self.cleanup()
sys.exit(1)
else:
print("Proceeding with commit.")
os.chdir(self.cvs_package_workdir)
cmd = 'cvs commit -m "Update %s to %s"' % \
(self.project_name, self.build_version)
debug("CVS commit command: %s" % cmd)
output = run_command(cmd)
def _cvs_make_tag(self):
""" Create a CVS tag based on what we just committed. """
os.chdir(self.cvs_package_workdir)
print("Creating CVS tags...")
for branch in self.cvs_branches:
branch_dir = os.path.join(self.cvs_workdir, self.project_name,
branch)
os.chdir(branch_dir)
(status, output) = commands.getstatusoutput("make tag")
print(output)
if status > 1:
self.cleanup()
sys.exit(1)
def _cvs_make_build(self):
""" Build srpm and submit to build system. """
os.chdir(self.cvs_package_workdir)
print("Submitting CVS builds...")
for branch in self.cvs_branches:
branch_dir = os.path.join(self.cvs_workdir, self.project_name,
branch)
os.chdir(branch_dir)
output = run_command("BUILD_FLAGS=--nowait make build")
print(output)
def _can_build_in_cvs(self):
"""
Return True if this repo and branch is configured to build in CVS.
"""
if not self.global_config.has_section("cvs"):
debug("Cannot build from CVS, no 'cvs' section found in global.build.py.props")
return False
if not self.global_config.has_option("cvs", "cvsroot"):
debug("Cannot build from CVS, no 'cvsroot' defined in global.build.py.props")
return False
if not self.global_config.has_option("cvs", "branches"):
debug("Cannot build from CVS, no branches defined in global.build.py.props")
return False
return True
def _can_build_in_koji(self):
"""
Return True if this repo and branch are configured to auto build in
any Koji tags.
"""
if not self.global_config.has_section("koji"):
debug("No 'koji' section found in global.build.py.props")
return False
if not self.global_config.has_option("koji", "autobuild_tags"):
debug("Cannot build in Koji, no autobuild_tags defined in global.build.py.props")
return False
return True
def _submit_build(self, executable, koji_opts, tag):
""" Submit srpm to brew/koji. """
cmd = "%s %s %s %s" % (executable, koji_opts, tag, self.srpm_location)
print("\nSubmitting build with: %s" % cmd)
output = run_command(cmd)
print(output)
def _find_wrote_in_rpmbuild_output(self, output):
"""
Parse the output from rpmbuild looking for lines beginning with
"Wrote:". Return a list of file names for each path found.
"""
paths = []
look_for = "Wrote: "
for line in output.split("\n"):
if line.startswith(look_for):
paths.append(line[len(look_for):])
debug("Found wrote line: %s" % paths[-1])
if (len(paths) == 0):
error_out("Unable to locate 'Wrote: ' lines in rpmbuild output")
return paths
def cleanup(self):
"""
Remove all temporary files and directories.
"""
if not self.no_cleanup:
debug("Cleaning up [%s]" % self.rpmbuild_dir)
commands.getoutput("rm -rf %s" % self.rpmbuild_dir)
debug("Cleaning up [%s]" % self.cvs_package_workdir)
run_command("rm -rf %s" % self.cvs_package_workdir)
def _create_build_dirs(self):
"""
Create the build directories. Can safely be called multiple times.
"""
commands.getoutput("mkdir -p %s %s %s %s" % (self.rpmbuild_basedir,
self.rpmbuild_dir, self.rpmbuild_sourcedir, self.rpmbuild_builddir))
def _setup_test_specfile(self):
if self.test:
# If making a test rpm we need to get a little crazy with the spec
# file we're building off. (note that this is a temp copy of the
# spec) Swap out the actual release for one that includes the git
# SHA1 we're building for our test package:
setup_specfile_script = os.path.join(SCRIPT_DIR,
"test-setup-specfile.pl")
cmd = "perl %s %s %s %s-%s %s" % \
(
setup_specfile_script,
self.spec_file,
self.git_commit_id,
self.project_name,
self.display_version,
self.tgz_filename
)
run_command(cmd)
def _get_rpmbuild_dir_options(self):
return """--define "_sourcedir %s" --define "_builddir %s" --define "_srcrpmdir %s" --define "_rpmdir %s" """ % \
(self.rpmbuild_sourcedir, self.rpmbuild_builddir,
self.rpmbuild_basedir, self.rpmbuild_basedir)
def _get_tgz_name_and_ver(self):
"""
Returns the project name for the .tar.gz to build. Normally this is
just the project name, but in the case of Satellite packages it may
be different.
"""
return "%s-%s" % (self.project_name, self.display_version)
def _get_display_version(self):
"""
Get the package display version to build.
Normally this is whatever is in rel-eng/packages/. In the case of a --test
build it will be the SHA1 for the HEAD commit of the current git
branch.
"""
if self.test:
version = "git-" + get_git_head_commit()
else:
version = self.build_version.split("-")[0]
return version
class NoTgzBuilder(Builder):
"""
Builder for packages that do not require the creation of a tarball.
Usually these packages have source tarballs checked directly into git.
i.e. most of the packages in spec-tree.
"""
def __init__(self, name=None, version=None, tag=None, build_dir=None,
pkg_config=None, global_config=None, user_config=None, dist=None,
test=False, offline=False):
Builder.__init__(self, name=name, version=version, tag=tag,
build_dir=build_dir, pkg_config=pkg_config,
global_config=global_config, user_config=user_config, dist=dist,
test=test, offline=offline)
# When syncing files with CVS, copy everything from git:
self.cvs_copy_extensions = ("",)
def tgz(self):
""" Override parent behavior, we already have a tgz. """
# TODO: Does it make sense to allow user to create a tgz for this type
# of project?
self._setup_sources()
self.ran_tgz = True
source_suffixes = ('.tar.gz', '.tar', '.zip', '.jar')
debug("Scanning for sources.")
for filename in os.listdir(self.rpmbuild_gitcopy):
for suffix in source_suffixes:
if filename.endswith(suffix):
self.sources.append(os.path.join(self.rpmbuild_gitcopy,
filename))
debug(" Sources: %s" % self.sources)
def _get_rpmbuild_dir_options(self):
"""
Override parent behavior slightly.
These packages store tarballs, patches, etc. directly in their project
dir, so we use the git copy we create as the sources directory when
building the package, ensuring everything can be found:
"""
return """--define "_sourcedir %s" --define "_builddir %s" --define "_srcrpmdir %s" --define "_rpmdir %s" """ % \
(self.rpmbuild_gitcopy, self.rpmbuild_builddir,
self.rpmbuild_basedir, self.rpmbuild_basedir)
def _setup_test_specfile(self):
""" Override parent behavior. """
if self.test:
# If making a test rpm we need to get a little crazy with the spec
# file we're building off. (note that this is a temp copy of the
# spec) Swap out the actual release for one that includes the git
# SHA1 we're building for our test package:
script = os.path.join(SCRIPT_DIR, "test-setup-specfile.pl")
cmd = "perl %s %s %s" % \
(
script,
self.spec_file,
self.git_commit_id
)
run_command(cmd)
class CvsBuilder(NoTgzBuilder):
"""
CVS Builder
Builder for packages whose sources are managed in dist-cvs/Fedora CVS.
"""
def __init__(self, name=None, version=None, tag=None, build_dir=None,
pkg_config=None, global_config=None, user_config=None, dist=None,
test=False, offline=False):
NoTgzBuilder.__init__(self, name=name, version=version, tag=tag,
build_dir=build_dir, pkg_config=pkg_config,
global_config=global_config, user_config=user_config, dist=dist,
test=test, offline=offline)
# TODO: Hack to override here, patches are in a weird place with this
# builder.
self.patch_dir = self.rpmbuild_gitcopy
def run(self, options):
""" Override parent to capture any new source files specified on the command line. """
# Convert new sources to full paths right now, before we chdir:
if options.cvs_new_sources is not None:
for new_source in options.cvs_new_sources:
self.sources.append(os.path.abspath(os.path.expanduser(new_source)))
debug("CvsBuilder sources: %s" % self.sources)
NoTgzBuilder.run(self, options)
def _srpm(self, dist=None, reuse_cvs_checkout=False):
""" Build an srpm from CVS. """
rpms = self._cvs_rpm_common(target="test-srpm", dist=dist,
reuse_cvs_checkout=reuse_cvs_checkout)
# Should only be one rpm returned for srpm:
self.srpm_location = rpms[0]
def _rpm(self):
# Lookup the architecture of the system for the correct make target:
arch = run_command("uname -i")
self._cvs_rpm_common(target=arch, all_branches=True)
def _cvs_rpm_common(self, target, all_branches=False, dist=None,
reuse_cvs_checkout=False):
""" Code common to building both rpms and srpms with CVS tools. """
self._create_build_dirs()
if not self.ran_tgz:
self.tgz()
if not self._can_build_in_cvs():
error_out("Repo not properly configured to build in CVS. (--debug for more info)")
if not reuse_cvs_checkout:
self._verify_cvs_module_not_already_checked_out()
commands.getoutput("mkdir -p %s" % self.cvs_workdir)
self._cvs_checkout_module()
self._cvs_verify_branches_exist()
if self.test:
self._setup_test_specfile()
# Copy latest spec so we build that version, even if it isn't the
# latest actually committed to CVS:
self._cvs_sync_files()
self._cvs_upload_sources()
# Use "make srpm" target to create our source RPM:
os.chdir(self.cvs_package_workdir)
print("Building with CVS make %s..." % target)
# Only running on the last branch, good enough?
branch = self.cvs_branches[-1]
branch_dir = os.path.join(self.cvs_workdir, self.project_name,
branch)
os.chdir(branch_dir)
disttag = ""
if self.dist is not None:
disttag = "DIST=%s" % self.dist
elif dist is not None:
disttag = "DIST=%s" % dist
output = run_command("make %s %s" % (disttag, target))
debug(output)
rpms = []
for line in output.split("\n"):
if line.startswith("Wrote: "):
srpm_path = line.strip().split(" ")[1]
filename = os.path.basename(srpm_path)
run_command("mv %s %s" % (srpm_path, self.rpmbuild_basedir))
final_rpm_path = os.path.join(self.rpmbuild_basedir, filename)
print("Wrote: %s" % final_rpm_path)
rpms.append(final_rpm_path)
if not self.test:
print("Please be sure to run --release to commit/tag/build this package in CVS.")
return rpms
class SatelliteBuilder(NoTgzBuilder):
"""
Builder for packages that are based off some upstream version in Spacewalk
git. Commits applied in Satellite git become patches applied to the
upstream Spacewalk tarball.
i.e. satellite-java-0.4.0-5 built from spacewalk-java-0.4.0-1 and any
patches applied in satellite git.
i.e. spacewalk-setup-0.4.0-20 built from spacewalk-setup-0.4.0-1 and any
patches applied in satellite git.
"""
def __init__(self, name=None, version=None, tag=None, build_dir=None,
pkg_config=None, global_config=None, user_config=None, dist=None,
test=False, offline=False):
NoTgzBuilder.__init__(self, name=name, version=version, tag=tag,
build_dir=build_dir, pkg_config=pkg_config,
global_config=global_config, user_config=user_config, dist=dist,
test=test, offline=offline)
if not pkg_config or not pkg_config.has_option("buildconfig",
"upstream_name"):
# No upstream_name defined, assume we're keeping the project name:
self.upstream_name = self.project_name
else:
self.upstream_name = pkg_config.get("buildconfig", "upstream_name")
# Need to assign these after we've exported a copy of the spec file:
self.upstream_version = None
self.upstream_tag = None
self.patch_filename = None
self.patch_file = None
# When syncing files with CVS, only copy files with these extensions:
self.cvs_copy_extensions = (".spec", ".patch")
def tgz(self):
"""
Override parent behavior, we need a tgz from the upstream spacewalk
project we're based on.
"""
# TODO: Wasteful step here, all we really need is a way to look for a
# spec file at the point in time this release was tagged.
NoTgzBuilder._setup_sources(self)
# If we knew what it was named at that point in time we could just do:
# Export a copy of our spec file at the revision to be built:
# cmd = "git show %s:%s%s > %s" % (self.git_commit_id,
# self.relative_project_dir, self.spec_file_name,
# self.spec_file)
# debug(cmd)
self._create_build_dirs()
self.upstream_version = self._get_upstream_version()
self.upstream_tag = "%s-%s-1" % (self.upstream_name,
self.upstream_version)
print("Building upstream tgz for tag [%s]" % (self.upstream_tag))
if self.upstream_tag != self.build_tag:
check_tag_exists(self.upstream_tag, offline=self.offline)
self.spec_file = os.path.join(self.rpmbuild_sourcedir,
self.spec_file_name)
run_command("cp %s %s" % (os.path.join(self.rpmbuild_gitcopy,
self.spec_file_name), self.spec_file))
# Create the upstream tgz:
prefix = "%s-%s" % (self.upstream_name, self.upstream_version)
tgz_filename = "%s.tar.gz" % prefix
commit = get_build_commit(tag=self.upstream_tag)
relative_dir = get_relative_project_dir(
project_name=self.upstream_name, commit=commit)
tgz_fullpath = os.path.join(self.rpmbuild_sourcedir, tgz_filename)
print("Creating %s from git tag: %s..." % (tgz_filename, commit))
create_tgz(self.git_root, prefix, commit, relative_dir,
self.rel_eng_dir, tgz_fullpath)
self.ran_tgz = True
self.sources.append(tgz_fullpath)
# If these are equal then the tag we're building was likely created in
# Spacewalk and thus we don't need to do any patching.
if (self.upstream_tag == self.build_tag and not self.test):
return
self._generate_patches()
self._insert_patches_into_spec_file()
def _generate_patches(self):
"""
Generate patches for any differences between our tag and the
upstream tag.
"""
self.patch_filename = "%s-to-%s-%s.patch" % (self.upstream_tag,
self.project_name, self.build_version)
self.patch_file = os.path.join(self.rpmbuild_gitcopy,
self.patch_filename)
os.chdir(os.path.join(self.git_root, self.relative_project_dir))
print("Generating patch [%s]" % self.patch_filename)
debug("Patch: %s" % self.patch_file)
patch_command = "git diff --relative %s..%s > %s" % \
(self.upstream_tag, self.git_commit_id, self.patch_file)
debug("Generating patch with: %s" % patch_command)
output = run_command(patch_command)
print(output)
# Creating two copies of the patch here in the temp build directories
# just out of laziness. Some builders need sources in SOURCES and
# others need them in the git copy. Being lazy here avoids one-off
# hacks and both copies get cleaned up anyhow.
run_command("cp %s %s" % (self.patch_file, self.rpmbuild_sourcedir))
def _insert_patches_into_spec_file(self):
"""
Insert the generated patches into the copy of the spec file we'll be
building with.
"""
f = open(self.spec_file, 'r')
lines = f.readlines()
patch_pattern = re.compile('^Patch(\d+):')
source_pattern = re.compile('^Source\d+:')
# Find the largest PatchX: line, or failing that SourceX:
patch_number = 0 # What number should we use for our PatchX line
patch_insert_index = 0 # Where to insert our PatchX line in the list
patch_apply_index = 0 # Where to insert our %patchX line in the list
array_index = 0 # Current index in the array
for line in lines:
match = source_pattern.match(line)
if match:
patch_insert_index = array_index + 1
match = patch_pattern.match(line)
if match:
patch_insert_index = array_index + 1
patch_number = int(match.group(1)) + 1
if line.startswith("%prep"):
# We'll apply patch right after prep if there's no %setup line
patch_apply_index = array_index + 2
elif line.startswith("%setup"):
patch_apply_index = array_index + 2 # already added a line
array_index += 1
debug("patch_insert_index = %s" % patch_insert_index)
debug("patch_apply_index = %s" % patch_apply_index)
if patch_insert_index == 0 or patch_apply_index == 0:
error_out("Unable to insert PatchX or %patchX lines in spec file")
lines.insert(patch_insert_index, "Patch%s: %s\n" % (patch_number,
self.patch_filename))
lines.insert(patch_apply_index, "%%patch%s -p1\n" % (patch_number))
f.close()
# Now write out the modified lines to the spec file copy:
f = open(self.spec_file, 'w')
for line in lines:
f.write(line)
f.close()
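# For illustration (hypothetical spec content): given lines such as
#   Source0: spacewalk-java-0.4.0.tar.gz
#   Patch0: existing-fix.patch
#   %prep
#   %setup -q
# the method above inserts "Patch1: <generated patch filename>" right after
# the last SourceX/PatchX line and "%patch1 -p1" on the line following %setup.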
def _get_upstream_version(self):
"""
Get the upstream version. Checks for "upstreamversion" in the spec file
and uses it if found. Otherwise assumes the upstream version is equal
to the version we're building.
i.e. satellite-java-0.4.15 will be built on spacewalk-java-0.4.15
with just the package release being incremented on rebuilds.
"""
# Use upstreamversion if defined in the spec file:
(status, output) = commands.getstatusoutput(
"cat %s | grep 'define upstreamversion' | awk '{ print $3 ; exit }'" %
self.spec_file)
if status == 0 and output != "":
return output
if self.test:
return self.build_version.split("-")[0]
# Otherwise, assume we use our version:
else:
return self.display_version
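# For illustration (hypothetical spec line): the grep/awk pipeline above picks
# up a definition such as
#   %define upstreamversion 0.4.15
# and returns "0.4.15" as the upstream version.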
def _get_rpmbuild_dir_options(self):
"""
Override parent behavior slightly.
These packages store tarballs, patches, etc. directly in their project
dir, so we use the git copy we create as the sources directory when
building the package, ensuring everything can be found:
"""
return """--define "_sourcedir %s" --define "_builddir %s" --define "_srcrpmdir %s" --define "_rpmdir %s" """ % \
(self.rpmbuild_sourcedir, self.rpmbuild_builddir,
self.rpmbuild_basedir, self.rpmbuild_basedir)

src/spacewalk/releng/cli.py Normal file

@@ -0,0 +1,630 @@
#
# Copyright (c) 2009 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
"""
Command line interface for building Spacewalk and Satellite packages from git tags.
"""
import sys
import os
import random
import commands
import ConfigParser
from optparse import OptionParser
from string import strip
SCRIPT_DIR = os.path.abspath(os.path.join(os.path.dirname(
os.path.abspath(sys.argv[0])), "../"))
from spacewalk.releng.builder import Builder, NoTgzBuilder
from spacewalk.releng.tagger import VersionTagger, ReleaseTagger
from spacewalk.releng.common import DEFAULT_BUILD_DIR
from spacewalk.releng.common import find_git_root, run_command, \
error_out, debug, get_project_name, get_relative_project_dir, \
check_tag_exists, get_latest_tagged_version
BUILD_PROPS_FILENAME = "build.py.props"
GLOBAL_BUILD_PROPS_FILENAME = "tito.props"
GLOBALCONFIG_SECTION = "globalconfig"
DEFAULT_BUILDER = "default_builder"
DEFAULT_TAGGER = "default_tagger"
ASSUMED_NO_TAR_GZ_PROPS = """
[buildconfig]
builder = spacewalk.releng.builder.NoTgzBuilder
tagger = spacewalk.releng.tagger.ReleaseTagger
"""
def get_class_by_name(name):
"""
Get a Python class specified by its fully qualified name.
NOTE: Does not actually create an instance of the object, only returns
a Class object.
"""
# Split name into module and class name:
tokens = name.split(".")
class_name = tokens[-1]
module = ""
for s in tokens[0:-1]:
if len(module) > 0:
module = module + "."
module = module + s
mod = __import__(tokens[0])
components = name.split('.')
for comp in components[1:-1]:
mod = getattr(mod, comp)
debug("Importing %s" % name)
c = getattr(mod, class_name)
return c
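# For illustration (hypothetical call): get_class_by_name returns the class
# object itself and leaves instantiation to the caller, e.g.
#   builder_class = get_class_by_name("spacewalk.releng.builder.NoTgzBuilder")
#   builder = builder_class(name="spacewalk-java", ...)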
def read_user_config():
config = {}
file_loc = os.path.expanduser("~/.spacewalk-build-rc")
try:
f = open(file_loc)
except:
# File doesn't exist but that's ok because it's optional.
return config
for line in f.readlines():
if line.strip() == "":
continue
tokens = line.split("=")
if len(tokens) != 2:
raise Exception("Error parsing ~/.spacewalk-build-rc: %s" % line)
config[tokens[0]] = strip(tokens[1])
return config
def lookup_build_dir(user_config):
"""
Read build_dir in from ~/.spacewalk-build-rc if it exists, otherwise
return the current working directory.
"""
build_dir = DEFAULT_BUILD_DIR
if user_config.has_key('RPMBUILD_BASEDIR'):
build_dir = user_config["RPMBUILD_BASEDIR"]
return build_dir
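# For illustration (hypothetical values): ~/.spacewalk-build-rc is a plain
# KEY=value file; the keys consulted in this commit are RPMBUILD_BASEDIR and
# KOJI_OPTIONS, e.g.
#   RPMBUILD_BASEDIR=/tmp/my-builds
#   KOJI_OPTIONS=build --nowait
# which read_user_config() parses into
#   {'RPMBUILD_BASEDIR': '/tmp/my-builds', 'KOJI_OPTIONS': 'build --nowait'}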
class CLI:
"""
Parent command line interface class.
Simply delegated to sub-modules which group appropriate command line
options together.
"""
def main(self):
if len(sys.argv) < 2 or not CLI_MODULES.has_key(sys.argv[1]):
self._usage()
sys.exit(1)
module_class = CLI_MODULES[sys.argv[1]]
module = module_class()
module.main()
def _usage(self):
print("Usage: %s MODULENAME --help" %
(os.path.basename(sys.argv[0])))
print("Supported modules:")
print(" tag - Tag package releases.")
print(" build - Build packages.")
print(" report - Display various reports on the repo.")
print(" init - Initialize directory for use by tito.")
class BaseCliModule(object):
""" Common code used amongst all CLI modules. """
def __init__(self):
self.parser = None
self.global_config = None
self.options = None
self.pkg_config = None
self.user_config = read_user_config()
def _add_common_options(self):
"""
Add options to the command line parser which are relevant to all
modules.
"""
# Options used for many different activities:
self.parser.add_option("--debug", dest="debug", action="store_true",
help="print debug messages", default=False)
self.parser.add_option("--offline", dest="offline", action="store_true",
help="do not attempt any remote communication (avoid using " +
"this please)",
default=False)
def main(self):
(self.options, args) = self.parser.parse_args()
self._validate_options()
if len(sys.argv) < 2:
self.parser.error("Must supply an argument. Try -h for help.")
self.global_config = self._read_global_config()
if self.options.debug:
os.environ['DEBUG'] = "true"
def _read_global_config(self):
"""
Read global build.py configuration from the rel-eng dir of the git
repository we're being run from.
"""
rel_eng_dir = os.path.join(find_git_root(), "rel-eng")
filename = os.path.join(rel_eng_dir, GLOBAL_BUILD_PROPS_FILENAME)
if not os.path.exists(filename):
# HACK: Try the old filename location, pre-tito rename:
oldfilename = os.path.join(rel_eng_dir, "global.build.py.props")
if not os.path.exists(oldfilename):
error_out("Unable to locate branch configuration: %s\nPlease run 'tito init'" %
filename)
config = ConfigParser.ConfigParser()
config.read(filename)
# Verify the config contains what we need from it:
required_global_config = [
(GLOBALCONFIG_SECTION, DEFAULT_BUILDER),
(GLOBALCONFIG_SECTION, DEFAULT_TAGGER),
]
for section, option in required_global_config:
if not config.has_section(section) or not \
config.has_option(section, option):
error_out("%s missing required config: %s %s" % (
filename, section, option))
return config
def _read_project_config(self, project_name, build_dir, tag, no_cleanup):
"""
Read and return project build properties if they exist.
This is done by checking for a build.py.props in the project's
directory at the time the tag was made.
To accommodate older tags prior to build.py, we also check for
the presence of a Makefile with NO_TAR_GZ, and include a hack to
assume build properties in this scenario.
If no project specific config can be found, use the global config.
"""
debug("Determined package name to be: %s" % project_name)
properties_file = None
wrote_temp_file = False
# Use the properties file in the current project directory, if it
# exists:
current_props_file = os.path.join(os.getcwd(), BUILD_PROPS_FILENAME)
if (os.path.exists(current_props_file)):
properties_file = current_props_file
# Check for a build.py.props back when this tag was created and use it
# instead. (if it exists)
if tag:
relative_dir = get_relative_project_dir(project_name, tag)
cmd = "git show %s:%s%s" % (tag, relative_dir,
BUILD_PROPS_FILENAME)
debug(cmd)
(status, output) = commands.getstatusoutput(cmd)
temp_filename = "%s-%s" % (random.randint(1, 10000),
BUILD_PROPS_FILENAME)
temp_props_file = os.path.join(build_dir, temp_filename)
if status == 0:
properties_file = temp_props_file
f = open(properties_file, 'w')
f.write(output)
f.close()
wrote_temp_file = True
else:
# HACK: No build.py.props found, but to accommodate packages
# tagged before they existed, check for a Makefile with
# NO_TAR_GZ defined and make some assumptions based on that.
cmd = "git show %s:%s%s | grep NO_TAR_GZ" % \
(tag, relative_dir, "Makefile")
debug(cmd)
(status, output) = commands.getstatusoutput(cmd)
if status == 0 and output != "":
properties_file = temp_props_file
debug("Found Makefile with NO_TAR_GZ")
f = open(properties_file, 'w')
f.write(ASSUMED_NO_TAR_GZ_PROPS)
f.close()
wrote_temp_file = True
config = ConfigParser.ConfigParser()
if properties_file != None:
debug("Using build properties: %s" % properties_file)
config.read(properties_file)
else:
debug("Unable to locate custom build properties for this package.")
debug(" Using global.build.py.props")
# TODO: Not thrilled with this:
if wrote_temp_file and not no_cleanup:
# Delete the temp properties file we created.
run_command("rm %s" % properties_file)
return config
def _validate_options(self):
"""
Subclasses can implement if they need to check for any
incompatible cmd line options.
"""
pass
class BuildModule(BaseCliModule):
def __init__(self):
BaseCliModule.__init__(self)
usage = "usage: %prog build [options]"
self.parser = OptionParser(usage)
self._add_common_options()
self.parser.add_option("--tgz", dest="tgz", action="store_true",
help="Build .tar.gz")
self.parser.add_option("--srpm", dest="srpm", action="store_true",
help="Build srpm")
self.parser.add_option("--rpm", dest="rpm", action="store_true",
help="Build rpm")
self.parser.add_option("--dist", dest="dist", metavar="DISTTAG",
help="Dist tag to apply to srpm and/or rpm. (i.e. .el5)")
self.parser.add_option("--test", dest="test", action="store_true",
help="use current branch HEAD instead of latest package tag")
self.parser.add_option("--no-cleanup", dest="no_cleanup",
action="store_true",
help="do not clean up temporary build directories/files")
self.parser.add_option("--tag", dest="tag", metavar="PKGTAG",
help="build a specific tag instead of the latest version " +
"(i.e. spacewalk-java-0.4.0-1)")
self.parser.add_option("--release", dest="release",
action="store_true", help="%s %s %s" % (
"Release package according to repo configuration.",
"(import into CVS and submit to build system, or create ",
"src.rpm's and submit directly to koji)"
))
self.parser.add_option("--cvs-release", dest="cvs_release",
action="store_true", help="Release package only in CVS. (if possible)"
)
self.parser.add_option("--koji-release", dest="koji_release",
action="store_true", help="Release package only in Koji. (if possible)"
)
self.parser.add_option("--upload-new-source", dest="cvs_new_sources",
action="append",
help="Upload a new source tarball to CVS lookaside. (i.e. runs 'make new-sources') Must be " \
"used until 'sources' file is committed to CVS.")
def main(self):
BaseCliModule.main(self)
build_dir = lookup_build_dir(self.user_config)
package_name = get_project_name(tag=self.options.tag)
build_tag = None
build_version = None
# Determine which package version we should build:
if self.options.tag:
build_tag = self.options.tag
build_version = build_tag[len(package_name + "-"):]
else:
build_version = get_latest_tagged_version(package_name)
if build_version == None:
error_out(["Unable to lookup latest package info.",
"Perhaps you need to tag first?"])
build_tag = "%s-%s" % (package_name, build_version)
if not self.options.test:
check_tag_exists(build_tag, offline=self.options.offline)
self.pkg_config = self._read_project_config(package_name, build_dir,
self.options.tag, self.options.no_cleanup)
builder = self._create_builder(package_name, build_tag,
build_version, self.options, self.pkg_config,
build_dir)
builder.run(self.options)
def _create_builder(self, package_name, build_tag, build_version, options,
pkg_config, build_dir):
"""
Create (but don't run) the builder class. Builder object may be
used by other objects without actually having run() called.
"""
builder_class = None
if pkg_config.has_option("buildconfig", "builder"):
builder_class = get_class_by_name(pkg_config.get("buildconfig",
"builder"))
else:
builder_class = get_class_by_name(self.global_config.get(
GLOBALCONFIG_SECTION, DEFAULT_BUILDER))
debug("Using builder class: %s" % builder_class)
# Instantiate the builder:
builder = builder_class(
name=package_name,
version=build_version,
tag=build_tag,
build_dir=build_dir,
pkg_config=pkg_config,
global_config=self.global_config,
user_config=self.user_config,
dist=options.dist,
test=options.test,
offline=options.offline)
return builder
def _validate_options(self):
if self.options.srpm and self.options.rpm:
error_out("Cannot combine --srpm and --rpm")
if self.options.test and self.options.tag:
error_out("Cannot build test version of specific tag.")
if (self.options.srpm or self.options.rpm) and self.options.release:
error_out("Cannot combine --srpm/--rpm with --release.")
if self.options.release and (self.options.cvs_release or
self.options.koji_release):
error_out(["Cannot combine --cvs-release/--koji-release with --release.",
"(--release includes both)"])
if self.options.release and self.options.test:
error_out("Cannot combine --release with --test.")
class TagModule(BaseCliModule):
def __init__(self):
BaseCliModule.__init__(self)
usage = "usage: %prog tag [options]"
self.parser = OptionParser(usage)
self._add_common_options()
# Options for tagging new package releases:
# NOTE: deprecated and no longer needed:
self.parser.add_option("--tag-release", dest="tag_release",
action="store_true",
help="Deprecated, no longer required.")
self.parser.add_option("--keep-version", dest="keep_version",
action="store_true",
help="Use spec file version/release exactly as specified in spec file to tag package.")
self.parser.add_option("--no-auto-changelog", action="store_true",
default=False,
help="Don't automatically create a changelog entry for this tag if none is found")
def main(self):
BaseCliModule.main(self)
if self.global_config.has_option(GLOBALCONFIG_SECTION,
"block_tagging"):
debug("block_tagging defined in tito.props")
error_out("Tagging has been disabled in this git branch.")
build_dir = lookup_build_dir(self.user_config)
package_name = get_project_name(tag=None)
self.pkg_config = self._read_project_config(package_name, build_dir,
None, None)
tagger_class = None
if self.pkg_config.has_option("buildconfig", "tagger"):
tagger_class = get_class_by_name(self.pkg_config.get("buildconfig",
"tagger"))
else:
tagger_class = get_class_by_name(self.global_config.get(
GLOBALCONFIG_SECTION, DEFAULT_TAGGER))
debug("Using tagger class: %s" % tagger_class)
tagger = tagger_class(global_config=self.global_config,
keep_version=self.options.keep_version)
tagger.run(self.options)
class InitModule(BaseCliModule):
""" CLI Module for initializing a project for use with tito. """
def __init__(self):
BaseCliModule.__init__(self)
usage = "usage: %prog init [options]"
self.parser = OptionParser(usage)
self._add_common_options()
def main(self):
# DO NOT CALL BaseCliModule.main(self)
# we are initializing tito to work in this module and
# calling main will result in a configuration error.
rel_eng_dir = os.path.join(find_git_root(), "rel-eng")
filename = os.path.join(rel_eng_dir, GLOBAL_BUILD_PROPS_FILENAME)
if not os.path.exists(filename):
if not os.path.exists(rel_eng_dir):
commands.getoutput("mkdir -p %s" % rel_eng_dir)
# write out tito.props
out_f = open(filename, 'w')
out_f.write("[globalconfig]\n")
out_f.write("default_builder = spacewalk.releng.builder.Builder\n")
out_f.write("default_tagger = spacewalk.releng.tagger.VersionTagger\n")
out_f.close()
commands.getoutput('git commit -m "Initialized to use tito. "')
pkg_dir = os.path.join(rel_eng_dir, "packages")
if not os.path.exists(pkg_dir):
commands.getoutput("mkdir -p %s" % pkg_dir)
print("Initialized tito in %s" % rel_eng_dir)
class ReportModule(BaseCliModule):
""" CLI Module For Various Reports. """
def __init__(self):
BaseCliModule.__init__(self)
usage = "usage: %prog report [options]"
self.parser = OptionParser(usage)
self._add_common_options()
self.parser.add_option("--untagged-diffs", dest="untagged_report",
action="store_true",
help= "%s %s %s" % (
"Print out diffs for all packages with changes between",
"their most recent tag and HEAD. Useful for determining",
"which packages are in need of a re-tag."
))
self.parser.add_option("--untagged-commits", dest="untagged_commits",
action="store_true",
help= "%s %s %s" % (
"Print out the list for all packages with changes between",
"their most recent tag and HEAD. Useful for determining",
"which packages are in need of a re-tag."
))
def main(self):
BaseCliModule.main(self)
if self.options.untagged_report:
self._run_untagged_report(self.global_config)
sys.exit(1)
if self.options.untagged_commits:
self._run_untagged_commits(self.global_config)
sys.exit(1)
def _run_untagged_commits(self, global_config):
"""
Display a report of all packages with commits between HEAD and
their most recent tag, showing the git log for each. Used to
determine which packages are in need of a re-tag.
"""
print("Scanning for packages that may need to be tagged...")
print("")
git_root = find_git_root()
rel_eng_dir = os.path.join(git_root, "rel-eng")
os.chdir(git_root)
package_metadata_dir = os.path.join(rel_eng_dir, "packages")
for root, dirs, files in os.walk(package_metadata_dir):
for md_file in files:
if md_file[0] == '.':
continue
f = open(os.path.join(package_metadata_dir, md_file))
(version, relative_dir) = f.readline().strip().split(" ")
project_dir = os.path.join(git_root, relative_dir)
self._print_log(global_config, md_file, version, project_dir)
def _run_untagged_report(self, global_config):
"""
Display a report of all packages with differences between HEAD and
their most recent tag, as well as a patch for that diff. Used to
determine which packages are in need of a rebuild.
"""
print("Scanning for packages that may need to be tagged...")
print("")
git_root = find_git_root()
rel_eng_dir = os.path.join(git_root, "rel-eng")
os.chdir(git_root)
package_metadata_dir = os.path.join(rel_eng_dir, "packages")
for root, dirs, files in os.walk(package_metadata_dir):
for md_file in files:
if md_file[0] == '.':
continue
f = open(os.path.join(package_metadata_dir, md_file))
(version, relative_dir) = f.readline().strip().split(" ")
project_dir = os.path.join(git_root, relative_dir)
self._print_diff(global_config, md_file, version, project_dir,
relative_dir)
def _print_log(self, global_config, package_name, version, project_dir):
"""
Print the log between the most recent package tag and HEAD, if
necessary.
"""
last_tag = "%s-%s" % (package_name, version)
try:
os.chdir(project_dir)
patch_command = "git log --pretty=oneline --relative %s..%s -- %s" % \
(last_tag, "HEAD", ".")
output = run_command(patch_command)
if (output):
print("-" * (len(last_tag) + 8))
print("%s..%s:" % (last_tag, "HEAD"))
print(output)
except:
print("%s no longer exists" % project_dir)
def _print_diff(self, global_config, package_name, version,
full_project_dir, relative_project_dir):
"""
Print a diff between the most recent package tag and HEAD, if
necessary.
"""
last_tag = "%s-%s" % (package_name, version)
os.chdir(full_project_dir)
patch_command = "git diff --relative %s..%s" % \
(last_tag, "HEAD")
output = run_command(patch_command)
# If the diff contains 1 line then there is no diff:
linecount = len(output.split("\n"))
if linecount == 1:
return
name_and_version = "%s %s" % (package_name, relative_project_dir)
# Otherwise, print out info on the diff for this package:
print("#" * len(name_and_version))
print(name_and_version)
print("#" * len(name_and_version))
print("")
print(patch_command)
print("")
print(output)
print("")
print("")
print("")
print("")
print("")
CLI_MODULES = {
"build": BuildModule,
"tag": TagModule,
"report": ReportModule,
"init": InitModule
}
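# A hypothetical dispatcher could look a module up by sub-command name,
# e.g. CLI_MODULES["report"]().main() (a sketch only; the real argument
# handling lives in BaseCliModule).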

View file

@ -0,0 +1,233 @@
#
# Copyright (c) 2008 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
import os
import re
import os.path
import sys
import commands
from spacewalk.releng.cli import SCRIPT_DIR
DEFAULT_BUILD_DIR = "/tmp/spacewalk-build"
def error_out(error_msgs):
"""
Print the given error message (or list of messages) and exit.
"""
if isinstance(error_msgs, list):
for line in error_msgs:
print "ERROR: %s" % line
else:
print "ERROR: %s" % error_msgs
sys.exit(1)
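# Example (illustrative): error_out("no spec file found") prints
# "ERROR: no spec file found" and exits with status 1; passing a list
# prints one ERROR line per entry.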
def find_spec_file(in_dir=None):
"""
Find the first spec file in the given directory, defaulting to the
current working directory. (hopefully there's only one)
Returns only the file name, rather than the full path.
"""
if in_dir == None:
in_dir = os.getcwd()
for f in os.listdir(in_dir):
if f.endswith(".spec"):
return f
error_out(["Unable to locate a spec file in %s" % in_dir])
def find_git_root():
"""
Find the top-level directory for this git repository.
Returned as a full path.
"""
(status, cdup) = commands.getstatusoutput("git rev-parse --show-cdup")
if status > 0:
error_out(["%s does not appear to be within a git checkout." % \
os.getcwd()])
if cdup == "":
cdup = "./"
return os.path.abspath(cdup)
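# e.g. when run from a hypothetical <git_root>/java/ sub-directory,
# "git rev-parse --show-cdup" prints "../" and the absolute path of the
# repository root is returned.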
def run_command(command):
(status, output) = commands.getstatusoutput(command)
if status > 0:
sys.stderr.write("\n########## ERROR ############\n")
sys.stderr.write("Error running command: %s\n" % command)
sys.stderr.write("Status code: %s\n" % status)
sys.stderr.write("Command output: %s\n" % output)
raise Exception("Error running command")
return output
def check_tag_exists(tag, offline=False):
"""
Check that the given git tag exists in a git repository.
"""
(status, output) = commands.getstatusoutput("git tag | grep %s" % tag)
if status > 0:
error_out("Tag does not exist locally: [%s]" % tag)
tag_sha1 = run_command(
"git ls-remote ./. --tag %s | awk '{ print $1 ; exit }'"
% tag)
debug("Local tag SHA1: %s" % tag_sha1)
if offline:
return
repo_url = get_git_repo_url()
print("Checking for tag [%s] in git repo [%s]" % (tag, repo_url))
upstream_tag_sha1 = run_command(
"git ls-remote %s --tag %s | awk '{ print $1 ; exit }'" %
(repo_url, tag))
if upstream_tag_sha1 == "":
error_out(["Tag does not exist in remote git repo: %s" % tag,
"You must tag, then git push and git push --tags"])
debug("Remote tag SHA1: %s" % upstream_tag_sha1)
if upstream_tag_sha1 != tag_sha1:
error_out("Tag %s references %s locally but %s upstream." % (tag,
tag_sha1, upstream_tag_sha1))
def debug(text):
"""
Print the text if --debug was specified.
"""
if os.environ.has_key('DEBUG'):
print text
def get_spec_version_and_release(sourcedir, spec_file_name):
command = """rpm -q --qf '%%{version}-%%{release}\n' --define "_sourcedir %s" --define 'dist %%undefined' --specfile %s 2> /dev/null | head -1""" % (sourcedir, spec_file_name)
return run_command(command)
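# Example output (hypothetical spec): "0.6.31-1" -- version and release
# joined by a dash, as requested by the --qf format string; head -1
# keeps only the first line for multi-package specs.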
def get_project_name(tag=None):
"""
Extract the project name from the specified tag or a spec file in the
current working directory. Error out if neither is present.
"""
if tag != None:
p = re.compile('(.*?)-(\d.*)')
m = p.match(tag)
if not m:
error_out("Unable to determine project name in tag: %s" % tag)
return m.group(1)
else:
spec_file_path = os.path.join(os.getcwd(), find_spec_file())
if not os.path.exists(spec_file_path):
error_out("Unable to get project name from spec file: %s" %
spec_file_path)
output = run_command(
"cat %s | grep 'Name:' | awk '{ print $2 ; exit }'" %
spec_file_path)
return output
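# Illustrative tag parsing above: a hypothetical tag
# "spacewalk-backend-0.6.31-1" matches '(.*?)-(\d.*)' and yields the
# project name "spacewalk-backend".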
def get_relative_project_dir(project_name, commit):
"""
Return the project's sub-directory relative to the git root.
This could be a different directory than where the project currently
resides, so we export a copy of the project's metadata from
rel-eng/packages/ at the point in time of the tag we are building.
"""
cmd = "git show %s:rel-eng/packages/%s" % (commit,
project_name)
pkg_metadata = run_command(cmd).strip()
tokens = pkg_metadata.split(" ")
debug("Got package metadata: %s" % tokens)
return tokens[1]
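# Metadata sketch: if the "git show" above printed a hypothetical line
# "0.4.1-1 java/", this function would return "java/".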
def get_build_commit(tag, test=False):
""" Return the git commit we should build. """
if test:
return get_git_head_commit()
else:
tag_sha1 = run_command(
"git ls-remote ./. --tag %s | awk '{ print $1 ; exit }'"
% tag)
commit_id = run_command('git rev-list --max-count=1 %s' %
tag_sha1)
return commit_id
def get_git_head_commit():
""" Return the SHA1 of the HEAD commit on the current git branch. """
return commands.getoutput('git rev-parse --verify HEAD')
def get_commit_timestamp(sha1_or_tag):
"""
Get the timestamp of the git commit or tag we're building. Used to
keep the hash the same on all .tar.gz's we generate for a particular
version regardless of when they are generated.
"""
output = run_command(
"git rev-list --timestamp --max-count=1 %s | awk '{print $1}'"
% sha1_or_tag)
return output
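# e.g. "git rev-list --timestamp --max-count=1 HEAD" prints something
# like "1247540234 <sha1>" (illustrative values); awk keeps only the
# epoch timestamp column.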
def create_tgz(git_root, prefix, commit, relative_dir, rel_eng_dir,
dest_tgz):
"""
Create a .tar.gz from a project's source in git.
"""
os.chdir(os.path.abspath(git_root))
timestamp = get_commit_timestamp(commit)
timestamp_script = os.path.join(SCRIPT_DIR,
"tar-fixup-stamp-comment.pl")
if not os.path.exists(timestamp_script):
error_out("Unable to locate required script: %s" % timestamp_script)
archive_cmd = "git archive --format=tar --prefix=%s/ %s:%s | perl %s %s %s | gzip -n -c - | tee %s" % \
(
prefix,
commit,
relative_dir,
timestamp_script,
timestamp,
commit,
dest_tgz
)
debug(archive_cmd)
run_command(archive_cmd)
def get_git_repo_url():
"""
Return the url of this git repo.
Uses the remote.origin.url setting from the repository's .git/config.
"""
return run_command("git config remote.origin.url")
def get_latest_tagged_version(package_name):
"""
Return the latest tagged version of this package on the current branch.
Uses the info in rel-eng/packages/package-name.
Returns None if file does not exist.
"""
git_root = find_git_root()
rel_eng_dir = os.path.join(git_root, "rel-eng")
file_path = "%s/packages/%s" % (rel_eng_dir, package_name)
debug("Getting latest package info from: %s" % file_path)
if not os.path.exists(file_path):
return None
output = run_command("awk '{ print $1 ; exit }' %s" % file_path)
if output == None or output.strip() == "":
error_out("Error looking up latest tagged version in: %s" % file_path)
return output

View file

@ -0,0 +1,371 @@
#
# Copyright (c) 2008 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
""" Code for tagging Spacewalk/Satellite packages. """
import os
import re
import sys
import commands
import StringIO
import shutil
import subprocess
import tempfile
import textwrap
from time import strftime
from spacewalk.releng.cli import SCRIPT_DIR
from spacewalk.releng.common import *
class VersionTagger(object):
"""
Standard Tagger class, used for tagging packages built from source in
git. (as opposed to packages which commit a tarball directly into git).
Releases will be tagged by incrementing the package version,
and the actual RPM "release" will always be set to 1.
"""
def __init__(self, global_config=None, keep_version=False):
self.git_root = find_git_root()
self.rel_eng_dir = os.path.join(self.git_root, "rel-eng")
self.global_config = global_config
self.full_project_dir = os.getcwd()
self.spec_file_name = find_spec_file()
self.project_name = get_project_name(tag=None)
self.relative_project_dir = self._get_relative_project_dir(
self.git_root) # i.e. java/
self.spec_file = os.path.join(self.full_project_dir,
self.spec_file_name)
self.keep_version = keep_version
self.today = strftime("%a %b %d %Y")
(self.git_user, self.git_email) = self._get_git_user_info()
self.changelog_regex = re.compile('\\*\s%s\s%s(\s<%s>)?' % (self.today,
self.git_user, self.git_email))
self._no_auto_changelog = False
def run(self, options):
"""
Perform the actions requested of the tagger.
NOTE: this method may do nothing if the user requested no build actions
be performed. (i.e. only release tagging, etc)
"""
if options.tag_release:
print("WARNING: --tag-release option no longer necessary, 'tito tag' will accomplish the same thing.")
if options.no_auto_changelog:
self._no_auto_changelog=True
self._tag_release()
def _tag_release(self):
"""
Tag a new version of the package. (i.e. x.y.z+1)
"""
self._check_today_in_changelog()
new_version = self._bump_version()
self._check_tag_does_not_exist(self._get_new_tag(new_version))
self._update_changelog(new_version)
self._update_package_metadata(new_version)
def _check_today_in_changelog(self):
"""
Verify that there is a changelog entry for today's date and the git
user's name and email address.
i.e. * Thu Nov 27 2008 My Name <me@example.com>
"""
f = open(self.spec_file, 'r')
found_changelog = False
for line in f.readlines():
match = self.changelog_regex.match(line)
if not found_changelog and match:
found_changelog = True
f.close()
if not found_changelog:
if self._no_auto_changelog:
error_out("No changelog entry found: '* %s %s <%s>'" % (
self.today, self.git_user, self.git_email))
else:
self._make_changelog()
else:
debug("Found changelog entry.")
def _make_changelog(self):
"""
Create a new changelog entry in the spec, with line items from git history.
"""
in_f = open(self.spec_file, 'r')
out_f = open(self.spec_file + ".new", 'w')
found_changelog = False
for line in in_f.readlines():
out_f.write(line)
if not found_changelog and line.startswith("%changelog"):
found_changelog = True
old_version = get_latest_tagged_version(self.project_name)
# don't die if this is a new package with no history
if old_version != None:
last_tag = "%s-%s" % (self.project_name, old_version)
patch_command = \
"git log --pretty=format:%%s\ \(%%ae\)" \
" --relative %s..%s -- %s" % \
(last_tag, "HEAD", ".")
output = run_command(patch_command)
else:
output = "new package"
fd, name = tempfile.mkstemp()
os.write(fd, "# No changelog entry found; please edit the following\n")
header = "* %s %s <%s>\n" % (self.today, self.git_user,
self.git_email)
os.write(fd, header)
for cmd_out in output.split("\n"):
os.write(fd, "- ")
os.write(fd, "\n ".join(textwrap.wrap(cmd_out, 77)))
os.write(fd, "\n")
os.write(fd, "\n")
editor = 'vi'
if os.environ.has_key("EDITOR"):
editor = os.environ["EDITOR"]
subprocess.call([editor, name])
os.lseek(fd, 0, 0)
file = os.fdopen(fd)
for line in file.readlines():
if not line.startswith("#"):
out_f.write(line)
output = file.read()
file.close()
os.unlink(name)
in_f.close()
out_f.close()
shutil.move(self.spec_file + ".new", self.spec_file)
def _update_changelog(self, new_version):
"""
Update the changelog with the new version.
"""
# Not thrilled about having to re-read the file here but we need to
# check for the changelog entry before making any modifications, then
# bump the version, then update the changelog.
f = open(self.spec_file, 'r')
buf = StringIO.StringIO()
found_match = False
for line in f.readlines():
match = self.changelog_regex.match(line)
if match and not found_match:
buf.write("%s %s\n" % (match.group(), new_version))
found_match = True
else:
buf.write(line)
f.close()
# Write out the new file contents with our modified changelog entry:
f = open(self.spec_file, 'w')
f.write(buf.getvalue())
f.close()
buf.close()
def _get_relative_project_dir(self, git_root):
"""
Returns the path to the project we're working with, relative to the
git root.
*MUST* be called before doing any os.chdir().
i.e. java/, satellite/install/Spacewalk-setup/, etc.
"""
current_dir = os.getcwd()
relative = current_dir[len(git_root) + 1:] + "/"
return relative
def _bump_version(self, release=False):
"""
Bump up the package version in the spec file.
Set release to True to bump the package release instead.
Checks for the keep version option and if found, won't actually
bump the version or release.
"""
old_version = get_latest_tagged_version(self.project_name)
if old_version == None:
old_version = "untagged"
# TODO: Do this here instead of calling out to an external Perl script:
if not self.keep_version:
bump_type = "bump-version"
if release:
bump_type = "bump-release"
script_path = os.path.join(SCRIPT_DIR, "bump-version.pl")
cmd = "perl %s %s --specfile %s" % \
(script_path, bump_type, self.spec_file)
run_command(cmd)
new_version = self._get_spec_version_and_release()
if new_version.strip() == "":
error_out("Error getting bumped package version. (can spec file be parsed?")
print "Tagging new version of %s: %s -> %s" % (self.project_name,
old_version, new_version)
return new_version
def _update_package_metadata(self, new_version, release=False):
"""
We track package metadata in the rel-eng/packages/ directory. Each
file here stores the latest package version (for the git branch you
are on) as well as the relative path to the project's code. (from the
git root)
Set release to True when bumping the package release. (as opposed to
its version)
"""
self._clear_package_metadata()
suffix = ""
# If global config specifies a tag suffix, use it:
if self.global_config.has_option("globalconfig", "tag_suffix"):
suffix = self.global_config.get("globalconfig", "tag_suffix")
new_version_w_suffix = "%s%s" % (new_version, suffix)
# Write out our package metadata:
metadata_file = os.path.join(self.rel_eng_dir, "packages",
self.project_name)
f = open(metadata_file, 'w')
f.write("%s %s\n" % (new_version_w_suffix, self.relative_project_dir))
f.close()
# Git add it (in case it's a new file):
run_command("git add %s" % metadata_file)
run_command("git add %s" % os.path.join(self.full_project_dir,
self.spec_file_name))
# Just an informative message appearing in the commit log:
release_type = "release"
if release:
release_type = "minor release"
run_command('git commit -m "Automatic commit of package ' +
'[%s] %s [%s]."' % (self.project_name, release_type,
new_version_w_suffix))
tag_msg = "Tagging package [%s] version [%s] in directory [%s]." % \
(self.project_name, new_version_w_suffix,
self.relative_project_dir)
new_tag = self._get_new_tag(new_version)
print "Creating new tag [%s]" % new_tag
run_command('git tag -m "%s" %s' % (tag_msg, new_tag))
print("You must run [git push && git push --tags] before this " +
"tag can be used")
def _check_tag_does_not_exist(self, new_tag):
status, output = commands.getstatusoutput('git tag | grep %s' % new_tag)
if status == 0:
raise Exception("Tag %s already exists!" % new_tag)
def _clear_package_metadata(self):
"""
Remove any rel-eng/packages/ files that point at the relative path
of the package we're tagging but carry a different package name.
Normally this is a no-op, but if we were renaming oldpackage to
newpackage, this would git rm rel-eng/packages/oldpackage. (the new
rel-eng/packages/newpackage file is written out afterwards by
_update_package_metadata)
"""
metadata_dir = os.path.join(self.rel_eng_dir, "packages")
for filename in os.listdir(metadata_dir):
metadata_file = os.path.join(metadata_dir, filename) # full path
if os.path.isdir(metadata_file) or filename.startswith("."):
continue
temp_file = open(metadata_file, 'r')
(version, relative_dir) = temp_file.readline().split(" ")
relative_dir = relative_dir.strip() # sometimes has a newline
if relative_dir == self.relative_project_dir:
debug("Found metadata for our prefix: %s" %
metadata_file)
debug(" version: %s" % version)
debug(" dir: %s" % relative_dir)
if filename == self.project_name:
debug("Updating %s with new version." %
metadata_file)
else:
print "WARNING: %s also references %s" % (filename,
self.relative_project_dir)
print "Assuming package has been renamed and removing it."
run_command("git rm %s" % metadata_file)
def _get_git_user_info(self):
""" Return the user.name and user.email git config values. """
return (run_command('git config --get user.name'),
run_command('git config --get user.email'))
def _get_spec_version_and_release(self):
""" Get the package version from the spec file. """
return get_spec_version_and_release(self.full_project_dir,
self.spec_file_name)
def _get_new_tag(self, new_version):
""" Returns the actual tag we'll be creating. """
suffix = ""
# If global config specifies a tag suffix, use it:
if self.global_config.has_option("globalconfig", "tag_suffix"):
suffix = self.global_config.get("globalconfig", "tag_suffix")
return "%s-%s%s" % (self.project_name, new_version, suffix)
class ReleaseTagger(VersionTagger):
"""
Tagger which increments the spec file release instead of version.
Used for:
- Packages we build from a tarball checked directly into git.
- Satellite packages built on top of Spacewalk tarballs.
"""
def _tag_release(self):
"""
Tag a new release of the package. (i.e. x.y.z-r+1)
"""
self._check_today_in_changelog()
new_version = self._bump_version(release=True)
self._check_tag_does_not_exist(self._get_new_tag(new_version))
self._update_changelog(new_version)
self._update_package_metadata(new_version, release=True)