2012-09-05 15:45:44 +04:00
|
|
|
#!/usr/bin/python
|
|
|
|
'''
|
|
|
|
" Repodiff utility for finding differences between different repositories
|
|
|
|
"
|
|
|
|
" The tool downloads, unpacks and parses synthesis.hdlist.cz and
|
|
|
|
" changelog.xml.lzma to generate lists of newly added packages,
|
|
|
|
" removed from new repository packages and updated packages.
|
|
|
|
" The tool outputs data to standard output or to a file.
|
|
|
|
" It can show if a removed package is obsoleted by some package
|
|
|
|
" in new repositories. Also the tool can output data in format of
|
|
|
|
" HTML table.
|
|
|
|
"
|
|
|
|
" REQUIREMENTS
|
|
|
|
" ============
|
|
|
|
" - urpmi
|
|
|
|
" - python-2.7
|
|
|
|
" - lzma
|
|
|
|
" - gzip
|
|
|
|
" - libxml2 python library
|
|
|
|
" - rpm python library
|
|
|
|
"
|
|
|
|
" Copyright (C) 2012 ROSA Laboratory.
|
|
|
|
" Written by Vladimir Testov <vladimir.testov@rosalab.ru>
|
|
|
|
"
|
|
|
|
" This program is free software: you can redistribute it and/or modify
|
|
|
|
" it under the terms of the GNU General Public License or the GNU Lesser
|
|
|
|
" General Public License as published by the Free Software Foundation,
|
|
|
|
" either version 2 of the Licenses, or (at your option) any later version.
|
|
|
|
"
|
|
|
|
" This program is distributed in the hope that it will be useful,
|
|
|
|
" but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
" MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
" GNU General Public License for more details.
|
|
|
|
"
|
|
|
|
" You should have received a copy of the GNU General Public License
|
|
|
|
" and the GNU Lesser General Public License along with this program.
|
|
|
|
" If not, see <http://www.gnu.org/licenses/>.
|
|
|
|
'''
|
|
|
|
|
|
|
|
import argparse
|
|
|
|
import urllib
|
|
|
|
import tempfile
|
|
|
|
import os
|
|
|
|
import subprocess
|
|
|
|
import re
|
|
|
|
import libxml2
|
|
|
|
import sys
|
|
|
|
from datetime import date
|
|
|
|
import rpm
|
|
|
|
import shutil
|
|
|
|
import urllib2
|
|
|
|
import urpmmisc
|
|
|
|
|
|
|
|
import gettext
|
|
|
|
gettext.install('urpm-tools')
|
|
|
|
|
|
|
|
# Sub-directory names used inside the temporary working directory for the
# downloaded data of the old and the new repository sets.
old_dir = "old"
new_dir = "new"
# Default output file name in HTML mode when --output is not given.
htmlname = "repodiff.html"

# Tag names that may appear in a synthesis.hdlist record ("@tag@..." lines);
# indexes into this list are used while parsing (2=obsoletes, 5=summary,
# 6=info).
synthtags = ["provides", "requires", "obsoletes", "conflicts", "suggests",
"summary", "info"]

# Used to warn about '-' appearing inside disttag/distepoch values.
minus_check = re.compile('-')
# Dependency entry name without its trailing "[...]" version constraint.
re_search_unver = re.compile("([^\[\]]+)[\[\]]")
# "[<sign> <epoch>:<version>-<release>]" constraint at the end of an entry.
re_search_verrel = re.compile("\[(== |> |< |>= |<= )([\{\}+=0-9a-zA-Z_\.]*:)?([[\{\}+=0-9a-zA-Z_\.]+)(-[[\{\}+=0-9a-zA-Z_\.]+)?([^\[\]]*)\]$")

# File names of the repository metadata handled by this tool.  The .cz
# synthesis archive is really gzip data; it is renamed to .gz before
# unpacking so that gzip accepts it (see RenameSynthFile).
synthesis_arch = "synthesis.hdlist.cz"
synthesis_arch_renamed = "synthesis.hdlist.gz"
synthesis_file = "synthesis.hdlist"
changelog_arch = "changelog.xml.lzma"
changelog_file = "changelog.xml"
# Sentinel meaning "write to standard output"; compared as a plain string
# in CheckOutput().
default_output = "sys.stdout"
# Network timeout, in seconds, for URL checks and downloads.
timeout = 5
|
|
|
|
|
|
|
|
def ParseCommandLine():
    """Parse command-line arguments.

    Builds the argparse parser (global, text-mode, HTML-mode and filter
    option groups) and returns the parsed namespace.
    """
    parser = argparse.ArgumentParser(
        description=_("Tool for comparing sets of repositories."))
    group = parser.add_argument_group(_('global parameters'),
        description=_("Parameters used in all cases."))
    # NOTE: "required" previously received the string "True"; argparse
    # expects a boolean (the truthy string happened to behave the same).
    group.add_argument("--old", "-o", action="append", nargs='+', required=True,
        metavar="OLD_REPO", help=_("URL or PATH to old repositories"))
    group.add_argument("--new", "-n", action="append", nargs='+', required=True,
        metavar="NEW_REPO", help=_("URL or PATH to new repositories"))
    # store_false: arg.quiet is True by default and becomes False when
    # --quiet is given, i.e. it really means "not quiet".
    group.add_argument("--quiet", "-q", action="store_false",
        help=_("Hide service messages."))
    group.add_argument("--no-release", "-r", action="store_true",
        help=_("Ignore release during package compare."))
    group.add_argument("--show-summary", action="store_true",
        help=_("Output summary."))
    group.add_argument("--output", "-out", action="store", nargs=1, default='',
        metavar="OUTPUT_FILE", help=_("Change standart output to \"OUTPUT_FILE\"."))
    group.add_argument("--ignore", "-i", action="store", nargs=1, default='',
        metavar="IGNORELIST", help=_("File with list of ignored packages"))
    group = parser.add_argument_group(_('text mode parameters'),
        description=_("Parameters used only in text mode. (--html not present)"))
    group.add_argument("--size", "-s", action="store_true",
        help=_("Show differences in package sizes."))
    group.add_argument("--simple", action="store_false",
        help=_("Simple output format."))
    group.add_argument("--changelog", "-c", action="store_true",
        help=_("Show changelog difference."))
    group = parser.add_argument_group(_('HTML mode parameters'),
        description=_("Parameters used only in HTML mode. (--html is present)"))
    group.add_argument("--html", action="store_true",
        help=_("Output in HTML format, if --output is not present\
 \"%s\" will be created in current directory. \
--size, --simple and --changelog options are ignored.") % htmlname)
    group.add_argument("--reponames", action="store", nargs='+', default='',
        metavar="REPONAME", help=_("Repository names for output."))
    group = parser.add_argument_group(_('Filters'),
        description=_("Filters for output. If none selected then every type will\
 be shown"))
    group.add_argument("--show-new", "-N", action="store_true",
        help=_("Show new packages"))
    group.add_argument("--show-removed", "-R", action="store_true",
        help=_("Show removed packages"))
    group.add_argument("--show-updated", "-U", action="store_true",
        help=_("Show updated packages"))
    group.add_argument("--show-downgraded", "-D", action="store_true",
        help=_("Show downgraded packages"))
    return parser.parse_args()
|
|
|
|
|
|
|
|
def exit_proc(arg):
    """Clean up and terminate the program.

    Closes the output stream (if one was opened) and removes the
    temporary working directory referenced by arg.temp_dir.
    """
    err_tmpdir = arg.temp_dir
    err_output = arg.output

    # Close the output file, but never close sys.stdout itself: closing
    # the interpreter's stdout would silently swallow later messages.
    if (err_output is not None) and (err_output is not sys.stdout):
        err_output.close()
    if os.path.isdir(err_tmpdir):
        shutil.rmtree(err_tmpdir)
    exit(0)
|
|
|
|
|
|
|
|
def CheckURL(url, arg):
    """Verify that *url* is reachable.

    Any failure (malformed URL, timeout, HTTP error, ...) aborts the
    program through exit_proc().
    """
    try:
        urllib2.urlopen(url, None, timeout)
    except:
        print(_("Error: URL to repository \"%s\" is incorrect") % url)
        exit_proc(arg)
|
|
|
|
|
|
|
|
def CheckArgs(urlpath, arg):
    """Validate and normalize one repository location.

    Accepts an http:// or ftp:// URL, a local directory (optionally with
    a file:// prefix), or a urpmi media name resolved via urpmmisc.
    Returns the normalized location (URLs: base with trailing '/';
    directories and media names: with "media_info/" appended, mirroring
    the original behavior).  Exits via exit_proc() on invalid input.
    """
    if urlpath.startswith("http://") or urlpath.startswith("ftp://"):
        if not urlpath.endswith('/'):
            urlpath = urlpath + '/'
        tmp_url = urlpath + "media_info/"
        CheckURL(tmp_url, arg)
    elif os.path.isdir(urlpath) or urlpath.startswith("file://"):
        # BUG FIX: the scheme must be stripped only when it is actually
        # present; the previous code unconditionally chopped 6 leading
        # characters, mangling plain directory paths.
        if urlpath.startswith("file://"):
            urlpath = urlpath[7:]  # "file://./x" -> "./x", "file:///x" -> "/x"
        if not urlpath.endswith('/'):
            urlpath = urlpath + '/'
        urlpath = urlpath + "media_info/"
        if not os.path.isdir(urlpath):
            print(_("Error: directory %s does not exist") % urlpath)
            exit_proc(arg)
    else:
        (e1, e2, urltmp) = urpmmisc.GetUrlFromRepoName(urlpath)
        if urltmp:
            if not urltmp.endswith('/'):
                urltmp = urltmp + '/'
            urlpath = urltmp + "media_info/"
            CheckURL(urlpath, arg)
        else:
            print(_("Error: \"%s\" is not correct url, path or name of repository") % urlpath)
            exit_proc(arg)
    return urlpath
|
2012-09-21 12:33:45 +04:00
|
|
|
|
2012-09-05 15:45:44 +04:00
|
|
|
def CheckOutput(arg):
    """Resolve the output destination and store an open stream in arg.output.

    With the default sentinel value: open htmlname in HTML mode, or use
    sys.stdout otherwise.  With an explicit file name: refuse to clobber
    an existing file, require the parent directory to exist, and open the
    file for writing.  All failures abort via exit_proc().
    """
    file_output = arg.output
    ifhtml = arg.html

    if file_output == default_output:
        if ifhtml:
            try:
                arg.output = open(htmlname, "w")
            # narrowed from a bare "except:": only I/O errors are expected
            except (IOError, OSError):
                print(_("Error: Cannot open %s for writing.") % htmlname)
                exit_proc(arg)
            return
        else:
            arg.output = sys.stdout
            return

    if file_output != '':
        if os.path.isfile(file_output):
            print(_("Error: File %s already exists") % file_output)
            arg.output = None
            exit_proc(arg)
        else:
            dirname = os.path.dirname(file_output)
            if (dirname == '') or os.path.exists(dirname):
                try:
                    arg.output = open(file_output, "w")
                except IOError:
                    print(_("Error: File %s cannot be created") % file_output)
                    arg.output = None
                    exit_proc(arg)
            else:
                print(_("Error: Path %s does not exist.") % dirname)
                arg.output = None
                exit_proc(arg)
|
|
|
|
|
|
|
|
def CheckParam(arg):
    """Reconcile mutually exclusive parameters.

    In HTML mode the text-only options (--size, --simple, --changelog)
    are ignored.  When --reponames is given, its count must match the
    total number of repository groups.
    """
    if arg.html:
        arg.size = 0
        arg.simple = 0
        arg.changelog = 0
    if arg.reponames != '':
        if len(arg.old) + len(arg.new) != len(arg.reponames):
            print(_("Error: number of REPONAME's(%s) are not equal to number of groups(%s)") % \
                (str(len(arg.reponames)), str(len(arg.old) + len(arg.new))))
            exit_proc(arg)
    else:
        # BUG FIX: this used to assign to a misspelled attribute
        # ("arg.repnames"), creating a ghost attribute instead of
        # normalizing arg.reponames.
        arg.reponames = ''
|
2012-09-05 15:45:44 +04:00
|
|
|
|
|
|
|
def GetFile(urlpath, filename, localdir, arg):
    """Fetch *filename* from *urlpath* into *localdir*.

    Local repositories (urlpath is a directory) are copied with shutil;
    remote ones are downloaded with urllib2.  The local directory is
    created if needed.  Any failure aborts via exit_proc().
    """
    ifnotquiet = arg.quiet

    if not os.path.isdir(localdir):
        os.makedirs(os.path.realpath(localdir))
    if ifnotquiet:
        print((_("getting file %s from ") % filename) + "\n " + urlpath + filename)
    if os.path.isdir(urlpath):
        try:
            shutil.copyfile(urlpath + filename, localdir + filename)
        except:
            print(_("Error: file %s was not copied") % filename)
            exit_proc(arg)
    else:
        try:
            file_from = urllib2.urlopen(urllib2.Request(urlpath + filename), None, timeout)
            # BUG FIX: open in binary mode -- the archives are binary
            # data; a text-mode handle could corrupt them on platforms
            # with newline translation.
            file_to = open(localdir + filename, "wb")
            shutil.copyfileobj(file_from, file_to)
        except:
            print(_("Error: file %(from)s was not downloaded to %(to)s") % {"from": urlpath + filename, "to": localdir + filename})
            exit_proc(arg)
        file_from.close()
        file_to.close()
|
|
|
|
|
|
|
|
def GetFiles(arg):
    """Fetch every needed metadata file for all repository groups.

    For each old and new repository the synthesis archive is downloaded
    into the matching temporary directory; the changelog archive is added
    when changelog processing was requested.
    """
    ifchangelog = arg.changelog
    downloads = []  # (remote media_info dir, file name, local dir)

    for repo_group, temp_group in zip(arg.old, arg.temp_old):
        for repo, tempdir in zip(repo_group, temp_group):
            remote = repo + "media_info/"
            downloads.append((remote, synthesis_arch, tempdir))
            if ifchangelog:
                downloads.append((remote, changelog_arch, tempdir))

    for repo_group, temp_group in zip(arg.new, arg.temp_new):
        for repo, tempdir in zip(repo_group, temp_group):
            remote = repo + "media_info/"
            downloads.append((remote, synthesis_arch, tempdir))
            if ifchangelog:
                downloads.append((remote, changelog_arch, tempdir))

    for (remote, name, localdir) in downloads:
        GetFile(remote, name, localdir, arg)
|
2012-09-21 12:33:45 +04:00
|
|
|
|
|
|
|
def ReadIgnoreList(arg):
    """Read the ignore-list file named by arg.ignore.

    Blank lines are skipped; the trailing newline is stripped from each
    entry.  Returns the entries sorted.  Aborts via exit_proc() when the
    file is missing or unreadable.
    """
    ignorefile = arg.ignore
    entries = []
    if not os.path.isfile(ignorefile):
        print(_("Error: file %s does not exist.") % ignorefile)
        exit_proc(arg)
    try:
        with open(ignorefile) as source:
            for line in source:
                if line == '\n':
                    continue
                entries.append(line.rstrip('\n'))
        entries.sort()
    except:
        print(_("Error: file %s cannot be read.") % ignorefile)
        exit_proc(arg)
    return entries
|
|
|
|
|
2012-09-21 14:39:22 +04:00
|
|
|
def RemoveIgnored(dict_in, ignorelist):
    """Return a copy of dict_in without the ignored packages.

    An ignore entry ending in '*' matches any package name with that
    prefix; any other entry must equal a name exactly.  (The previous
    two-pointer merge required both inputs to be sorted; this version
    is equivalent for sorted inputs and also correct when they are not.)
    """
    exact = set()
    prefixes = []
    for pattern in ignorelist:
        if pattern.endswith('*'):
            prefixes.append(pattern[:-1])
        else:
            exact.add(pattern)

    def _ignored(name):
        # True when the package must be dropped from the result.
        if name in exact:
            return 1
        for prefix in prefixes:
            if name.startswith(prefix):
                return 1
        return 0

    dict_out = {}
    for name in dict_in:
        if not _ignored(name):
            dict_out[name] = dict_in[name]
    return dict_out
|
|
|
|
|
2012-09-05 15:45:44 +04:00
|
|
|
def RenameSynthFile(localdir, arg):
    """Rename the downloaded synthesis archive from .cz to .gz.

    The .cz file is actually gzip data; gzip only accepts it once the
    extension says so.  Aborts via exit_proc() on any failure.
    """
    ifnotquiet = arg.quiet
    src = localdir + synthesis_arch
    dst = localdir + synthesis_arch_renamed

    if not os.path.isfile(src):
        print(_("Error: file not found: ") + src)
        exit_proc(arg)
    try:
        os.rename(src, dst)
    except OSError:
        print(_("Error: cannot rename file %(from)s to %(to)s")
              % {"from": synthesis_arch, "to": synthesis_arch_renamed})
        exit_proc(arg)
    if not os.path.isfile(dst):
        print(_("Error: file %s is missing.") % dst)
        exit_proc(arg)
    elif ifnotquiet:
        print(_("file %(from)s was renamed to %(to)s")
              % {"from": synthesis_arch, "to": synthesis_arch_renamed})
|
2012-09-21 12:33:45 +04:00
|
|
|
|
2012-09-05 15:45:44 +04:00
|
|
|
def UnpackFiles(files_dir, ifchangelog, ifnotquiet):
    """Unpack.

    Unpack needed files in selected directory.

    files_dir   -- local directory holding the downloaded archives
    ifchangelog -- truthy when changelog.xml.lzma must be unpacked too
    ifnotquiet  -- truthy when progress messages should be printed
    """
    if ifchangelog:
        if ifnotquiet:
            print _("unpacking file ") + changelog_arch
        # lzma -df: decompress and force-overwrite an existing result
        subprocess.call(["lzma", "-df", files_dir + changelog_arch])
    if ifnotquiet:
        print _("unpacking file ") + synthesis_arch_renamed
    # the synthesis archive was renamed to .gz beforehand (RenameSynthFile)
    subprocess.call(["gzip", "-df", files_dir + synthesis_arch_renamed])
|
|
|
|
|
|
|
|
def ParseVersion(names_list):
    """Split version constraints off dependency names.

    Each entry like "name[>= 1:2.3-4]" becomes a tuple
    (name, sign, (epoch, version, release)); entries without a version
    constraint get sign '' and an empty verrel tuple.
    """
    parsed = []
    for raw_name in names_list:
        unver = re_search_unver.match(raw_name)
        entry = unver.group(1) if unver else raw_name

        verinfo = re_search_verrel.search(raw_name)
        if verinfo:
            sign = verinfo.group(1)[:-1]  # drop the trailing space
            epoch = verinfo.group(2)
            epoch = epoch[:-1] if epoch else ''  # drop the ':' separator
            version = verinfo.group(3)
            release = verinfo.group(4)
            release = release[1:] if release else ''  # drop the '-' separator
            parsed.append((entry, sign, (epoch, version, release)))
        else:
            parsed.append((entry, '', ('', '', '')))
    return parsed
|
|
|
|
|
|
|
|
def ParseSynthesis(synthfile, pkgdict, arg):
    """Collect info about packages.

    Parse synthesis.hdlist file (or add new entries to pkgdict).

    pkgdict is a dictionary with format:
    pkgdict[name]=(verrel,(s0,s1,s2))
    where:
    name - is package name parsed from package filename
    verrel - is tuple (version, release, epoch)
    s0[] - is package info
    s1 - is package summary
    s2[] - is list of obsoleted packages

    A synthesis record is a run of "@tag@value@value..." lines; the
    "info" tag terminates one package record.  When the same name occurs
    more than once, only the highest version is kept.
    """
    ifnotquiet = arg.quiet
    ifreleaseignore = arg.no_release

    if not os.path.isfile(synthfile):
        print _("Error: Synthesis file %s was not found.") % synthfile
        exit_proc(arg)
    if ifnotquiet:
        print _("Parsing synthesis")
    try:
        synth = open(synthfile)
        # tmp accumulates one record: [info fields, summary, obsoletes]
        tmp = ['', '', '']
        for synthline in synth:
            # every meaningful synthesis line starts with '@'
            if not synthline.startswith('@'):
                continue
            if synthline.endswith('\n'):
                synthline = synthline[:-1]
            tmpline = synthline.split('@')
            tag = tmpline[1]
            if tag == synthtags[2]:          # "obsoletes"
                tmp[2] = tmpline[2:]
            elif tag == synthtags[5]:        # "summary" (may itself contain '@')
                tmp[1] = '@'.join(tmpline[2:])
            elif tag == synthtags[6]:        # "info" ends the record
                tmp[0] = tmpline[2:]
                disttagepoch = ChkTagEpoch(tmp[0]) #disttag + distepoch
                tmp[2] = ParseVersion(tmp[2])
                (name, version, release) = RPMNameFilter(tmp[0][0],
                    disttagepoch, ifreleaseignore)
                verrel = (version, release, tmp[0][1])
                if(not name in pkgdict):
                    pkgdict[name]=(verrel, (tmp[0], tmp[1], tmp[2]))
                elif(compare_versions(pkgdict[name][0], verrel) == -1):
                    # keep only the highest version of a duplicated name
                    pkgdict[name]=(verrel, (tmp[0], tmp[1], tmp[2]))
                tmp = ['', '', '']
        synth.close()
    except IOError:
        print _("Error: Failed to open synthesis file ") + synthfile
        exit_proc(arg)
|
|
|
|
|
|
|
|
def ChkDist(disttag, distepoch):
    """Warn when <disttag> or <distepoch> contains a '-' character.

    A dash inside either value would break the name/version/release
    splitting done elsewhere.
    """
    if ('-' in disttag) or ('-' in distepoch):
        print(_("REPODIFF-Warning: strange format of <disttag> or <distepoch>: ")
              + disttag + distepoch)
|
2012-09-21 12:33:45 +04:00
|
|
|
|
2012-09-05 15:45:44 +04:00
|
|
|
def ChkTagEpoch(i):
    """Extract disttag+distepoch from a synthesis "info" field list.

    4 fields mean neither is present ('-' is returned as a sentinel);
    5 fields carry a disttag only; 6 carry disttag and distepoch.
    Anything else just prints a warning (and returns None).
    """
    count = len(i)
    if count == 4:
        return '-'
    if count in (5, 6):
        disttag = i[4]
        distepoch = i[5] if count == 6 else ''
        ChkDist(disttag, distepoch)
        return disttag + distepoch
    print(_("REPODIFF-Warning: strange <info>: ") + str(i))
|
|
|
|
|
2012-09-13 19:03:17 +04:00
|
|
|
def RPMNameFilter(rpmname, disttagepoch, ifreleaseignore):
    """Split an rpm file name into (name, version, release).

    The trailing ".arch" (and a possible ".mgaN" part) is removed first;
    the remaining tail is then classified as either the release (when the
    package name embeds no extra dash-separated tail) or as disttag data,
    in which case version and release come from the preceding fields.
    """
    parts = rpmname.split('-')
    tail = parts.pop().split('.')
    is_source = (tail.pop() == "src")
    is_mageia = 0
    if tail[-1].startswith("mga"):
        tail.pop()
        is_mageia = 1
    tail_str = '.'.join(tail)

    tail_is_release = (tail_str[0].isdigit()
                       or not tail_str.startswith(disttagepoch)) \
        and (not tail_str.isdigit() or is_source or is_mageia)
    if tail_is_release:
        name, ver, rel = '-'.join(parts[:-1]), parts[-1], tail_str
    else:
        name, ver, rel = '-'.join(parts[:-2]), parts[-2], parts[-1]
    return (name, ver, "" if ifreleaseignore else rel)
|
2012-09-21 12:33:45 +04:00
|
|
|
|
2012-09-05 15:45:44 +04:00
|
|
|
def compare_versions(first_entry, second_entry):
    """Compare two (version, release, epoch) tuples via rpm.

    Returns 1 when the first entry is higher, 0 when they are
    equivalent, and -1 when the second is higher.
    """
    first_ver, first_rel, first_epoch = first_entry
    second_ver, second_rel, second_epoch = second_entry
    # rpm.labelCompare expects (epoch, version, release) ordering
    return rpm.labelCompare((first_epoch, first_ver, first_rel),
                            (second_epoch, second_ver, second_rel))
|
2012-09-21 12:33:45 +04:00
|
|
|
|
2012-09-05 15:45:44 +04:00
|
|
|
def ParsePackage(arg):
    """Unpack metadata for every repository and build the package dicts.

    For each old and each new temporary directory: rename the synthesis
    archive, unpack the archives, and parse the synthesis file.  The
    ignore list is then applied to both dictionaries.

    Returns (pkgdict_old, pkgdict_new), each of format:
    pkgdict[name]=(verrel,(s0,s1,s2))
    where verrel is (version, release, epoch), s0 is the package info,
    s1 the summary and s2 the list of obsoleted packages.
    """
    ifchangelog = arg.changelog
    ifnotquiet = arg.quiet
    ignorelist = arg.ignorelist

    pkgdict_old = {}
    for tmp_list in arg.temp_old:
        for directory in tmp_list:
            RenameSynthFile(directory, arg)
            UnpackFiles(directory, ifchangelog, ifnotquiet)
            ParseSynthesis(directory + synthesis_file, pkgdict_old, arg)

    pkgdict_new = {}
    for tmp_list in arg.temp_new:
        for directory in tmp_list:
            RenameSynthFile(directory, arg)
            UnpackFiles(directory, ifchangelog, ifnotquiet)
            ParseSynthesis(directory + synthesis_file, pkgdict_new, arg)

    pkgdict_old = RemoveIgnored(pkgdict_old, ignorelist)
    # BUG FIX: this assignment used to go to a misspelled variable
    # ("pdkdict_new"), so the ignore list was never applied to the new
    # repositories' dictionary.
    pkgdict_new = RemoveIgnored(pkgdict_new, ignorelist)
    ignorelist = ""
    arg.ignorelist = ""
    return pkgdict_old, pkgdict_new
|
2012-09-21 12:33:45 +04:00
|
|
|
|
2012-09-05 15:45:44 +04:00
|
|
|
def CreateDicts(dict_old, dict_new):
    """Split the packages into new / removed / updated sets.

    dict_old and dict_new map a package name to (verrel,(s0,s1,s2)),
    where verrel is (version, release, epoch), s0 is the package info,
    s1 the summary and s2 the list of obsoleted packages.

    Returns (dict_new_packages, dict_del_packages, dict_upd_packages);
    the first two keep the input entry format, while
    dict_upd_packages[name] = (dict_old[name], dict_new[name], ifdowngraded)
    with ifdowngraded 0 for an upgrade and 1 for a downgrade.  Packages
    whose versions compare equal are not reported at all.
    """
    added = {}
    removed = {}
    changed = {}

    for name, new_entry in dict_new.items():
        if name in dict_old:
            outcome = compare_versions(new_entry[0], dict_old[name][0])
            if outcome > 0:
                changed[name] = (dict_old[name], new_entry, 0)   # updated
            elif outcome < 0:
                changed[name] = (dict_old[name], new_entry, 1)   # downgraded
            # equal versions: intentionally skipped
        else:
            added[name] = new_entry
    for name, old_entry in dict_old.items():
        if name not in dict_new:
            removed[name] = old_entry
    return (added, removed, changed)
|
|
|
|
|
|
|
|
def ProcessNewPackages(dict_new_packages, file_output):
    """Write one report entry per newly added package, sorted by name.

    dict_new_packages[name]=(verrel,(s0,s1,s2)); the report shows the
    package file name (s0[0]) followed by the summary (s1).
    """
    for name in sorted(dict_new_packages):
        entry = dict_new_packages[name]
        fullname = entry[1][0][0]   # package file name from the info field
        summary = entry[1][1]
        file_output.write(_("New package: ") + fullname + "\n "
                          + summary + "\n\n")
|
|
|
|
|
|
|
|
def GenerateDictObsoleted(dict_new, ifnotquiet):
    """Invert the obsoletes data of the new repositories.

    Returns obsoleted_by[obsoleted_name] = [(pkg_fullname, sign, verrel), ...],
    i.e. which new packages (by file name, s0[0]) obsolete a given name
    and under what version constraint.
    """
    if ifnotquiet:
        print(_("Generating obsoleted list."))
    obsoleted_by = {}
    for name in dict_new:
        pkg_fullname = dict_new[name][1][0][0]
        for (obsolete, sign, verrel) in dict_new[name][1][2]:
            obsoleted_by.setdefault(obsolete, []).append(
                (pkg_fullname, sign, verrel))
    return obsoleted_by
|
|
|
|
|
|
|
|
def compare_verrel(verrel1, sign, verrel2):
    """Check whether verrel1 satisfies the (sign, verrel2) constraint.

    verrel tuples are (epoch, version, release).  An empty sign or a
    missing version on either side counts as satisfied.  Missing epochs
    and releases are normalized to '0' on both sides before comparing.
    Returns 1 when satisfied, 0 otherwise.
    """
    if sign == '':
        return 1
    (e1, v1, r1) = verrel1
    (e2, v2, r2) = verrel2
    if (v2 == '') or (v1 == ''):
        return 1
    if (e1 == '') or (e2 == ''):
        e1 = e2 = '0'
    if (r1 == '') or (r2 == ''):
        r1 = r2 = '0'

    outcome = rpm.labelCompare((e1, v1, r1), (e2, v2, r2))
    satisfied = {
        "==": outcome == 0,
        ">": outcome == 1,
        "<": outcome == -1,
        ">=": outcome > -1,
        "<=": outcome < 1,
    }
    # an unrecognized sign is never satisfied
    if satisfied.get(sign, False):
        return 1
    return 0
|
|
|
|
|
|
|
|
def ProcessDelPackages(dict_del_packages, dict_obsoleted, file_output):
    """Report removed packages, noting which new packages obsolete them.

    dict_del_packages[name]=(verrel,(s0,s1,s2));
    dict_obsoleted[name]=[(pkg_fullname, sign, verrel), ...].  Only
    obsoletes whose version constraint matches the removed package's
    verrel are listed.
    """
    for name in sorted(dict_del_packages):
        entry = dict_del_packages[name]
        file_output.write(_("Removed package: ") + entry[1][0][0] + '\n')
        if name not in dict_obsoleted:
            continue
        matching = [obsolete
                    for (obsolete, sign, verrel) in dict_obsoleted[name]
                    if compare_verrel(entry[0], sign, verrel)]
        for obs_package_name in sorted(matching):
            file_output.write(_("    Obsoleted by ") + obs_package_name + '\n')
|
|
|
|
|
|
|
|
def ParseLogfile(dict_log, logfile, dict_upd_packages, mode, arg):
    """Parse a changelog.xml file into dict_log.

    mode == 0 - "old" changelog: only the first (newest) entry of each
                package's changelog is remembered.
    mode == 1 - "new" changelog: entries are collected until the entry
                remembered from the old changelog is reached.

    dict_log[name] = [(verrel, (time, author, text)),
                      (verrel, [(time, author, text), ...])]
    where slot 0 comes from the old repository and slot 1 from the new one.

    dict_upd_packages[name] = [old_pkg[name], new_pkg[name], ifdowngraded]
    i.e. [(verrel, (s0, s1, s2)), (verrel, (s0, s1, s2)), ifdowngraded].

    Exits via exit_proc(arg) on a missing or malformed changelog file.
    """
    ifnotquiet = arg.quiet
    ifreleaseignore = arg.no_release

    if ifnotquiet:
        print _("Reading changelog")
    if not os.path.isfile(logfile):
        print _("Error: Can't find changelog ") + logfile
        exit_proc(arg)
    doc = libxml2.parseFile(logfile)
    if (not doc):
        print _("Error: Can't read changelog ") + logfile + "."
        exit_proc(arg)
    root = doc.children
    if root.name != "media_info":
        print _("Error: Wrong changelog.")
        doc.freeDoc()
        exit_proc(arg)
    tag_changelog = root.children
    # Walk every <changelogs> element; each describes one package.
    while(tag_changelog):
        if(tag_changelog.name != "changelogs"):
            tag_changelog = tag_changelog.next
            continue
        # Collect the package file name and disttag/distepoch attributes.
        tag_property = tag_changelog.properties
        pkgname = ''
        disttag = ''
        distepoch = ''
        while(tag_property):
            if (tag_property.name == "fn"):
                pkgname = tag_property.content
            elif (tag_property.name == "disttag"):
                disttag = tag_property.content
            elif (tag_property.name == "distepoch"):
                distepoch = tag_property.content
            tag_property = tag_property.next
        if (pkgname == ''):
            print _("Error: Corrupted changelog")
            doc.freeDoc()
            exit_proc(arg)
        disttagepoch = disttag + distepoch
        if (disttagepoch == ''):
            disttagepoch = '-'
        (result_key, version, release) = RPMNameFilter(pkgname, disttagepoch, ifreleaseignore)
        # Epoch is unknown here; "-1" is used as a placeholder.
        verrel = (version, release, "-1")
        # skip entry if it wasn't updated
        if result_key not in dict_upd_packages:
            tag_changelog = tag_changelog.next
            continue
        ifdowngraded = dict_upd_packages[result_key][2]
        # skip entry if its file name does not match the recorded package
        if(dict_upd_packages[result_key][mode][1][0][0] != pkgname):
            tag_changelog = tag_changelog.next
            continue
        # skip entry if it has been found already with appropriate version
        if(result_key in dict_log) and (dict_log[result_key][mode]):
            tag_changelog = tag_changelog.next
            continue
        log_current = tag_changelog.children
        result_changelog = []
        # Walk the <log> children, newest entry first.
        while(log_current):
            if(log_current.name != "log"):
                log_current = log_current.next
                continue
            if(log_current.properties.name == "time"):
                entry_time = log_current.properties.content
            else:
                entry_time = 0
            # In "new" mode stop once we reach the entry already known
            # from the old changelog (not for downgraded packages).
            if(mode == 1) and (not ifdowngraded) and\
              (result_key in dict_log) and\
              (entry_time <= dict_log[result_key][0][1][0]):
                break
            log_child = log_current.children
            while(log_child):
                if(log_child.name == "log_name"):
                    entry_name = log_child.content
                elif(log_child.name == "log_text"):
                    entry_text = log_child.content
                log_child = log_child.next
            result_changelog.append((entry_time, entry_name, entry_text))
            # if "old" repository does not have a changelog for the package,
            # initialize its slot from what we see in the new one
            if(mode == 1) and (not result_key in dict_log):
                dict_log[result_key] = []
                dict_log[result_key].append([])
                dict_log[result_key].append([])
                dict_log[result_key][0] = (verrel, [])
                if not ifdowngraded:
                    dict_log[result_key][0] = (verrel, result_changelog[0])
                else:
                    dict_log[result_key][0] = (verrel, result_changelog)
            # mode 0 + not downgraded, or mode 1 + downgraded: one entry
            # is enough.
            if(mode == ifdowngraded):
                break
            log_current = log_current.next
        if(mode == 0):
            dict_log[result_key] = []
            dict_log[result_key].append([])
            dict_log[result_key].append([])
            if not ifdowngraded:
                dict_log[result_key][0] = (verrel, result_changelog[0])
            else:
                dict_log[result_key][0] = (verrel, result_changelog)
        else:
            if not ifdowngraded:
                dict_log[result_key][1] = (verrel, result_changelog)
            else: #special actions for downgraded packages
                # Keep only the old entries newer than the first entry of
                # the downgraded version: these are the "lost" changes.
                new_result = []
                time_to_stop = result_changelog[0][0]
                tmp_change = dict_log[result_key][0][1] #changelog list
                if tmp_change: #changelog is not empty
                    i = 0
                    length = len(tmp_change)
                    while i < length:
                        if tmp_change[i][0] <= time_to_stop:
                            i = i + 1
                            break
                        new_result.append(tmp_change[i])
                        i = i + 1
                dict_log[result_key][1] = (verrel, new_result)
        tag_changelog = tag_changelog.next
    doc.freeDoc()
|
2012-09-21 12:33:45 +04:00
|
|
|
|
2012-09-05 15:45:44 +04:00
|
|
|
def GenerateLogfileDiff(dict_upd_packages, arg):
|
|
|
|
"""Changelog difference list.
|
2012-09-21 12:33:45 +04:00
|
|
|
|
2012-09-05 15:45:44 +04:00
|
|
|
Generate changelog difference list.
|
|
|
|
dict_upd_packages[name] = [old_pkg[name],new_pkg[name],ifdowngraded]
|
|
|
|
or dict_upd_packages[name] = [(verler,(s0,s1,s2)),(verrel,(s0,s1,s2)),ifdowngraded]
|
|
|
|
"""
|
|
|
|
ifnotquiet = arg.quiet
|
|
|
|
temp_old = arg.temp_old
|
|
|
|
temp_new = arg.temp_new
|
|
|
|
|
|
|
|
if ifnotquiet:
|
|
|
|
print _("Generating changes list.")
|
|
|
|
dict_logfile_diff = {}
|
|
|
|
dict_log = {}
|
2012-09-21 12:33:45 +04:00
|
|
|
|
|
|
|
for i in temp_old:
|
|
|
|
for old_dir in temp_old[i]:
|
|
|
|
ParseLogfile(dict_log, old_dir + changelog_file, dict_upd_packages, 0, arg)
|
|
|
|
for i in temp_new:
|
|
|
|
for new_dir in temp_new[i]:
|
|
|
|
ParseLogfile(dict_log, new_dir + changelog_file, dict_upd_packages, 1, arg)
|
|
|
|
|
2012-09-05 15:45:44 +04:00
|
|
|
for name in dict_upd_packages:
|
|
|
|
if(name in dict_log):
|
|
|
|
if dict_log[name][1]:
|
|
|
|
entry = dict_log[name][1][1]
|
|
|
|
else:
|
|
|
|
print _("REPODIFF-Warning: Package %s was not described in changelogs.xml") % name
|
|
|
|
entry = [(0, '', _("REPODIFF-Warning: Changelogs of a package are absent in \"new\" repository."))]
|
|
|
|
else:
|
|
|
|
print _("REPODIFF-Warning: Package %s was not described in changelogs.xml") % name
|
|
|
|
entry = [(0, '', _("REPODIFF-Warning: Changelogs of a package are absent."))]
|
|
|
|
dict_logfile_diff[name] = entry
|
2012-09-21 12:33:45 +04:00
|
|
|
|
2012-09-05 15:45:44 +04:00
|
|
|
return dict_logfile_diff
|
|
|
|
|
|
|
|
def ChangelogPrint(changes_list, file_output):
    """Write changelog entries to *file_output*.

    changes_list is a list of (time, author, text) tuples; every entry is
    printed rpm-changelog style: "* <date> <author>", then the text and a
    blank line.  An empty list produces a single blank line.
    """
    if not changes_list:
        file_output.write('\n')
        return
    for (stamp, author, text) in changes_list:
        day = date.fromtimestamp(float(stamp))
        file_output.write("* " + str(day) + " " + author + '\n' + text + '\n\n')
|
2012-09-21 12:33:45 +04:00
|
|
|
|
2012-09-05 15:45:44 +04:00
|
|
|
def PrintLogfileDiff(package_name, dict_logfile_diff, file_output):
    """Print the changelog difference for one package.

    dict_logfile_diff maps package name -> [(time, author, text), ...].
    Writes a notice when no changelog info was collected for the package.
    """
    entry = dict_logfile_diff.get(package_name)
    if entry is None:
        file_output.write(_("Package %s has no changelog info\n") % package_name)
    else:
        ChangelogPrint(entry, file_output)
|
|
|
|
|
|
|
|
def ProcessUpdPackages(dict_upd_packages, dict_logfile_diff, arg):
    """Process updated packages and write the "Updated packages" report.

    ifsizes      - whether to print the difference in package sizes.
    ifnotsimple  - detailed mode: package header plus (optionally) the
                   changelog difference; simple mode prints one
                   "name: old -> new" line per package.
    ifup/ifdown  - filters: show updated / show downgraded packages.

    dict_upd_packages[name] = [old_pkg[name], new_pkg[name], ifdowngraded]
    i.e. [(verrel, (s0, s1, s2)), (verrel, (s0, s1, s2)), ifdowngraded],
    where s0[0] is the package file name and s0[2] its size.
    """
    ifnotsimple = arg.simple
    file_output = arg.output
    ifchangelog = arg.changelog
    ifsizes = arg.size
    ifup = arg.show_updated
    ifdown = arg.show_downgraded

    file_output.write(_("\n\nUpdated packages:\n\n"))
    sorted_list = sorted(dict_upd_packages)
    for name in sorted_list:
        # File name of the package in the "new" repository.
        package = dict_upd_packages[name][1][1][0][0]
        if ifnotsimple:
            if dict_upd_packages[name][2]:
                # ifdowngraded set: report only when --show-downgraded.
                if ifdown:
                    file_output.write(package + '\n' + '-'*len(package) + '\n')
                    file_output.write(_(" ***DOWNGRADED***\n"))
                else:
                    continue
            else:
                if ifup:
                    file_output.write(package + '\n' + '-'*len(package) + '\n')
                else:
                    continue
            if ifchangelog:
                PrintLogfileDiff(name, dict_logfile_diff, file_output)
            else:
                file_output.write('\n')
        else:
            # Simple one-line format: "name: old-file -> new-file".
            old_package = dict_upd_packages[name][0][1][0][0]
            file_output.write(name + ": " + old_package + " -> " + package + '\n')
        if(ifsizes):
            # new size minus old size, in bytes.
            sizediff = int(dict_upd_packages[name][1][1][0][2]) - \
                int(dict_upd_packages[name][0][1][0][2])
            file_output.write(_("Size Change: %d bytes\n\n") % sizediff)
|
|
|
|
|
2012-09-21 12:33:45 +04:00
|
|
|
def PrintSummary(dict_new_packages, dict_del_packages, dict_upd_packages, arg):
    """Output the summary: counts of new/removed/updated/downgraded packages.

    Honors the arg.show_* filters.  In HTML mode the formatted summary
    string is returned (so the caller can embed it in the page); in plain
    mode it is written to arg.output.
    """
    file_output = arg.output
    ifhtml = arg.html

    # Line terminator depends on the output format.
    if ifhtml:
        endstr = '<br />'
    else:
        endstr = '\n'

    tmp_str = _("Summary:")
    if ifhtml:
        tmp_str = '<p class="bold">' + tmp_str + '</p>'
    else:
        tmp_str = tmp_str + endstr
    if arg.show_new:
        length = len(dict_new_packages)
        if length:
            tmp_str = tmp_str + (_(" Total added packages: ") + str(length) + endstr)
    if arg.show_removed:
        length = len(dict_del_packages)
        if length:
            tmp_str = tmp_str + (_(" Total removed packages: ") + str(length) + endstr)
    if arg.show_updated or arg.show_downgraded:
        # Entry [2] (ifdowngraded) separates updated from downgraded.
        length = 0
        length_d = 0
        for packagename in dict_upd_packages:
            if dict_upd_packages[packagename][2] == 0:
                length = length + 1
            else:
                length_d = length_d + 1
        if arg.show_updated:
            if length:
                tmp_str = tmp_str + (_(" Total updated packages: ") + str(length) + endstr)
        if arg.show_downgraded:
            if length_d:
                tmp_str = tmp_str + (_(" Total downgraded packages: ") + str(length_d) + endstr)

    if ifhtml:
        return tmp_str
    else:
        # BUG FIX: was "file_ouput.write(tmp_str)" (misspelled name),
        # which raised NameError on every plain-text run.
        file_output.write(tmp_str)
|
2012-09-05 15:45:44 +04:00
|
|
|
|
|
|
|
def HTML_ParsePackage(arg):
    """Parse hdlists for HTML output.

    HTML-specific counterpart of ParsePackage(): for every repository
    group, renames/unpacks the synthesis file of each directory and
    parses it into one dictionary per group, with ignored packages
    filtered out.  Returns (old_group_dicts, new_group_dicts).
    """
    ifnotquiet = arg.quiet
    ignorelist = arg.ignorelist

    def parse_group(dir_list):
        # Accumulate packages from every directory of one repository group.
        group_dict = {}
        for directory in dir_list:
            RenameSynthFile(directory, arg)
            UnpackFiles(directory, 0, ifnotquiet)
            ParseSynthesis(directory + synthesis_file, group_dict, arg)
        return RemoveIgnored(group_dict, ignorelist)

    html_old_dict_list = [parse_group(dir_list) for dir_list in arg.temp_old]
    html_new_dict_list = [parse_group(dir_list) for dir_list in arg.temp_new]

    # The ignore list has been applied here; clear it so later stages
    # do not filter a second time.
    ignorelist = ""
    arg.ignorelist = ""
    return html_old_dict_list, html_new_dict_list
|
|
|
|
|
2012-09-21 12:33:45 +04:00
|
|
|
def HTML_UniteDicts(list_dict):
    """Union of package dictionaries (HTML-specific).

    Merges all dictionaries in *list_dict*; when the same package occurs
    in several of them, the entry with the highest version (according to
    compare_versions) wins.
    """
    dict_all = {}
    for current in list_dict:
        for name, entry in current.items():
            known = dict_all.get(name)
            if known is None or compare_versions(known[0], entry[0]) == -1:
                dict_all[name] = entry
    return dict_all
|
|
|
|
|
2012-09-05 15:45:44 +04:00
|
|
|
def HTML_CreateDicts(dict_old, list_dict_new):
    """Create the dictionary of packages and their change types.

    Compares the united "old" dictionary against every "new" repository
    group.  The result maps package name -> list of
    (entry, repo_index, change_type) tuples, where change_type is
    1 = new, 2 = removed, 3 = updated, 4 = downgraded.
    """
    dict_packages = {}
    for repo_index, dict_new in enumerate(list_dict_new):
        (added, removed, updated) = CreateDicts(dict_old, dict_new)
        for packagename, entry in added.items():
            dict_packages.setdefault(packagename, []).append((entry, repo_index, 1))
        for packagename, entry in removed.items():
            dict_packages.setdefault(packagename, []).append((entry, repo_index, 2))
        for packagename, entry in updated.items():
            bucket = dict_packages.setdefault(packagename, [])
            # entry[2] is the ifdowngraded flag of the update record.
            if entry[2] == 0:
                bucket.append((entry[1], repo_index, 3))
            elif entry[2] == 1:
                bucket.append((entry[1], repo_index, 4))
    return dict_packages
|
|
|
|
|
|
|
|
def CssOutput():
    """Return the CSS used by the HTML report.

    The result is embedded in a <style> tag (or written to a .css file);
    row classes even/odd alternate stripes, and new/removed/updated/
    downgraded colour the change cells.
    """
    rules = [
        '\nbody {\nfont-size: 1em;\nmargin: 1em;\ncolor: black;\nbackground-color: white;\n}\n',
        'th {\nborder-bottom-style: double;\n}\n',
        'h1 {\nfont-size: 1.6em;\n}\n',
        'h2 {\nfont-size: 1.4em;\n}\n',
        'ul {\nfont-size: 1.2em;\n}\n',
        'li {\nfont-size: 1em; list-style: disc;\n}\n',
        '.even {\nbackground-color: #CCCCCC;\n}\n',
        '.odd {\nbackground-color: #FFFFFF;\n}\n',
        '.new {\nbackground-color: #C6DEFF;\n}\n',
        '.removed {\nbackground-color: #FFC3CE;\n}\n',
        '.updated {\nbackground-color: #CCFFCC;\n}\n',
        '.downgraded {\nbackground-color: #F4F4AF;\n}\n',
        'p.bold {\n font-weight: bold\n}\n',
    ]
    return ''.join(rules)
|
|
|
|
|
|
|
|
def JavaScriptOutput():
    """Return the JavaScript used by the HTML report.

    The script implements client-side sorting of the diff table: a
    TableSorter object tracks the last sorted column and order, swaps the
    arrow glyphs (U+2191/U+2193) in the header cells, and re-appends the
    rows in sorted order while re-striping the even/odd row classes.
    Emitted into a <script> tag or a .js file.
    """
    javacontent = """
var tableBody;
var table2sort;
var imgUp;
var imgDown;
var suffix;
var lastSortCol;
var lastSortOrderAsc;
var index;
var rows;

function TableSorter(table,suf) {
    this.table2sort = table;
    this.suffix = suf;
    this.lastSortCol = -1;
    this.lastSortOrderAsc = true;
    this.tableBody = this.table2sort.getElementsByTagName("tbody")[0];

    this.imgUp = document.createTextNode(String.fromCharCode(0x2193));
    this.imgDown = document.createTextNode(String.fromCharCode(0x2191));
}

TableSorter.prototype.sort = function (col, type) {
    if (this.lastSortCol != -1) {
        sortCell = document.getElementById("sortCell" + this.suffix + this.lastSortCol);
        if (sortCell != null) {
            if (this.lastSortOrderAsc == true) {
                sortCell.removeChild(this.imgUp);
            } else {
                sortCell.removeChild(this.imgDown);
            }
        }
        sortLink = document.getElementById("sortCellLink" + this.suffix + this.lastSortCol);
        if(sortLink != null) {
            sortLink.title = "Sort Ascending";
        }
    }else{
        this.rows = this.tableBody.rows;
    }

    if (this.lastSortCol == col) {
        this.lastSortOrderAsc = !this.lastSortOrderAsc;
    } else {
        this.lastSortCol = col;
        this.lastSortOrderAsc = true;
    }

    var newRows = new Array();

    var newRowsCount = 0;
    for (i = 1; i < this.rows.length; i ++) {
        newRows[newRowsCount++] = this.rows[i];
    }

    index = this.lastSortCol;
    if (type == 'string') {
        newRows.sort(sortFunction_string);
    }
    else {
        newRows.sort(sortFunction_attr);
    }

    if (this.lastSortOrderAsc == false) {
        newRows.reverse();
    }

    var count = 0;
    var newclass;
    for (i = 0; i < newRows.length; i++) {
        if (count++ % 2 == 0){
            newclass = "odd";
        }else{
            newclass = "even";
        }
        newRows[i].className = newclass;
        this.table2sort.tBodies[0].appendChild(newRows[i]);
    }

    sortCell = document.getElementById("sortCell" + this.suffix + col);
    if (sortCell == null) {
    } else {
        if (this.lastSortOrderAsc == true) {
            sortCell.appendChild(this.imgUp);
        } else {
            sortCell.appendChild(this.imgDown);
        }
    }

    sortLink = document.getElementById("sortCellLink" + this.suffix + col);
    if (sortLink == null) {
    } else {
        if (this.lastSortOrderAsc == true) {
            sortLink.title = "Sort Descending";
        } else {
            sortLink.title = "Sort Ascending";
        }
    }
}

function getCellContent(elem) {
    if (typeof elem == "string") return elem;
    if (typeof elem == "undefined") { return elem };
    if (elem.innerText) return elem.innerText;
    var str = "";

    var cs = elem.childNodes;
    var l = cs.length;
    for (var i = 0; i < l; i++) {
        switch (cs[i].nodeType) {
            case 1: // 'ELEMENT_NODE'
                str += getCellContent(cs[i]);
                break;
            case 3: // 'TEXT_NODE'
                str += cs[i].nodeValue;
                break;
        }
    }
    return str;
}

function sortFunction_attr(a, b) {
    elem1 = a.cells[index] ;
    elem2 = b.cells[index] ;
    str1 = elem1.className;
    str2 = elem2.className;
    sub1 = getCellContent(a.cells[0]).toLowerCase();
    sub2 = getCellContent(b.cells[0]).toLowerCase();

    if (str1 == str2){
        if (sub1 == sub2) return 0;
        if (sub1 < sub2) return -1;
        return 1;
    }
    if (str1 < str2) return -1;
    return 1;
}

function sortFunction_string(a, b) {
    str1 = getCellContent(a.cells[index]).toLowerCase();
    str2 = getCellContent(b.cells[index]).toLowerCase();

    if (str1 == str2) return 0;
    if (str1 < str2) return -1;
    return 1;
}

var diffTableSorter = null;

function init_diff(){
    if( document.getElementById("table_diff") ) {
        diffTableSorter = new TableSorter(document.getElementById("table_diff"), 'diff');
    }
}

function sort_diff(col, type) {
    if( diffTableSorter != null ) {
        diffTableSorter.sort(col, type);
    }
}
"""
    return javacontent
|
2012-09-21 12:33:45 +04:00
|
|
|
|
2012-09-05 15:45:44 +04:00
|
|
|
def HTML_OutputHead(file_output):
    """Output the beginning of the HTML document.

    Writes the static XHTML prologue plus the embedded stylesheet
    (CssOutput) and sorting script (JavaScriptOutput), up to and
    including the opening <body> tag.
    """
    parts = [
        '<!--?xml version="1.0" encoding="UTF-8"?-->\n',
        '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">\n',
        '<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">\n',
        '<head>\n',
        '<title>Differences between repositories</title>\n',
        '<meta name="keywords" content="Mandriva,Rosa,RPM,repository,difference,changes"/>\n',
        '<meta name="description" content="List of changes between repositories"/>\n',
        '<meta http-equiv="content-type" content="text/html; charset=UTF-8"/>\n',
        '<style type="text/css">',
        CssOutput(),
        '</style>\n',
        '<script type="text/javascript">',
        '/* <![CDATA[ */',
        JavaScriptOutput(),
        '/* ]]> */',
        '</script>\n',
        '</head>\n',
        '<body>\n\n',
    ]
    file_output.write(''.join(parts))
|
|
|
|
|
2012-09-13 19:03:17 +04:00
|
|
|
def GetRepoInfo(dict_packages, packagename, lenold, lennew, list_dict_old,
                list_dict_new, ifreleaseignore):
    """Generate the per-package table row for the HTML report.

    Returns (cell_texts, cell_classes, removed_flag, show_filter):
      cell_texts   - one version string (or "N/A"/"Removed") per column,
                     old repositories first, then new ones;
      cell_classes - matching 'class = "..."' attribute per column;
      removed_flag - 1 if the package was removed from some new repo;
      show_filter  - [new, removed, updated, downgraded] presence flags.

    ifreleaseignore suppresses the "-release" suffix of version strings.
    """
    result1 = []
    result2 = []
    flag = 0
    show_filter = [0, 0, 0, 0]
    tmpstr = ""
    for i in range(lenold):
        if packagename in list_dict_old[i]:
            tmpstr = list_dict_old[i][packagename][0][0]
            if not ifreleaseignore:
                tmpstr = tmpstr + '-' + list_dict_old[i][packagename][0][1]
            result1.append(tmpstr)
        else:
            result1.append("N/A")
        # BUG FIX: the empty class placeholder was only appended in the
        # "N/A" branch, so result2 ended up shorter than result1 whenever
        # the package exists in an old repository, causing an IndexError
        # in HTML_OutputBody.  Old-repo columns never carry a class.
        result2.append('')

    # Map repo index -> (display name, change type) for this package.
    tmpdict = {}
    for (entry, reponum, entry_type) in dict_packages[packagename]:
        tmpstr = entry[0][0]
        if not ifreleaseignore:
            tmpstr = tmpstr + '-' + entry[0][1]
        tmpdict[reponum] = (tmpstr, entry_type)

    for i in range(lennew):
        if(i not in tmpdict):
            # No change recorded for this new repo: show its version
            # as-is, or N/A when the package is absent there too.
            if(packagename not in list_dict_new[i]):
                result1.append("N/A")
                result2.append("")
            else:
                tmpstr = list_dict_new[i][packagename][0][0]
                if not ifreleaseignore:
                    tmpstr = tmpstr + '-' + list_dict_new[i][packagename][0][1]
                result1.append(tmpstr)
                result2.append("")
        else:
            (name, entry_type) = tmpdict[i]
            if entry_type == 1:
                result1.append(name)
                result2.append('class = "new"')
                show_filter[0] = 1
            elif entry_type == 2:
                result1.append("Removed")
                result2.append('class = "removed"')
                flag = 1
                show_filter[1] = 1
            elif entry_type == 3:
                result1.append(name)
                result2.append('class = "updated"')
                show_filter[2] = 1
            elif entry_type == 4:
                result1.append(name)
                result2.append('class = "downgraded"')
                show_filter[3] = 1
    return (result1, result2, flag, show_filter)
|
2012-09-05 15:45:44 +04:00
|
|
|
|
|
|
|
def HTML_OutputBody(dict_packages, list_dict_old, list_dict_new, arg):
    """Output the main table of the HTML report.

    Writes the colour-coding legend and summary, the lists of source
    repository URLs per group, and finally the sortable diff table with
    one row per changed package (subject to the show_* filters).
    """
    old = arg.old
    new = arg.new
    file_output = arg.output
    ifreleaseignore = arg.no_release
    reponames = arg.reponames
    show_summary = arg.show_summary
    # Column filter in GetRepoInfo's show_filter order:
    # [new, removed, updated, downgraded].
    show_mask = [arg.show_new, arg.show_removed, arg.show_updated, arg.show_downgraded]

    # Legend table plus the pre-rendered summary (from PrintSummary).
    file_output.write('<h1>Difference between repositories.</h1>\n' +\
        '<p class="bold">The use of color coding in tables:</p>\n' +\
        '<table>\n' +\
        '<tbody><tr><td class="new">New</td>\n' +\
        '<td class="updated">Updated</td></tr>\n' +\
        '<tr><td class="downgraded">Downgraded</td>\n' +\
        '<td class="removed">Removed</td></tr>\n' +\
        '</tbody></table>\n\n' +\
        '<div>' + arg.summary + '</div>\n')
    repo_list = []

    all_list = []
    for tmp_list in old:
        all_list.extend(tmp_list)
    for tmp_list in new:
        all_list.extend(tmp_list)
    lenold = len(old)
    lennew = len(new)
    length = lenold + lennew

    # List the repository URLs of every "old" group.
    i = 0
    for k in range(lenold):
        if reponames == '':
            tmp_string = 'Repository group A' + str(i)
        else:
            tmp_string = reponames[i]
        tmp_string = '<h2>' + tmp_string + ':</h2>\n<ul>\n'
        for z in range(len(old[k])):
            tmp_string = tmp_string + '<li><a href="' +\
                old[k][z] + '">' + old[k][z] + '</a></li>\n'
        tmp_string = tmp_string + '</ul>\n'
        file_output.write(tmp_string)
        i = i + 1

    # List the repository URLs of every "new" group.
    i = 0
    for k in range(lennew):
        if reponames == '':
            tmp_string = 'Repository group B' + str(i)
        else:
            tmp_string = reponames[i + lenold]
        tmp_string = '<h2>' + tmp_string + ':</h2>\n<ul>\n'
        for z in range(len(new[k])):
            tmp_string = tmp_string + '<li><a href="' +\
                new[k][z] + '">' + new[k][z] + '</a></li>\n'
        tmp_string = tmp_string + '</ul>\n'
        file_output.write(tmp_string)
        i = i + 1

    # Build the header cells: plain <th> for old groups, sortable links
    # (wired to sort_diff from JavaScriptOutput) for new groups.
    i = 0
    while(i < length):
        if i < lenold:
            if reponames == '':
                temp = 'Group A' + str(i)
            else:
                temp = reponames[i]
            repo_list.append('<th>' + temp + '</th>')
        else:
            # Column index within the row (column 0 is the package name).
            ii = i + 1
            if reponames == '':
                temp = 'Group B' + str(i - lenold)
            else:
                temp = reponames[i]
            repo_list.append('<th id="sortCelldiff'+str(ii)+'"><a id="sortCellLinkdiff'+str(ii)+'" title="Sort Ascending" href="javascript:sort_diff('+str(ii)+', \'className\')">'+temp+'</a></th>')
        i = i + 1

    tmp_string = '<table id="table_diff">\n<tbody>\n<tr><th id="sortCelldiff0"><a id="sortCellLinkdiff0" title="Sort Ascending" href="javascript:sort_diff(0, \'string\')">Package name</a></th>'
    for reponame in repo_list:
        tmp_string = tmp_string + reponame
    tmp_string = tmp_string + '</tr>\n'
    file_output.write(tmp_string)

    strnum = 1
    # Column indices of the "new" repository cells.
    resrange = []
    for i in range(lennew):
        resrange.append(lenold + i)

    sorted_list = sorted(dict_packages, key=str.lower)
    for packagename in sorted_list:
        (repo_name, repo_class, flag, show_filter) = GetRepoInfo(dict_packages, packagename,
            lenold, lennew, list_dict_old, list_dict_new, ifreleaseignore)
        # Skip the row unless at least one of its change types is enabled.
        res = 0
        for i in range(4):
            if show_filter[i]*show_mask[i] == 1:
                res = 1
        if res == 0:
            continue

        if flag:
            # The package is "Removed" in some new repo; if it survives in
            # at least one other new repo, downgrade those cells to "N/A"
            # so the row is not painted as fully removed.
            res = 0
            for k in resrange:
                if(repo_name[k] != "Removed"):
                    res = 1
            if res:
                for k in resrange:
                    if(repo_name[k] == "Removed"):
                        repo_name[k] = "N/A"
                        repo_class[k] = ''

        # Alternate row striping.
        if strnum % 2:
            strtype = "odd"
        else:
            strtype = "even"
        tmp_string = '<tr class="' + strtype + '">'
        tmp_string = tmp_string + '<td>' + packagename + '</td>'
        for i in range(length):
            tmp_string = tmp_string + '<td ' + repo_class[i] + '>' +\
                repo_name[i] + '</td>'
        tmp_string = tmp_string + '</tr>\n'
        file_output.write(tmp_string)
        strnum = strnum + 1
    file_output.write('</tbody>\n</table>\n')
|
|
|
|
|
2012-09-05 15:45:44 +04:00
|
|
|
def HTML_OutputTail(file_output):
    """Output the end of the HTML document.

    Writes the static script block that initializes the table sorter
    (init_diff from JavaScriptOutput), then closes body and html.
    """
    file_output.write('''
<script type='text/javascript'>
/* <![CDATA[ */
init_diff();
/* ]]> */
</script>
''');
    file_output.write('</body>\n</html>\n')
|
2012-09-21 12:33:45 +04:00
|
|
|
|
2012-09-05 15:45:44 +04:00
|
|
|
def HTML_Output(dict_packages, list_dict_old, list_dict_new, arg):
|
|
|
|
"""Output HTML file.
|
2012-09-21 12:33:45 +04:00
|
|
|
|
2012-09-05 15:45:44 +04:00
|
|
|
Generates HTML file.
|
|
|
|
"""
|
|
|
|
ifnotquiet = arg.quiet
|
|
|
|
file_output = arg.output
|
|
|
|
|
|
|
|
if ifnotquiet:
|
|
|
|
print _("Creating HTML file.")
|
|
|
|
HTML_OutputHead(file_output)
|
|
|
|
HTML_OutputBody(dict_packages, list_dict_old, list_dict_new, arg)
|
2012-09-21 12:33:45 +04:00
|
|
|
HTML_OutputTail(file_output)
|
2012-09-05 15:45:44 +04:00
|
|
|
|
|
|
|
def main(args):
    """Drive the whole repodiff run.

    Parses the command line, prepares temporary directories, downloads
    and unpacks repository metadata, then emits either the plain-text
    diff or the HTML report depending on the ``--html`` flag.

    ``args`` is accepted for symmetry with ``sys.argv`` but the actual
    options are re-read by ParseCommandLine().
    """
    arg = ParseCommandLine()
    # Every downloaded/unpacked file lives under a throw-away directory.
    arg.temp_dir = tempfile.mkdtemp() + '/'
    head_old = arg.temp_dir + old_dir
    head_new = arg.temp_dir + new_dir
    # Per-repository temp subdirectories, filled in the loops below.
    arg.temp_old = []
    arg.temp_new = []
    # --output/--ignore arrive as one-element lists; unwrap them.
    if (arg.output):
        tmp_output = arg.output[0]
    else:
        tmp_output = default_output
    if (arg.ignore):
        arg.ignore = arg.ignore[0]
    # Stash the output path: CheckArgs() below runs with arg.output
    # unset, and the real value is restored right after the loops.
    arg.output = None;
    # Normalize each old-repository URL and give every (group, repo)
    # pair its own temp subdirectory named "<i>-<j>/".
    for i in range(len(arg.old)):
        arg.temp_old.append([])
        for j in range(len(arg.old[i])):
            arg.old[i][j] = CheckArgs(arg.old[i][j], arg)
            arg.temp_old[i].append(head_old + str(i) + '-' + str(j) + '/')
    # Same normalization for the new repositories.
    for i in range(len(arg.new)):
        arg.temp_new.append([])
        for j in range(len(arg.new[i])):
            arg.new[i][j] = CheckArgs(arg.new[i][j], arg)
            arg.temp_new[i].append(head_new + str(i) + '-' + str(j) + '/')
    arg.output = tmp_output
    CheckOutput(arg)
    CheckParam(arg)

    # Cache option flags under the local names used below.
    # NOTE(review): ifsizes and output_file are assigned but never read
    # in this function; ifnotsimple/ifnotquiet hold the raw flag values
    # -- the "not" semantics presumably come from how the options are
    # declared at parse time; confirm against ParseCommandLine().
    ifsizes = arg.size
    ifnotsimple = arg.simple
    output_file = arg.output
    ifnotquiet = arg.quiet
    ifhtml = arg.html
    ifchangelog = arg.changelog
    # When the user selected no section at all, enable every section.
    if (not arg.show_new) and (not arg.show_removed) and\
       (not arg.show_updated) and (not arg.show_downgraded):
        arg.show_new=True
        arg.show_removed=True
        arg.show_updated=True
        arg.show_downgraded=True

    # Optional list of package names to skip in the comparison.
    if arg.ignore:
        arg.ignorelist = ReadIgnoreList(arg)
    else:
        arg.ignorelist = []

    # Download and unpack the metadata for every repository.
    GetFiles(arg)

    if not ifhtml:
        # --- plain-text report ---
        (dict_old, dict_new) = ParsePackage(arg)
        (dict_new_packages, dict_del_packages, dict_upd_packages) = CreateDicts(
            dict_old, dict_new)
        # Large dicts are cleared as soon as they are no longer needed.
        dict_old = ''
        dict_obsoleted = GenerateDictObsoleted(dict_new, ifnotquiet)
        dict_new = ''
        # Changelog diffs are produced only when there are updated
        # packages and both the non-simple and changelog modes are on;
        # otherwise an empty mapping keeps later calls happy.
        if(dict_upd_packages) and (ifnotsimple) and (ifchangelog):
            dict_logfile_diff = GenerateLogfileDiff(dict_upd_packages, arg)
        if not ifnotsimple or not ifchangelog:
            dict_logfile_diff = {}
        if arg.show_new:
            ProcessNewPackages(dict_new_packages, arg.output)
        if arg.show_removed:
            ProcessDelPackages(dict_del_packages, dict_obsoleted, arg.output)
        if dict_upd_packages and (arg.show_updated or arg.show_downgraded):
            ProcessUpdPackages(dict_upd_packages, dict_logfile_diff, arg)
        if arg.show_summary:
            PrintSummary(dict_new_packages, dict_del_packages, dict_upd_packages, arg)
    else:
        # --- HTML report ---
        (list_dict_old, list_dict_new) = HTML_ParsePackage(arg)
        dict_old = HTML_UniteDicts(list_dict_old)
        if arg.show_summary:
            # The summary additionally needs the united new-side dict.
            dict_new = HTML_UniteDicts(list_dict_new)
            (dict_new_packages, dict_del_packages, dict_upd_packages) = CreateDicts(
                dict_old, dict_new)
            arg.summary = PrintSummary(dict_new_packages, dict_del_packages, dict_upd_packages, arg)
            # Release the summary-only data before building the table.
            dict_new = ''
            dict_new_packages = ''
            dict_del_packages = ''
            dict_upd_packages = ''
        dict_packages = HTML_CreateDicts(dict_old, list_dict_new)
        dict_old = ''
        HTML_Output(dict_packages, list_dict_old, list_dict_new, arg)

    # NOTE(review): exit_proc presumably cleans up arg.temp_dir and the
    # output handle -- confirm against its definition.
    exit_proc(arg)
|
|
|
|
|
|
|
|
# Script entry point: forwards the full argv, although main() re-parses
# the command line itself via ParseCommandLine().
if __name__ == "__main__":
    main(sys.argv)
|