Mirror of https://abf.rosa.ru/djam/urpm-tools.git (synced 2025-02-23 09:22:47 +00:00)

Commit bfeecbd388 (parent fb6d8d3561): repodiff: new options, fixes

1 changed file with 316 additions and 205 deletions: urpm-repodiff.py (521 changed lines)
@@ -75,35 +75,47 @@ timeout = 5

def ParseCommandLine():
"""Parse arguments.

Parse arguments from command line.
Return these arguments.
"""
parser = argparse.ArgumentParser(
description=_("Tool for comparing sets of repositories."))
parser.add_argument("--old", "-o", action="store", nargs='+', required="True",
group = parser.add_argument_group(_('global parameters'),
description=_("Parameters used in all cases."))
group.add_argument("--old", "-o", action="append", nargs='+', required="True",
metavar="OLD_REPO", help=_("URL or PATH to old repositories"))
parser.add_argument("--new", "-n", action="store", nargs='+', required="True",
group.add_argument("--new", "-n", action="append", nargs='+', required="True",
metavar="NEW_REPO", help=_("URL or PATH to new repositories"))
parser.add_argument("--size", "-s", action="store_true",
help=_("Show differences in package sizes."))
parser.add_argument("--simple", action="store_false",
help=_("Simple output format."))
parser.add_argument("--quiet", "-q", action="store_false",
group.add_argument("--quiet", "-q", action="store_false",
help=_("Hide service messages."))
parser.add_argument("--changelog", "-c", action="store_true",
group.add_argument("--no-release", "-r", action="store_true",
help=_("Ignore release during package compare."))
group.add_argument("--show-summary", action="store_true",
help=_("Output summary."))
group.add_argument("--output", "-out", action="store", nargs=1, default='',
metavar="OUTPUT_FILE", help=_("Change standart output to \"OUTPUT_FILE\"."))
group.add_argument("--ignore", "-i", action="store", nargs=1, default='',
metavar="IGNORELIST", help=_("File with list of ignored packages"))
group = parser.add_argument_group(_('text mode parameters'),
description=_("Parameters used only in text mode. (--html not present)"))
group.add_argument("--size", "-s", action="store_true",
help=_("Show differences in package sizes."))
group.add_argument("--simple", action="store_false",
help=_("Simple output format."))
group.add_argument("--changelog", "-c", action="store_true",
help=_("Show changelog difference."))
parser.add_argument("--html", action="store_true",
group = parser.add_argument_group(_('HTML mode parameters'),
description=_("Parameters used only in HTML mode. (--html is present)"))
group.add_argument("--html", action="store_true",
help=_("Output in HTML format, if --output is not present\
\"%s\" will be created in current directory. \
--size, --simple and --changelog options are ignored.") % htmlname)
parser.add_argument("--no-release", "-r", action="store_true",
help=_("Ignore release during package compare."))
parser.add_argument("--output", "-out", action="store", nargs=1, default='',
metavar="OUTPUT_FILE", help=_("Change standart output to \"OUTPUT_FILE\"."))
group = parser.add_argument_group('Filters',
description="Filters for output. If none selected then every type will\
be shown")
group.add_argument("--reponames", action="store", nargs='+', default='',
metavar="REPONAME", help=_("Repository names for output."))
group = parser.add_argument_group(_('Filters'),
description=_("Filters for output. If none selected then every type will\
be shown"))
group.add_argument("--show-new", "-N", action="store_true",
help=_("Show new packages"))
group.add_argument("--show-removed", "-R", action="store_true",
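The switch from action="store" to action="append" on --old/--new is what makes repository groups possible: each occurrence of the flag now contributes its own list of URLs. A minimal standalone sketch of that argparse behaviour (illustrative only, the option values are invented and this is not part of the commit):

import argparse

# "--old"/"--new" with action="append" and nargs='+' collect one list per
# occurrence of the flag, i.e. a list of repository groups.
parser = argparse.ArgumentParser()
parser.add_argument("--old", "-o", action="append", nargs='+', required=True)
parser.add_argument("--new", "-n", action="append", nargs='+', required=True)

args = parser.parse_args(
    "--old http://repo/a/ http://repo/b/ --old http://repo/c/ --new http://repo/d/".split())
print(args.old)   # [['http://repo/a/', 'http://repo/b/'], ['http://repo/c/']]
print(args.new)   # [['http://repo/d/']]

That nested list shape is why later hunks in this commit iterate over arg.old[i][j] and arg.temp_old[i][j] instead of arg.old[i].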
@@ -129,7 +141,7 @@ def exit_proc(arg):

def CheckURL(url, arg):
"""URL check.

Check that URL is gettable.
"""
try:

@@ -140,7 +152,7 @@ def CheckURL(url, arg):

def CheckArgs(urlpath, arg):
"""Trivial checks.

Check that url or path is correct.
"""
if (urlpath.startswith("http://") or urlpath.startswith("ftp://")):

@@ -170,15 +182,15 @@ def CheckArgs(urlpath, arg):
print _("Error: \"%s\" is not correct url, path or name of repository") % urlpath
exit_proc(arg)
return urlpath

def CheckOutput(arg):
"""Check output file.

Check if the file can be created and redirect standart output to this file.
"""
file_output = arg.output
ifhtml = arg.html

if (file_output == default_output):
if(ifhtml):
try:

@@ -190,7 +202,7 @@ def CheckOutput(arg):
else:
arg.output = sys.stdout
return

if(file_output != ''):
if(os.path.isfile(file_output)):
print _("Error: File %s already exists") % file_output

@@ -212,19 +224,25 @@ def CheckOutput(arg):

def CheckParam(arg):
"""Check parameters.

Ignore some parameters in HTML-case.

Ignore some parameters in HTML-mode. Ignore some parameters in text-mode.
"""
if arg.html:
arg.size = 0
arg.simple = 0
arg.changelog = 0
if (arg.reponames != '') and (len(arg.old) + len(arg.new) != len(arg.reponames)):
print _("Error: number of REPONAME's(%s) are not equal to number of groups(%s)") % \
(str(len(arg.reponames)), str(len(arg.old) + len(arg.new)))
exit_proc(arg)
else:
arg.repnames = ''

def GetFile(urlpath, filename, localdir, arg):
"""Donwload archive.
"""
ifnotquiet = arg.quiet

if not os.path.isdir(localdir):
os.makedirs(os.path.realpath(localdir))
if ifnotquiet:
@@ -254,33 +272,53 @@ def GetFiles(arg):
file_name = []
file_path = []
for i in range(len(arg.old)):
file_name.append(synthesis_arch)
file_dir.append(arg.temp_old[i])
file_path.append(arg.old[i] + "media_info/")
if ifchangelog:
file_name.append(changelog_arch)
file_dir.append(arg.temp_old[i])
file_path.append(arg.old[i] + "media_info/")
for j in range(len(arg.old[i])):
file_name.append(synthesis_arch)
file_dir.append(arg.temp_old[i][j])
file_path.append(arg.old[i][j] + "media_info/")
if ifchangelog:
file_name.append(changelog_arch)
file_dir.append(arg.temp_old[i][j])
file_path.append(arg.old[i][j] + "media_info/")

for i in range(len(arg.new)):
file_name.append(synthesis_arch)
file_dir.append(arg.temp_new[i])
file_path.append(arg.new[i] + "media_info/")
if ifchangelog:
file_name.append(changelog_arch)
file_dir.append(arg.temp_new[i])
file_path.append(arg.new[i] + "media_info/")
for j in range(len(arg.new[i])):
file_name.append(synthesis_arch)
file_dir.append(arg.temp_new[i][j])
file_path.append(arg.new[i][j] + "media_info/")
if ifchangelog:
file_name.append(changelog_arch)
file_dir.append(arg.temp_new[i][j])
file_path.append(arg.new[i][j] + "media_info/")

for i in range(len(file_name)):
GetFile(file_path[i], file_name[i], file_dir[i], arg)

def ReadIgnoreList(arg):
ignorefile = arg.ignore
ignorelist = []
if not os.path.isfile(ignorefile):
print _("Error: file %s does not exist.") % ignorefile
exit_proc(arg)
try:
ifile = open(ignorefile)
for string in ifile:
if string != '':
ignorelist.append(string)
ifile.close()
ignorelist.sort()
except:
print _("Error: file %s cannot be read.") % ignorefile
exit_proc(arg)
return ignorelist

def RenameSynthFile(localdir, arg):
"""Rename.

Rename Synthesis file so zgip can understand format.
"""
ifnotquiet = arg.quiet

if not os.path.isfile(localdir + synthesis_arch):
print _("Error: file not found: ") + localdir + synthesis_arch
exit_proc(arg)
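Context for the rename done in the next hunk: urpmi synthesis archives (synthesis.hdlist.cz) are ordinary gzip data with a non-standard suffix, so giving them a ".gz" name lets standard tooling unpack them. A rough, hedged sketch of that step in plain Python (file paths are illustrative and not taken from the script's constants):

import gzip
import os
import shutil

packed = "tmp_old0-0/synthesis.hdlist.cz"    # illustrative path, assumed gzip data
renamed = "tmp_old0-0/synthesis.hdlist.gz"
os.rename(packed, renamed)                   # give gzip a suffix it recognises
with gzip.open(renamed, "rb") as src, open("tmp_old0-0/synthesis.hdlist", "wb") as dst:
    shutil.copyfileobj(src, dst)             # unpack for later parsing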
@@ -288,7 +326,7 @@ def RenameSynthFile(localdir, arg):
os.rename(localdir + synthesis_arch, localdir + synthesis_arch_renamed)
except OSError:
print _("Error: cannot rename file %(from)s to %(to)s") % {"from": synthesis_arch, "to": synthesis_arch_renamed}

exit_proc(arg)
if not os.path.isfile(localdir + synthesis_arch_renamed):
print _("Error: file %s is missing.") % (localdir + synthesis_arch_renamed)

@@ -296,10 +334,10 @@ def RenameSynthFile(localdir, arg):
else:
if ifnotquiet:
print _("file %(from)s was renamed to %(to)s") % {"from": synthesis_arch, "to": synthesis_arch_renamed}

def UnpackFiles(files_dir, ifchangelog, ifnotquiet):
"""Unpack.

Unpack needed files in selected directory.
"""
if ifchangelog:

@@ -312,7 +350,7 @@ def UnpackFiles(files_dir, ifchangelog, ifnotquiet):

def ParseVersion(names_list):
"""Parse version info is present.

Parse version information from the field. e.g. provided_name[>= 1.2.3-4.5.6]
is parsed to (provided_name, sign, (epoch, version, release))
"""

@@ -346,9 +384,9 @@ def ParseVersion(names_list):

def ParseSynthesis(synthfile, pkgdict, arg):
"""Collect info about packages.

Parse synthesis.hdlist file (or add new entries to pkgdict).

pkgdict is a dictionary with format:
pkgdict[name]=(verrel,(s0,s1,s2))
where:
@@ -399,16 +437,16 @@ def ParseSynthesis(synthfile, pkgdict, arg):

def ChkDist(disttag, distepoch):
"""No minus in tag and epoch.

Trivial check that tag and epoch hasn't got '-' in their name
"""
if minus_check.search(disttag) or minus_check.search(distepoch):
print _("REPODIFF-Warning: strange format of <disttag> or <distepoch>: ") +\
disttag + distepoch

def ChkTagEpoch(i):
"""No minus in tag and epoch.

Trivial check that tag and epoch hasn't got '-' in their name
"""
if len(i) == 4:

@@ -428,18 +466,20 @@ def ChkTagEpoch(i):

def RPMNameFilter(rpmname, disttagepoch, ifreleaseignore):
"""Parse name and verrel.

Function that parses name, version and release of a package.
"""
string = rpmname.split('-')
lastpart = string.pop()
tmp = lastpart.split('.')
issrc = (tmp.pop() == "src")
ismageia = 0
if tmp[-1].startswith("mga"):
tmp.pop()
ismageia = 1
lastpart = '.'.join(tmp)
if (lastpart[0].isdigit() or (not lastpart.startswith(disttagepoch))) and\
((not lastpart.isdigit()) or issrc):
((not lastpart.isdigit()) or issrc or ismageia):
name = '-'.join(string[:-1])
ver = string[-1]
rel = lastpart

@@ -450,10 +490,10 @@ def RPMNameFilter(rpmname, disttagepoch, ifreleaseignore):
if ifreleaseignore:
rel = ""
return (name, ver, rel)

def compare_versions(first_entry, second_entry):
"""Compare two verrel tuples.

dict_entry and comp_entry are verrel tuples
verrel = (version, release, epoch).
Return 1 if the first argument is higher.
@@ -464,7 +504,7 @@ def compare_versions(first_entry, second_entry):
(version2, release2, second_epoch) = second_entry
return(rpm.labelCompare((first_epoch, version1, release1),
(second_epoch, version2, release2)))

def ParsePackage(arg):
"""Processing files, parsing synthesis, getting pkgdict.
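For reference, compare_versions() above is a thin wrapper around rpm.labelCompare from the Python rpm bindings, which compares two (epoch, version, release) triples and returns -1, 0 or 1. A small standalone illustration (not part of the diff):

import rpm

# labelCompare((epoch, version, release), (epoch, version, release)) -> -1 / 0 / 1
print(rpm.labelCompare(('0', '1.2.3', '1'), ('0', '1.2.3', '2')))  # -1: second is newer
print(rpm.labelCompare(('0', '2.0', '1'), ('0', '1.9', '9')))      # 1: first is newer
print(rpm.labelCompare(('1', '1.0', '1'), ('0', '9.9', '9')))      # 1: higher epoch wins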
@@ -480,23 +520,25 @@ def ParsePackage(arg):
ifchangelog = arg.changelog
ifnotquiet = arg.quiet
pkgdict_old = {}
for directory in arg.temp_old:
RenameSynthFile(directory, arg)
UnpackFiles(directory, ifchangelog, ifnotquiet)
ParseSynthesis(directory + synthesis_file, pkgdict_old, arg)
for tmp_list in arg.temp_old:
for directory in tmp_list:
RenameSynthFile(directory, arg)
UnpackFiles(directory, ifchangelog, ifnotquiet)
ParseSynthesis(directory + synthesis_file, pkgdict_old, arg)
pkgdict_new = {}
for directory in arg.temp_new:
RenameSynthFile(directory, arg)
UnpackFiles(directory, ifchangelog, ifnotquiet)
ParseSynthesis(directory + synthesis_file, pkgdict_new, arg)
for tmp_list in arg.temp_new:
for directory in tmp_list:
RenameSynthFile(directory, arg)
UnpackFiles(directory, ifchangelog, ifnotquiet)
ParseSynthesis(directory + synthesis_file, pkgdict_new, arg)
return pkgdict_old, pkgdict_new

def CreateDicts(dict_old, dict_new):
"""Creating dictionaries.

Creating dictionaries for new, updated and removed(deleted) packages
from two dictionaries: old and new, for old and new repositories.

dict_old, dict_new are dictionaries with format:
pkgdict[name]=(verrel,(s0,s1,s2))
where:

@@ -505,7 +547,7 @@ def CreateDicts(dict_old, dict_new):
s0[] - is package info
s1 - is package summary
s2[] - is list of obsoleted packages

dict_new_packages and dict_del_packages have the same format.
dict_upd_packages has format:
dict_upd_packages[name]=((verrel_old,(so0,so1,so2)),

@@ -516,7 +558,7 @@ def CreateDicts(dict_old, dict_new):
dict_new_packages = {}
dict_del_packages = {}
dict_upd_packages = {}

for name in dict_new:
if(name in dict_old): #updated or downgraded
compare_result = compare_versions(dict_new[name][0],

@@ -534,7 +576,7 @@ def CreateDicts(dict_old, dict_new):

def ProcessNewPackages(dict_new_packages, file_output):
"""Processing newly added packages.

dict_new_packages[name]=(verrel,(s0,s1,s2))
where:
name - is package name parsed from package filename

@@ -550,7 +592,7 @@ def ProcessNewPackages(dict_new_packages, file_output):

def GenerateDictObsoleted(dict_new, ifnotquiet):
"""Generate Dictionary of obsoleted packages.

pkgdict[name]=(verrel,(s0,s1,s2))
where:
name - is package name parsed from package filename

@@ -604,7 +646,7 @@ def compare_verrel(verrel1, sign, verrel2):

def ProcessDelPackages(dict_del_packages, dict_obsoleted, file_output):
"""Process deleted packages.

Printing every deleted package. Show if package is obsoleted.
pkgdict[name]=(verrel,(s0,s1,s2))
where:

@@ -613,7 +655,7 @@ def ProcessDelPackages(dict_del_packages, dict_obsoleted, file_output):
s0[] - is package info
s1 - is package summary
s2[] - is list of obsoleted packages

dict_obsoleted is dictionary
dict_obsoleted[name]=[obs1, ...]
"""

@@ -631,16 +673,16 @@ def ProcessDelPackages(dict_del_packages, dict_obsoleted, file_output):

def ParseLogfile(dict_log, logfile, dict_upd_packages, mode, arg):
"""Parse Changelog.

mode == 0 - for old changelog: we search only for 1st entry in changelog
mode == 1 - for new changelog: we collect entries from changelog untill
we find remembered entry from changelog

Parse changelog.xml to compare changes between updated packages.
dict_log - is dictionary with format:
dict_log[name] =
[(verrel, (time,name,text)), (verrel,[(time,name,text),...])]

dict_upd_packages[name] = [old_pkg[name],new_pkg[name],ifdowngraded]
or dict_upd_packages[name] =
[(verler,(s0,s1,s2)),(verrel,(s0,s1,s2)),ifdowngraded]

@@ -667,7 +709,7 @@ def ParseLogfile(dict_log, logfile, dict_upd_packages, mode, arg):
if(tag_changelog.name != "changelogs"):
tag_changelog = tag_changelog.next
continue

tag_property = tag_changelog.properties
pkgname = ''
disttag = ''

@@ -767,10 +809,10 @@ def ParseLogfile(dict_log, logfile, dict_upd_packages, mode, arg):
dict_log[result_key][1] = (verrel, new_result)
tag_changelog = tag_changelog.next
doc.freeDoc()

def GenerateLogfileDiff(dict_upd_packages, arg):
"""Changelog difference list.

Generate changelog difference list.
dict_upd_packages[name] = [old_pkg[name],new_pkg[name],ifdowngraded]
or dict_upd_packages[name] = [(verler,(s0,s1,s2)),(verrel,(s0,s1,s2)),ifdowngraded]

@@ -783,12 +825,14 @@ def GenerateLogfileDiff(dict_upd_packages, arg):
print _("Generating changes list.")
dict_logfile_diff = {}
dict_log = {}

for old_dir in temp_old:
ParseLogfile(dict_log, old_dir + changelog_file, dict_upd_packages, 0, arg)
for new_dir in temp_new:
ParseLogfile(dict_log, new_dir + changelog_file, dict_upd_packages, 1, arg)

for i in temp_old:
for old_dir in temp_old[i]:
ParseLogfile(dict_log, old_dir + changelog_file, dict_upd_packages, 0, arg)
for i in temp_new:
for new_dir in temp_new[i]:
ParseLogfile(dict_log, new_dir + changelog_file, dict_upd_packages, 1, arg)

for name in dict_upd_packages:
if(name in dict_log):
if dict_log[name][1]:

@@ -800,12 +844,12 @@ def GenerateLogfileDiff(dict_upd_packages, arg):
print _("REPODIFF-Warning: Package %s was not described in changelogs.xml") % name
entry = [(0, '', _("REPODIFF-Warning: Changelogs of a package are absent."))]
dict_logfile_diff[name] = entry

return dict_logfile_diff

def ChangelogPrint(changes_list, file_output):
"""Changelog difference.

Output changes in changelog.
changes_list is list with format:
changes_list = [(time,author,text)]

@@ -816,10 +860,10 @@ def ChangelogPrint(changes_list, file_output):
" " + entry[1] + '\n' + entry[2] + '\n\n')
else:
file_output.write('\n')

def PrintLogfileDiff(package_name, dict_logfile_diff, file_output):
"""Changelog difference.

Output changes in changelog.
dict_logfile_diff is dictionary with format:
dict_logfile_diff[name] = [(time,author,text)]

@@ -831,7 +875,7 @@ def PrintLogfileDiff(package_name, dict_logfile_diff, file_output):

def ProcessUpdPackages(dict_upd_packages, dict_logfile_diff, arg):
"""Process updated packages.

ifsizes - is indicator: should we (1) or should we not (0) print
difference in package sizes.
ifnotsimple - is indicator: should we (0) or shoudl we not (1) print
@@ -875,33 +919,55 @@ def ProcessUpdPackages(dict_upd_packages, dict_logfile_diff, arg):
int(dict_upd_packages[name][0][1][0][2])
file_output.write(_("Size Change: %d bytes\n\n") % sizediff)

def PrintSummary(dict_new_packages, dict_del_packages, dict_upd_packages, file_output):
def PrintSummary(dict_new_packages, dict_del_packages, dict_upd_packages, arg):
"""Output summary.

Output summary: numbers of new/removew/updated packages at all.
"""
file_output.write("Summary:\n")
length = len(dict_new_packages)
if length:
file_output.write(_(" Total added packages: ") + str(length) + '\n')
length = len(dict_del_packages)
if length:
file_output.write(_(" Total removed packages: ") + str(length) + '\n')
length = 0
length_d = 0
for packagename in dict_upd_packages:
if dict_upd_packages[packagename][2] == 0:
length = length + 1
else:
length_d = length_d + 1
if length:
file_output.write(_(" Total updated packages: ") + str(length) + '\n')
if length_d:
file_output.write(_(" Total downgraded packages: ") + str(length_d) + '\n')
file_output = arg.output
ifhtml = arg.html

if ifhtml:
endstr = '<br />'
else:
endstr = '\n'

tmp_str = _("Summary:")
if ifhtml:
tmp_str = '<p class="bold">' + tmp_str + '</p>'
else:
tmp_str = tmp_str + endstr
if arg.show_new:
length = len(dict_new_packages)
if length:
tmp_str = tmp_str + (_(" Total added packages: ") + str(length) + endstr)
if arg.show_removed:
length = len(dict_del_packages)
if length:
tmp_str = tmp_str + (_(" Total removed packages: ") + str(length) + endstr)
if arg.show_updated or arg.show_downgraded:
length = 0
length_d = 0
for packagename in dict_upd_packages:
if dict_upd_packages[packagename][2] == 0:
length = length + 1
else:
length_d = length_d + 1
if arg.show_updated:
if length:
tmp_str = tmp_str + (_(" Total updated packages: ") + str(length) + endstr)
if arg.show_downgraded:
if length_d:
tmp_str = tmp_str + (_(" Total downgraded packages: ") + str(length_d) + endstr)

if ifhtml:
return tmp_str
else:
file_ouput.write(tmp_str)

def HTML_ParsePackage(arg):
"""Parse hdlist.

HTML-specific ParsePackage(). Calls for ParsePackage
"""
ifchangelog = arg.changelog
@@ -910,39 +976,41 @@ def HTML_ParsePackage(arg):
html_old_dict_list = []
html_new_dict_list = []

for directory in arg.temp_old:
for tmp_list in arg.temp_old:
tmp_dict = {}
RenameSynthFile(directory, arg)
UnpackFiles(directory, 0, ifnotquiet)
ParseSynthesis(directory + synthesis_file, tmp_dict, arg)
html_old_dict_list.append(tmp_dict)
for directory in arg.temp_new:
tmp_dict = {}
RenameSynthFile(directory, arg)
UnpackFiles(directory, 0, ifnotquiet)
ParseSynthesis(directory + synthesis_file, tmp_dict, arg)
for directory in tmp_list:
RenameSynthFile(directory, arg)
UnpackFiles(directory, 0, ifnotquiet)
ParseSynthesis(directory + synthesis_file, tmp_dict, arg)
html_old_dict_list.append(tmp_dict)
for tmp_list in arg.temp_new:
tmp_dict = {}
for directory in tmp_list:
RenameSynthFile(directory, arg)
UnpackFiles(directory, 0, ifnotquiet)
ParseSynthesis(directory + synthesis_file, tmp_dict, arg)
html_new_dict_list.append(tmp_dict)
return html_old_dict_list, html_new_dict_list

def HTML_UniteOld(list_dict_old):
def HTML_UniteDicts(list_dict):
"""Union of dictionaries.

HTML-specific.
"""
dict_old = list_dict_old[0]
i = 1
while(i < len(list_dict_old)):
for name in list_dict_old[i]:
if name not in dict_old:
dict_old[name] = list_dict_old[i][name]
elif(compare_versions(dict_old[name][0], list_dict_old[i][name][0]) == -1):
dict_old[name] = list_dict_old[i][name]
dict_all = {}
i = 0
while(i < len(list_dict)):
for name in list_dict[i]:
if name not in dict_all:
dict_all[name] = list_dict[i][name]
elif(compare_versions(dict_all[name][0], list_dict[i][name][0]) == -1):
dict_all[name] = list_dict[i][name]
i = i + 1
return dict_old

return dict_all

def HTML_CreateDicts(dict_old, list_dict_new):
"""Create dictionary of packages.

Dictionary of packages and types of changes.
"""
dict_packages = {}

@@ -969,7 +1037,7 @@ def HTML_CreateDicts(dict_old, list_dict_new):

def CssOutput():
"""Output style.

Output contents of style tag or to .css file.
"""
csscontent = '\nbody {\nfont-size: 1em;\nmargin: 1em;\ncolor: black;\nbackground-color: white;\n}\n' +\

@@ -989,7 +1057,7 @@ def CssOutput():

def JavaScriptOutput():
"""Output scripts.

Output javascript to script tag or to .js file.
"""
javacontent = """

@@ -1057,7 +1125,7 @@ TableSorter.prototype.sort = function (col, type) {
if (this.lastSortOrderAsc == false) {
newRows.reverse();
}

var count = 0;
var newclass;
for (i = 0; i < newRows.length; i++) {
@@ -1153,25 +1221,27 @@ function sort_diff(col, type) {
}
"""
return javacontent

def HTML_OutputHead(file_output):
"""Output beginning of the document.

Outputs static text.
"""
file_output.write('<!--?xml version="1.0" encoding="UTF-8"?-->\n' +\
'<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">\n' +\
'<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">\n' +
'<head>\n' +\
'<title>Differences between Mandriva / Rosa releases</title>\n' +\
'<meta name="keywords" content="Mandriva,Rosa,RPM,changes"/>\n' +\
'<meta name="description" content="List of changes between Mandriva / Rosa releases"/>\n' +\
'<title>Differences between repositories</title>\n' +\
'<meta name="keywords" content="Mandriva,Rosa,RPM,repository,difference,changes"/>\n' +\
'<meta name="description" content="List of changes between repositories"/>\n' +\
'<meta http-equiv="content-type" content="text/html; charset=UTF-8"/>\n' +\
'<style type="text/css">' +\
CssOutput() +\
'</style>\n' +\
'<script language="JavaScript" type="text/javascript">' +\
'<script type="text/javascript">' +\
'/* <![CDATA[ */' +\
JavaScriptOutput() +\
'/* ]]> */' +\
'</script>\n' +\
'</head>\n' +\
'<body>\n\n')

@@ -1179,7 +1249,7 @@ def HTML_OutputHead(file_output):
def GetRepoInfo(dict_packages, packagename, lenold, lennew, list_dict_old,
list_dict_new, ifreleaseignore):
"""Generate package-specific information.

Generates class and name to be displayed in the table.
"""
result1 = []

@@ -1196,7 +1266,7 @@ def GetRepoInfo(dict_packages, packagename, lenold, lennew, list_dict_old,
else:
result1.append("N/A")
result2.append('')

tmplist = dict_packages[packagename]
tmpdict = {}
for (entry, reponum, entry_type) in dict_packages[packagename]:

@@ -1204,7 +1274,7 @@
if not ifreleaseignore:
tmpstr = tmpstr + '-' + entry[0][1]
tmpdict[reponum] = (tmpstr, entry_type)

for i in range(lennew):
if(i not in tmpdict):
if(packagename not in list_dict_new[i]):

@@ -1235,18 +1305,20 @@ def GetRepoInfo(dict_packages, packagename, lenold, lennew, list_dict_old,
result1.append(name)
result2.append('class = "downgraded"')
show_filter[3] = 1

return (result1, result2, flag, show_filter)

def HTML_OutputBody(dict_packages, list_dict_old, list_dict_new, arg):
"""Output table.

Outputs table in HTML format.
"""
old = arg.old
new = arg.new
file_output = arg.output
ifreleaseignore = arg.no_release
reponames = arg.reponames
show_summary = arg.show_summary
show_mask = [arg.show_new, arg.show_removed, arg.show_updated, arg.show_downgraded]

file_output.write('<h1>Difference between repositories.</h1>\n' +\
@@ -1256,59 +1328,70 @@ def HTML_OutputBody(dict_packages, list_dict_old, list_dict_new, arg):
'<td class="updated">Updated</td></tr>\n' +\
'<tr><td class="downgraded">Downgraded</td>\n' +\
'<td class="removed">Removed</td></tr>\n' +\
'</tbody></table>\n\n')
'</tbody></table>\n\n' +\
'<div>' + arg.summary + '</div>\n')
repo_list = []

all_list = []
all_list.extend(old)
all_list.extend(new)
for tmp_list in old:
all_list.extend(tmp_list)
for tmp_list in new:
all_list.extend(tmp_list)
lenold = len(old)
lennew = len(new)
length = lenold + lennew

reptext = 'repositories' if lenold > 1 else 'repository'
tmp_string = '<h2>Old ' + reptext + ':</h2>\n<ul>\n'
for i in range(lenold):
tmp_string = tmp_string + '<li>Repository ' + str(i) + ' : <a href="' +\
old[i] + '">' + old[i] + '</a></li>\n'
tmp_string = tmp_string + '</ul>\n'
file_output.write(tmp_string)

reptext = 'repositories' if lennew > 1 else 'repository'
tmp_string = '<h2>New ' + reptext + ':</h2>\n<ul>\n'
for k in range(lennew):
i = 0
for k in range(lenold):
if reponames == '':
tmp_string = 'Repository group A' + str(i)
else:
tmp_string = reponames[i]
tmp_string = '<h2>' + tmp_string + ':</h2>\n<ul>\n'
for z in range(len(old[k])):
tmp_string = tmp_string + '<li><a href="' +\
old[k][z] + '">' + old[k][z] + '</a></li>\n'
tmp_string = tmp_string + '</ul>\n'
file_output.write(tmp_string)
i = i + 1
tmp_string = tmp_string + '<li>Repository ' + str(i) + ' : <a href="' +\
new[k] + '">' + new[k] + '</a></li>\n'
tmp_string = tmp_string + '</ul>\n'
file_output.write(tmp_string)

tmp_string = '<h2>Difference between '
i = 0
for k in range(lennew):
if reponames == '':
tmp_string = 'Repository group B' + str(i)
else:
tmp_string = reponames[i + lenold]
tmp_string = '<h2>' + tmp_string + ':</h2>\n<ul>\n'
for z in range(len(new[k])):
tmp_string = tmp_string + '<li><a href="' +\
new[k][z] + '">' + new[k][z] + '</a></li>\n'
tmp_string = tmp_string + '</ul>\n'
file_output.write(tmp_string)
i = i + 1

i = 0
while(i < length):
if(i < length - 2):
delimeter = " , "
elif(i == length - 2):
delimeter = " and "
else:
delimeter = ''
temp = '<a href="' + all_list[i] + '">' + \
'Repository ' + str(i) + '</a>'
if i < lenold:
repo_list.append('<th>Repository ' + str(i) + '</th>')
if reponames == '':
temp = 'Group A' + str(i)
else:
temp = reponames[i]
repo_list.append('<th>' + temp + '</th>')
else:
ii = i + 1
repo_list.append('<th id="sortCelldiff'+str(ii)+'"><a id="sortCellLinkdiff'+str(ii)+'" title="Sort Ascending" href="javascript:sort_diff('+str(ii)+', \'className\')">Repository '+str(i)+'</a></th>')
tmp_string = tmp_string + temp + delimeter
if reponames == '':
temp = 'Group B' + str(i - lenold)
else:
temp = reponames[i]
repo_list.append('<th id="sortCelldiff'+str(ii)+'"><a id="sortCellLinkdiff'+str(ii)+'" title="Sort Ascending" href="javascript:sort_diff('+str(ii)+', \'className\')">'+temp+'</a></th>')
i = i + 1
tmp_string = tmp_string + ".</h2>\n"
file_output.write(tmp_string)

tmp_string = '<table id="table_diff">\n<tbody>\n<tr><th id="sortCelldiff0"><a id="sortCellLinkdiff0" title="Sort Ascending" href="javascript:sort_diff(0, \'string\')">Package name</a></th>'
for reponame in repo_list:
tmp_string = tmp_string + reponame
tmp_string = tmp_string + '</tr>\n'

file_output.write(tmp_string)

strnum = 1
resrange = []
for i in range(lennew):
@@ -1316,7 +1399,7 @@ def HTML_OutputBody(dict_packages, list_dict_old, list_dict_new, arg):

sorted_list = sorted(dict_packages, key=str.lower)
for packagename in sorted_list:
(repo_name, repo_class, flag, show_filter) = GetRepoInfo(dict_packages, packagename,
(repo_name, repo_class, flag, show_filter) = GetRepoInfo(dict_packages, packagename,
lenold, lennew, list_dict_old, list_dict_new, ifreleaseignore)
res = 0
for i in range(4):

@@ -1340,32 +1423,33 @@ def HTML_OutputBody(dict_packages, list_dict_old, list_dict_new, arg):
strtype = "odd"
else:
strtype = "even"
tmp_string = tmp_string + '<tr class="' + strtype + '">'
tmp_string = '<tr class="' + strtype + '">'
tmp_string = tmp_string + '<td>' + packagename + '</td>'
for i in range(length):
tmp_string = tmp_string + '<td ' + repo_class[i] + '>' +\
repo_name[i] + '</td>'
tmp_string = tmp_string + '</tr>\n'
file_output.write(tmp_string)
strnum = strnum + 1
tmp_string = tmp_string + '</tbody>\n</table>\n'

file_output.write(tmp_string)

file_output.write('</tbody>\n</table>\n')

def HTML_OutputTail(file_output):
"""Output end of document.

Outputs static text.
"""
file_output.write('''
<script language='JavaScript' type='text/javascript'>
<script type='text/javascript'>
/* <![CDATA[ */
init_diff();
/* ]]> */
</script>
''');
file_output.write('</body>\n</html>\n')

def HTML_Output(dict_packages, list_dict_old, list_dict_new, arg):
"""Output HTML file.

Generates HTML file.
"""
ifnotquiet = arg.quiet
@@ -1375,7 +1459,7 @@ def HTML_Output(dict_packages, list_dict_old, list_dict_new, arg):
print _("Creating HTML file.")
HTML_OutputHead(file_output)
HTML_OutputBody(dict_packages, list_dict_old, list_dict_new, arg)
HTML_OutputTail(file_output)
HTML_OutputTail(file_output)

def main(args):
arg = ParseCommandLine()

@@ -1388,13 +1472,19 @@ def main(args):
tmp_output = arg.output[0]
else:
tmp_output = default_output
if (arg.ignore):
arg.ignore = arg.ignore[0]
arg.output = None;
for i in range(len(arg.old)):
arg.old[i] = CheckArgs(arg.old[i], arg)
arg.temp_old.append(head_old + str(i) + '/')
arg.temp_old.append([])
for j in range(len(arg.old[i])):
arg.old[i][j] = CheckArgs(arg.old[i][j], arg)
arg.temp_old[i].append(head_old + str(i) + '-' + str(j) + '/')
for i in range(len(arg.new)):
arg.new[i] = CheckArgs(arg.new[i], arg)
arg.temp_new.append(head_new + str(i) + '/')
arg.temp_new.append([])
for j in range(len(arg.new[i])):
arg.new[i][j] = CheckArgs(arg.new[i][j], arg)
arg.temp_new[i].append(head_new + str(i) + '-' + str(j) + '/')
arg.output = tmp_output
CheckOutput(arg)
CheckParam(arg)
@@ -1411,34 +1501,55 @@ def main(args):
arg.show_removed=True
arg.show_updated=True
arg.show_downgraded=True

if arg.ignore:
ignorelist = ReadIgnoreList(arg)
else:
ignorelist = []

for s in ignorelist:
print s

GetFiles(arg)

if not ifhtml:
(dict_old, dict_new) = ParsePackage(arg)

(dict_new_packages, dict_del_packages, dict_upd_packages) = CreateDicts(
dict_old, dict_new)

dict_old = ''

dict_obsoleted = GenerateDictObsoleted(dict_new, ifnotquiet)
dict_new = ''
if(dict_upd_packages) and (ifnotsimple) and (ifchangelog):
dict_logfile_diff = GenerateLogfileDiff(dict_upd_packages, arg)
if not ifnotsimple or not ifchangelog:
dict_logfile_diff = {}

if arg.show_new:
ProcessNewPackages(dict_new_packages, arg.output)
if arg.show_removed:
ProcessDelPackages(dict_del_packages, dict_obsoleted, arg.output)
if dict_upd_packages and (arg.show_updated or arg.show_downgraded):
ProcessUpdPackages(dict_upd_packages, dict_logfile_diff, arg)
PrintSummary(dict_new_packages, dict_del_packages, dict_upd_packages, arg.output)
if arg.show_summary:
PrintSummary(dict_new_packages, dict_del_packages, dict_upd_packages, arg)
else:
(list_dict_old, list_dict_new) = HTML_ParsePackage(arg)
dict_old = HTML_UniteOld(list_dict_old)
dict_old = HTML_UniteDicts(list_dict_old)
if arg.show_summary:
dict_new = HTML_UniteDicts(list_dict_new)
(dict_new_packages, dict_del_packages, dict_upd_packages) = CreateDicts(
dict_old, dict_new)
arg.summary = PrintSummary(dict_new_packages, dict_del_packages, dict_upd_packages, arg)
dict_new = ''
dict_new_packages = ''
dict_del_packages = ''
dict_upd_packages = ''
dict_packages = HTML_CreateDicts(dict_old, list_dict_new)
dict_old = ''
HTML_Output(dict_packages, list_dict_old, list_dict_new, arg)

exit_proc(arg)

if __name__ == "__main__":
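Putting the new options together, an invocation of the updated script might look like the following. This is only an illustration of the grouped --old/--new syntax and the new --show-summary/--html flags added in this commit; the mirror URLs and output file names are hypothetical:

urpm-repodiff.py --old http://mirror.example/old/release/ http://mirror.example/old/updates/ --new http://mirror.example/new/release/ http://mirror.example/new/updates/ --show-summary --output diff.txt

urpm-repodiff.py -o http://mirror.example/old/release/ -n http://mirror.example/new/release/ --html --output diff.html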