Mirror of https://abf.rosa.ru/djam/urpm-tools.git (synced 2025-02-23 09:22:47 +00:00)
repodiff: some code cleaning
parent 35a3dde9f5
commit caedf23923

1 changed file with 232 additions and 232 deletions: urpm-repodiff.py (464 changed lines)
@@ -2,16 +2,16 @@
'''
" Repodiff utility for finding differences between different repositories
"
" The tool downloads, unpacks and parses synthesis.hdlist.cz and
" changelog.xml.lzma to genererate lists of newly added packages,
" removed from new repository packages and updated packages.
" The tool outputs data to standart output or to file.
" It can show if a removed packages is obsoleted by some package
" in new repositories. Also the tool can output data in format of
" HTML table.
"
"
" REQUIREMENTS
" ============
" - urpmi
" - python-2.7
" - lzma
@@ -82,7 +82,7 @@ def ParseCommandLine():
    parser = argparse.ArgumentParser(
        description=_("Tool for comparing sets of repositories."))
    group = parser.add_argument_group(_('global parameters'),
        description=_("Parameters used in all cases."))
    group.add_argument("--old", "-o", action="append", nargs='+', required="True",
        metavar="OLD_REPO", help=_("URL or PATH to old repositories"))
    group.add_argument("--new", "-n", action="append", nargs='+', required="True",
@@ -90,43 +90,43 @@ def ParseCommandLine():
    group.add_argument("--quiet", "-q", action="store_false",
        help=_("Hide service messages."))
    group.add_argument("--no-release", "-r", action="store_true",
        help=_("Ignore release during package compare."))
    group.add_argument("--show-summary", action="store_true",
        help=_("Output summary."))
    group.add_argument("--output", "-out", action="store", default=default_output,
        metavar="OUTPUT_FILE", help=_("Change standart output to \"OUTPUT_FILE\"."))
    group.add_argument("--ignore", "-i", action="store", default='',
        metavar="IGNORELIST", help=_("File with list of ignored packages"))
    group = parser.add_argument_group(_('text mode parameters'),
        description=_("Parameters used only in text mode. (--html not present)"))
    group.add_argument("--size", "-s", action="store_true",
        help=_("Show differences in package sizes."))
    group.add_argument("--simple", action="store_false",
        help=_("Simple output format."))
    group.add_argument("--changelog", "-c", action="store_true",
        help=_("Show changelog difference."))
    group = parser.add_argument_group(_('HTML mode parameters'),
        description=_("Parameters used only in HTML mode. (--html is present)"))
    group.add_argument("--html", action="store_true",
        help=_("Output in HTML format, if --output is not present\
\"%s\" will be created in current directory. \
--size, --simple and --changelog options are ignored.") % htmlname)
    group.add_argument("--reponames", action="store", nargs='+', default='',
        metavar="REPONAME", help=_("Repository names for output."))
    group.add_argument("--title", "-t", action="store",
        default="Difference between repositories.",
        help=_("Set title."))
    group = parser.add_argument_group(_('Filters'),
        description=_("Filters for output. If none selected then every type will\
be shown"))
    group.add_argument("--show-new", "-N", action="store_true",
        help=_("Show new packages"))
    group.add_argument("--show-removed", "-R", action="store_true",
        help=_("Show removed packages"))
    group.add_argument("--show-updated", "-U", action="store_true",
        help=_("Show updated packages"))
    group.add_argument("--show-downgraded", "-D", action="store_true",
        help=_("Show downgraded packages"))
    return parser.parse_args()

def exit_proc(arg):
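The arguments above are the tool's whole command-line surface. As an illustration only (the repository URLs and output file name here are placeholders, not taken from this commit), a plain text-mode comparison could be invoked as:

    urpm-repodiff.py --old http://example.org/old_repo/ --new http://example.org/new_repo/ --show-summary --output repodiff.txt

Adding --html switches to the HTML table handled in the later hunks, in which case --size, --simple and --changelog are ignored.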
@@ -182,7 +182,7 @@ def CheckArgs(urlpath, arg):
        urlpath = urltmp + "media_info/"
        CheckURL(urlpath, arg)
    else:
        print _("Error: \"%s\" is not correct url, path or name of repository") % urlpath
        exit_proc(arg)
    return urlpath

@@ -201,15 +201,15 @@ def CheckOutput(arg):
            except:
                print _("Error: Cannot open %s for writing.") % htmlname
                exit_proc(arg)
            return
        else:
            arg.output = sys.stdout
            return

    if(file_output != ''):
        if(os.path.isfile(file_output)):
            print _("Error: File %s already exists") % file_output
            arg.output = None
            exit_proc(arg)
        else:
            dirname = os.path.dirname(file_output)
@@ -218,11 +218,11 @@ def CheckOutput(arg):
                arg.output = open(file_output, "w")
            except IOError:
                print _("Error: File %s cannot be created") % file_output
                arg.output = None
                exit_proc(arg)
        else:
            print _("Error: Path %s does not exist.") % dirname
            arg.output = None
            exit_proc(arg)

def CheckParam(arg):
@@ -235,12 +235,12 @@ def CheckParam(arg):
        arg.simple = 0
        arg.changelog = 0
        if (arg.reponames != '') and (len(arg.old) + len(arg.new) != len(arg.reponames)):
            print _("Error: number of REPONAME's(%s) are not equal to number of groups(%s)") % \
                (str(len(arg.reponames)), str(len(arg.old) + len(arg.new)))
            exit_proc(arg)
    else:
        arg.repnames = ''
        arg.title = ''

def GetFile(urlpath, filename, localdir, arg):
    """Donwload archive.
@@ -276,47 +276,47 @@ def GetFiles(arg):
    file_name = []
    file_path = []
    for i in range(len(arg.old)):
        for j in range(len(arg.old[i])):
            file_name.append(synthesis_arch)
            file_dir.append(arg.temp_old[i][j])
            file_path.append(arg.old[i][j] + "media_info/")
            if ifchangelog:
                file_name.append(changelog_arch)
                file_dir.append(arg.temp_old[i][j])
                file_path.append(arg.old[i][j] + "media_info/")

    for i in range(len(arg.new)):
        for j in range(len(arg.new[i])):
            file_name.append(synthesis_arch)
            file_dir.append(arg.temp_new[i][j])
            file_path.append(arg.new[i][j] + "media_info/")
            if ifchangelog:
                file_name.append(changelog_arch)
                file_dir.append(arg.temp_new[i][j])
                file_path.append(arg.new[i][j] + "media_info/")

    for i in range(len(file_name)):
        GetFile(file_path[i], file_name[i], file_dir[i], arg)

def ReadIgnoreList(arg):
    ignorefile = arg.ignore
    ignorelist = []
    if not os.path.isfile(ignorefile):
        print _("Error: file %s does not exist.") % ignorefile
        exit_proc(arg)
    try:
        ifile = open(ignorefile)
        for string in ifile:
            if string == '\n':
                continue
            if string.endswith('\n'):
                string = string[:-1]
            ignorelist.append(string)
        ifile.close()
        ignorelist.sort()
    except:
        print _("Error: file %s cannot be read.") % ignorefile
        exit_proc(arg)
    return ignorelist

def RemoveIgnored(dict_in, ignorelist):
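ReadIgnoreList above expects IGNORELIST to be a plain text file with one package name per line; blank lines are skipped and trailing newlines stripped, and RemoveIgnored (next hunk) treats a trailing '*' as a prefix wildcard. A hypothetical ignore file might therefore read:

    basesystem
    kernel-*
    lib64foo-devel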
@@ -327,24 +327,24 @@ def RemoveIgnored(dict_in, ignorelist):
    j = 0
    jmax = len(ignorelist)
    while (i < imax) and (j < jmax):
        if ignorelist[j].endswith('*'):
            comp_str = ignorelist[j][:-1]
            while (i < imax) and (sorted_list[i] < comp_str):
                dict_out[sorted_list[i]] = dict_in[sorted_list[i]]
                i = i + 1
            while (i < imax) and sorted_list[i].startswith(comp_str):
                i = i + 1
        else:
            comp_str = ignorelist[j]
            while (i < imax) and (sorted_list[i] < comp_str):
                dict_out[sorted_list[i]] = dict_in[sorted_list[i]]
                i = i + 1
            if (i < imax) and (sorted_list[i] == comp_str):
                i = i + 1
        j = j + 1
    if (i < imax) and (j == jmax):
        for i in range(i, imax):
            dict_out[sorted_list[i]] = dict_in[sorted_list[i]]
    return dict_out

def RenameSynthFile(localdir, arg):
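The two-pointer walk above relies on both the package names and the ignore patterns being sorted, and copies every name that no pattern matches. A naive Python sketch of the same filtering rule (exact match, or prefix match when a pattern ends with '*') is given below; it is illustrative only, since the real RemoveIgnored avoids rescanning the pattern list for every name:

    def remove_ignored_simple(dict_in, ignorelist):
        # Same semantics as RemoveIgnored, without the sorted-merge optimization.
        dict_out = {}
        for name, value in dict_in.items():
            ignored = any(name.startswith(pat[:-1]) if pat.endswith('*')
                          else name == pat
                          for pat in ignorelist)
            if not ignored:
                dict_out[name] = value
        return dict_out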
@@ -443,8 +443,8 @@ def ParseSynthesis(synthfile, pkgdict, arg):
    synth = open(synthfile)
    tmp = ['', '', '']
    for synthline in synth:
        if not synthline.startswith('@'):
            continue
        if synthline.endswith('\n'):
            synthline = synthline[:-1]
        tmpline = synthline.split('@')
@@ -458,7 +458,7 @@ def ParseSynthesis(synthfile, pkgdict, arg):
        disttagepoch = ChkTagEpoch(tmp[0]) #disttag + distepoch
        tmp[2] = ParseVersion(tmp[2])
        (name, version, release) = RPMNameFilter(tmp[0][0],
                                                 disttagepoch, ifreleaseignore)
        verrel = (version, release, tmp[0][1])
        if(not name in pkgdict):
            pkgdict[name]=(verrel, (tmp[0], tmp[1], tmp[2]))
@@ -510,8 +510,8 @@ def RPMNameFilter(rpmname, disttagepoch, ifreleaseignore):
    issrc = (tmp.pop() == "src")
    ismageia = 0
    if tmp[-1].startswith("mga"):
        tmp.pop()
        ismageia = 1
    lastpart = '.'.join(tmp)
    if (lastpart[0].isdigit() or (not lastpart.startswith(disttagepoch))) and\
            ((not lastpart.isdigit()) or issrc or ismageia):
@@ -523,8 +523,8 @@ def RPMNameFilter(rpmname, disttagepoch, ifreleaseignore):
    ver = string[-2]
    rel = string[-1]
    if ifreleaseignore:
        rel = ""
    return (name, ver, rel)

def compare_versions(first_entry, second_entry):
    """Compare two verrel tuples.
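Across the two hunks above, RPMNameFilter splits a full rpm name into (name, version, release) and blanks the release when --no-release is given. A deliberately simplified sketch of that split follows; it ignores the src/mga/disttag/distepoch special cases the real code handles, so treat it as an approximation rather than the actual algorithm:

    def split_nvr(rpmname):
        # e.g. "foo-1.2-3.mga4.x86_64" -> ("foo", "1.2", "3")  (hypothetical input)
        base = rpmname.rsplit('.', 2)[0]       # drop the distro and arch suffixes
        name, ver, rel = base.rsplit('-', 2)   # last two dash fields are version/release
        return (name, ver, rel)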
@@ -557,16 +557,16 @@ def ParsePackage(arg):
    ignorelist = arg.ignorelist
    pkgdict_old = {}
    for tmp_list in arg.temp_old:
        for directory in tmp_list:
            RenameSynthFile(directory, arg)
            UnpackFiles(directory, ifchangelog, ifnotquiet)
            ParseSynthesis(directory + synthesis_file, pkgdict_old, arg)
    pkgdict_new = {}
    for tmp_list in arg.temp_new:
        for directory in tmp_list:
            RenameSynthFile(directory, arg)
            UnpackFiles(directory, ifchangelog, ifnotquiet)
            ParseSynthesis(directory + synthesis_file, pkgdict_new, arg)
    pkgdict_old = RemoveIgnored(pkgdict_old, ignorelist)
    pdkdict_new = RemoveIgnored(pkgdict_new, ignorelist)
    ignorelist = ""
@@ -797,8 +797,8 @@ def ParseLogfile(dict_log, logfile, dict_upd_packages, mode, arg):
            else:
                entry_time = 0

            if(mode == 1) and (not ifdowngraded) and\
                    (result_key in dict_log) and\
                    (entry_time <= dict_log[result_key][0][1][0]):
                break
            log_child = log_current.children
@@ -809,16 +809,16 @@ def ParseLogfile(dict_log, logfile, dict_upd_packages, mode, arg):
                entry_text = log_child.content
                log_child = log_child.next
                result_changelog.append((entry_time, entry_name, entry_text))
            # if "old" repository do not have changelog of the package
            if(mode == 1) and (not result_key in dict_log):
                dict_log[result_key] = []
                dict_log[result_key].append([])
                dict_log[result_key].append([])
                dict_log[result_key][0] = (verrel, [])
            if not ifdowngraded:
                dict_log[result_key][0] = (verrel, result_changelog[0])
            else:
                dict_log[result_key][0] = (verrel, result_changelog)
            if(mode == ifdowngraded):
                break
        log_current = log_current.next
@@ -867,11 +867,11 @@ def GenerateLogfileDiff(dict_upd_packages, arg):
    dict_log = {}

    for i in temp_old:
        for old_dir in temp_old[i]:
            ParseLogfile(dict_log, old_dir + changelog_file, dict_upd_packages, 0, arg)
    for i in temp_new:
        for new_dir in temp_new[i]:
            ParseLogfile(dict_log, new_dir + changelog_file, dict_upd_packages, 1, arg)

    for name in dict_upd_packages:
        if(name in dict_log):
@@ -895,11 +895,11 @@ def ChangelogPrint(changes_list, file_output):
    changes_list = [(time,author,text)]
    """
    if len(changes_list) > 0:
        for entry in changes_list:
            file_output.write("* " + str(date.fromtimestamp(float(entry[0]))) +\
                " " + entry[1] + '\n' + entry[2] + '\n\n')
    else:
        file_output.write('\n')

def PrintLogfileDiff(package_name, dict_logfile_diff, file_output):
    """Changelog difference.
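Given the write format above, a single changelog entry (date, author and text invented for illustration) is rendered as:

    * 2012-05-30 John Doe <jdoe@example.com> 1.2-3
    - fix build with the new toolchain

followed by a blank line.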
@@ -936,21 +936,21 @@ def ProcessUpdPackages(dict_upd_packages, dict_logfile_diff, arg):
    for name in sorted_list:
        package = dict_upd_packages[name][1][1][0][0]
        if ifnotsimple:
            if dict_upd_packages[name][2]:
                if ifdown:
                    file_output.write(package + '\n' + '-'*len(package) + '\n')
                    file_output.write(_(" ***DOWNGRADED***\n"))
                else:
                    continue
            else:
                if ifup:
                    file_output.write(package + '\n' + '-'*len(package) + '\n')
                else:
                    continue
            if ifchangelog:
                PrintLogfileDiff(name, dict_logfile_diff, file_output)
            else:
                file_output.write('\n')
        else:
            old_package = dict_upd_packages[name][0][1][0][0]
            file_output.write(name + ": " + old_package + " -> " + package + '\n')
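In the simple layout (the final else branch above, selected with --simple), each updated package collapses to one arrow line; with invented names it would look like:

    foo: foo-1.0-1 -> foo-1.2-1

The default branch instead prints the new package name underlined with dashes, adds a ***DOWNGRADED*** marker where applicable, and appends the changelog difference when --changelog is given.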
@@ -968,42 +968,42 @@ def PrintSummary(dict_new_packages, dict_del_packages, dict_upd_packages, arg):
    ifhtml = arg.html

    if ifhtml:
        endstr = '<br />'
    else:
        endstr = '\n'

    tmp_str = _("Summary:")
    if ifhtml:
        tmp_str = '<p class="bold">' + tmp_str + '</p>'
    else:
        tmp_str = tmp_str + endstr
    if arg.show_new:
        length = len(dict_new_packages)
        if length:
            tmp_str = tmp_str + (_(" Total added packages: ") + str(length) + endstr)
    if arg.show_removed:
        length = len(dict_del_packages)
        if length:
            tmp_str = tmp_str + (_(" Total removed packages: ") + str(length) + endstr)
    if arg.show_updated or arg.show_downgraded:
        length = 0
        length_d = 0
        for packagename in dict_upd_packages:
            if dict_upd_packages[packagename][2] == 0:
                length = length + 1
            else:
                length_d = length_d + 1
        if arg.show_updated:
            if length:
                tmp_str = tmp_str + (_(" Total updated packages: ") + str(length) + endstr)
        if arg.show_downgraded:
            if length_d:
                tmp_str = tmp_str + (_(" Total downgraded packages: ") + str(length_d) + endstr)

    if ifhtml:
        return tmp_str
    else:
        file_ouput.write(tmp_str)

def HTML_ParsePackage(arg):
    """Parse hdlist.
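For reference, the text-mode summary assembled above prints one line per non-zero counter; with invented counts it would read:

    Summary:
     Total added packages: 12
     Total removed packages: 3
     Total updated packages: 140
     Total downgraded packages: 1

In HTML mode the same text is returned as a string with <br /> separators and a bold paragraph heading instead of being written to the output file.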
@@ -1018,19 +1018,19 @@ def HTML_ParsePackage(arg):
    html_new_dict_list = []

    for tmp_list in arg.temp_old:
        tmp_dict = {}
        for directory in tmp_list:
            RenameSynthFile(directory, arg)
            UnpackFiles(directory, 0, ifnotquiet)
            ParseSynthesis(directory + synthesis_file, tmp_dict, arg)
        html_old_dict_list.append(RemoveIgnored(tmp_dict, ignorelist))
    for tmp_list in arg.temp_new:
        tmp_dict = {}
        for directory in tmp_list:
            RenameSynthFile(directory, arg)
            UnpackFiles(directory, 0, ifnotquiet)
            ParseSynthesis(directory + synthesis_file, tmp_dict, arg)
        html_new_dict_list.append(RemoveIgnored(tmp_dict, ignorelist))
    ignorelist = ""
    arg.ignorelist = ""
    return html_old_dict_list, html_new_dict_list
@@ -1292,7 +1292,7 @@ def HTML_OutputHead(arg):
        '<body>\n\n')

def GetRepoInfo(dict_packages, packagename, lenold, lennew, list_dict_old,
                list_dict_new, ifreleaseignore):
    """Generate package-specific information.

    Generates class and name to be displayed in the table.
@@ -1304,9 +1304,9 @@ def GetRepoInfo(dict_packages, packagename, lenold, lennew, list_dict_old,
    tmpstr = ""
    for i in range(lenold):
        if packagename in list_dict_old[i]:
            tmpstr = list_dict_old[i][packagename][0][0]
            if not ifreleaseignore:
                tmpstr = tmpstr + '-' + list_dict_old[i][packagename][0][1]
            result1.append(tmpstr)
        else:
            result1.append("N/A")
@@ -1315,9 +1315,9 @@ def GetRepoInfo(dict_packages, packagename, lenold, lennew, list_dict_old,
    tmplist = dict_packages[packagename]
    tmpdict = {}
    for (entry, reponum, entry_type) in dict_packages[packagename]:
        tmpstr = entry[0][0]
        if not ifreleaseignore:
            tmpstr = tmpstr + '-' + entry[0][1]
        tmpdict[reponum] = (tmpstr, entry_type)

    for i in range(lennew):
@@ -1326,9 +1326,9 @@ def GetRepoInfo(dict_packages, packagename, lenold, lennew, list_dict_old,
            result1.append("N/A")
            result2.append("")
        else:
            tmpstr = list_dict_new[i][packagename][0][0]
            if not ifreleaseignore:
                tmpstr = tmpstr + '-' + list_dict_new[i][packagename][0][1]
            result1.append(tmpstr)
            result2.append("")
    else:
@@ -1380,22 +1380,22 @@ def HTML_OutputBody(dict_packages, list_dict_old, list_dict_new, arg):

    all_list = []
    for tmp_list in old:
        all_list.extend(tmp_list)
    for tmp_list in new:
        all_list.extend(tmp_list)
    lenold = len(old)
    lennew = len(new)
    length = lenold + lennew

    i = 0
    for k in range(lenold):
        if reponames == '':
            tmp_string = 'Repository group A' + str(i)
        else:
            tmp_string = reponames[i]
        tmp_string = '<h2>' + tmp_string + ':</h2>\n<ul>\n'
        for z in range(len(old[k])):
            tmp_string = tmp_string + '<li><a href="' +\
                old[k][z] + '">' + old[k][z] + '</a></li>\n'
        tmp_string = tmp_string + '</ul>\n'
        file_output.write(tmp_string)
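The loop above writes one heading plus a link list per old repository group. With --reponames omitted and a single repository URL (placeholder shown), the emitted fragment is roughly:

    <h2>Repository group A0:</h2>
    <ul>
    <li><a href="http://example.org/old_repo/">http://example.org/old_repo/</a></li>
    </ul>

The next hunk does the same for the new repository groups, labelled 'Repository group B...', and then builds the sortable table header cells.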
@@ -1403,32 +1403,32 @@ def HTML_OutputBody(dict_packages, list_dict_old, list_dict_new, arg):

    i = 0
    for k in range(lennew):
        if reponames == '':
            tmp_string = 'Repository group B' + str(i)
        else:
            tmp_string = reponames[i + lenold]
        tmp_string = '<h2>' + tmp_string + ':</h2>\n<ul>\n'
        for z in range(len(new[k])):
            tmp_string = tmp_string + '<li><a href="' +\
                new[k][z] + '">' + new[k][z] + '</a></li>\n'
        tmp_string = tmp_string + '</ul>\n'
        file_output.write(tmp_string)
        i = i + 1

    i = 0
    while(i < length):
        if i < lenold:
            if reponames == '':
                temp = 'Group A' + str(i)
            else:
                temp = reponames[i]
            repo_list.append('<th>' + temp + '</th>')
        else:
            ii = i + 1
            if reponames == '':
                temp = 'Group B' + str(i - lenold)
            else:
                temp = reponames[i]
            repo_list.append('<th id="sortCelldiff'+str(ii)+'"><a id="sortCellLinkdiff'+str(ii)+'" title="Sort Ascending" href="javascript:sort_diff('+str(ii)+', \'className\')">'+temp+'</a></th>')
        i = i + 1

@@ -1447,12 +1447,12 @@ def HTML_OutputBody(dict_packages, list_dict_old, list_dict_new, arg):
    for packagename in sorted_list:
        (repo_name, repo_class, flag, show_filter) = GetRepoInfo(dict_packages, packagename,
            lenold, lennew, list_dict_old, list_dict_new, ifreleaseignore)
        res = 0
        for i in range(4):
            if show_filter[i]*show_mask[i] == 1:
                res = 1
        if res == 0:
            continue

        if flag:
            res = 0
@@ -1517,15 +1517,15 @@ def main(args):
    tmp_output = arg.output
    arg.output = None
    for i in range(len(arg.old)):
        arg.temp_old.append([])
        for j in range(len(arg.old[i])):
            arg.old[i][j] = CheckArgs(arg.old[i][j], arg)
            arg.temp_old[i].append(head_old + str(i) + '-' + str(j) + '/')
    for i in range(len(arg.new)):
        arg.temp_new.append([])
        for j in range(len(arg.new[i])):
            arg.new[i][j] = CheckArgs(arg.new[i][j], arg)
            arg.temp_new[i].append(head_new + str(i) + '-' + str(j) + '/')
    arg.output = tmp_output
    arg.summary = ''
    CheckOutput(arg)
@@ -1538,16 +1538,16 @@ def main(args):
    ifhtml = arg.html
    ifchangelog = arg.changelog
    if (not arg.show_new) and (not arg.show_removed) and\
            (not arg.show_updated) and (not arg.show_downgraded):
        arg.show_new=True
        arg.show_removed=True
        arg.show_updated=True
        arg.show_downgraded=True

    if arg.ignore:
        arg.ignorelist = ReadIgnoreList(arg)
    else:
        arg.ignorelist = []

    GetFiles(arg)

@@ -1566,27 +1566,27 @@ def main(args):
        dict_logfile_diff = {}

        if arg.show_new:
            ProcessNewPackages(dict_new_packages, arg.output)
        if arg.show_removed:
            ProcessDelPackages(dict_del_packages, dict_obsoleted, arg.output)
        if dict_upd_packages and (arg.show_updated or arg.show_downgraded):
            ProcessUpdPackages(dict_upd_packages, dict_logfile_diff, arg)
        if arg.show_summary:
            PrintSummary(dict_new_packages, dict_del_packages, dict_upd_packages, arg)
    else:
        (list_dict_old, list_dict_new) = HTML_ParsePackage(arg)
        dict_old = HTML_UniteDicts(list_dict_old)
        if arg.show_summary:
            dict_new = HTML_UniteDicts(list_dict_new)
            (dict_new_packages, dict_del_packages, dict_upd_packages) = CreateDicts(
                dict_old, dict_new)
            arg.summary = PrintSummary(dict_new_packages, dict_del_packages, dict_upd_packages, arg)
            dict_new = ''
            dict_new_packages = ''
            dict_del_packages = ''
            dict_upd_packages = ''
        dict_packages = HTML_CreateDicts(dict_old, list_dict_new)
        dict_old = ''
        HTML_Output(dict_packages, list_dict_old, list_dict_new, arg)

    exit_proc(arg)