mirror of
https://abf.rosa.ru/djam/urpm-tools.git
synced 2025-02-23 09:22:47 +00:00
1472 lines
53 KiB
Python
Executable file
1472 lines
53 KiB
Python
Executable file
#!/usr/bin/python
|
|
'''
|
|
" Repograph utility for outputting graph of packages and their dependencies
|
|
" on each other. Also checks for unprovided dependencies.
|
|
"
|
|
" The tool downloads, unpacks and parses synthesis.hdlist.cz and
|
|
" (if necessary) files.xml.lzma to check for unprovided dependencies and
|
|
" to output graph of packages and their dependencies in DOT language format.
|
|
" The tool outputs data to standart output or to file.
|
|
"
|
|
" REQUIREMENTS
|
|
" ============
|
|
" - urpmi
|
|
" - python-2.7
|
|
" - lzma
|
|
" - gzip
|
|
" - libxml2 python library
|
|
" - rpm python library
|
|
" - networkx python library
|
|
"
|
|
" Copyright (C) 2012 ROSA Laboratory.
|
|
" Written by Vladimir Testov <vladimir.testov@rosalab.ru>
|
|
"
|
|
" This program is free software: you can redistribute it and/or modify
|
|
" it under the terms of the GNU General Public License or the GNU Lesser
|
|
" General Public License as published by the Free Software Foundation,
|
|
" either version 2 of the Licenses, or (at your option) any later version.
|
|
"
|
|
" This program is distributed in the hope that it will be useful,
|
|
" but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
" MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
" GNU General Public License for more details.
|
|
"
|
|
" You should have received a copy of the GNU General Public License
|
|
" and the GNU Lesser General Public License along with this program.
|
|
" If not, see <http://www.gnu.org/licenses/>.
|
|
'''
|
|
import argparse
|
|
import shutil
|
|
import sys
|
|
import os
|
|
import urllib2
|
|
import urllib
|
|
import tempfile
|
|
import subprocess
|
|
import re
|
|
import time
|
|
|
|
import rpm
|
|
import libxml2
|
|
import urpmmisc
|
|
|
|
import rpm5utils
|
|
from rpm5utils.urpmgraphs.classes.digraph import DiGraph
|
|
from rpm5utils.urpmgraphs.algorithms.cycles import simple_cycles
|
|
import gettext
|
|
|
|
gettext.install('urpm-tools')
|
|
#import rpm5utils.urpmgraphs
|
|
#from rpm5utils.urpmgraphs.algorithms import cycles
|
|
#from rpm5utils.urpmgraphs.classes import digraph
|
|
|
|
|
|
synthesis_arch = "synthesis.hdlist.cz"
|
|
synthesis_arch_renamed = "synthesis.hdlist.gz"
|
|
synthesis_file = "synthesis.hdlist"
|
|
synthesis_search_field = ["info", "requires", "suggests", "provides"]
|
|
fileslist_arch = "files.xml.lzma"
|
|
fileslist_file = "files.xml"
|
|
tmp_cross_path = "cross"
|
|
loopdotfile = "loopgraph"
|
|
altdotfile = "altgraph"
|
|
default_output = "sys.stdout"
|
|
timeout = 5
|
|
|
|
re_search_unver = re.compile("([^\[\]]+)[\[\]]")
|
|
re_search_verrel = re.compile("\[(== |> |< |>= |<= )([\{\}+=0-9a-zA-Z_\.]*:)?([[\{\}+=0-9a-zA-Z_\.]+)(-[[\{\}+=0-9a-zA-Z_\.]+)?([^\[\]]*)\]$")
|
|
|
|
def ParseCommandLine():
    """Parse arguments.

    Build the argparse parser for the repograph tool and return the
    parsed argument namespace.

    NOTE(review): options declared with nargs=1 (--output,
    --requires-recursive, --whatrequires) yield one-element lists,
    while --output defaults to the plain string '' — callers must cope
    with both forms; confirm against CheckOutput().
    """
    parser = argparse.ArgumentParser(
        description=_("Tool for generating dependency graph for REPOSITORY packages."))
    # Positional: the repository to analyse (URL, local path or urpmi media name).
    parser.add_argument("repository", action="store", nargs=1,
        metavar="REPOSITORY", help="URL or local PATH to repository.")
    parser.add_argument("--cross", "-c", action="store", nargs='+', metavar="CROSS_REPO",
        help=_("Search for cross-repository references in CROSS_REPO(s) repositories."))

    # Verbosity: --quiet uses store_false, so arg.quiet == True means "not quiet".
    parser.add_argument("--quiet", "-q", action="store_false",
        help=_("Hide service messages. (About progress status etc.)"))
    parser.add_argument("--verbose", "-v", action="store_true",
        help=_("Show warnings. (About unprovided packages etc.)"))

    # Which dependency kinds to follow (see CheckOptions for the default).
    parser.add_argument("--requires", "-r", action="store_true",
        help=_("Process \"requires\" package dependencies. Used by default."))
    parser.add_argument("--suggests", "-s", action="store_true",
        help=_("Process \"suggests\" package dependencies. If used without \
--requires then only suggests dependencies are processed."))
    parser.add_argument("--file", "-f", action="store_true",
        help=_("Process file dependencies."))
    parser.add_argument("--unprovided", "-u", action="store_true",
        help=_("Show unprovided dependencies."))

    # Directed searches from/to one package (mutually exclusive).
    pkgrequiresgroup = parser.add_mutually_exclusive_group()
    pkgrequiresgroup.add_argument("--requires-recursive", action="store", nargs=1, default=None,
        metavar="PKG", help=_("Search for packages, which are required by package PKG (PKG is a file name or package name)"))
    pkgrequiresgroup.add_argument("--whatrequires", action="store", nargs=1, default=None,
        metavar="PKG", help=_("Search for packages, which requires package PKG (PKG is a file name or package name)"))

    # Analysis modes (mutually exclusive).
    opactgroup = parser.add_mutually_exclusive_group()
    opactgroup.add_argument("--loops", "-l", action="store_true",
        help=_("Search for all simple loops of package dependecies."))
    opactgroup.add_argument("--alternatives", "-a", action="store_true",
        help=_("Search for alternative packages providing the same feature."))
    opactgroup.add_argument("--broken", "-b", action="store_true",
        help=_("Search for all broken packages and anything beetween them"))
    parser.add_argument("--different", "-d", action="store_true",
        help=_("Output each loop or each alternative in different file. \
Ignored if --loops or --alternatives options are not present. \
OUTPUT_FILE (if present) is tracted as folder name for new files in that case."))

    # Output destination, or suppress graph output entirely.
    graphgroup = parser.add_mutually_exclusive_group()
    graphgroup.add_argument("--output", "-o", action="store", nargs=1, default='',
        metavar="OUTPUT_FILE", help=_("Change graph output to \"OUTPUT_FILE\". STDOUT by default."))
    graphgroup.add_argument("--nograph", "-n", action="store_true",
        help=_("Do not output graph. Tool will not start working if --quiet, --nograph are present \
and --verbose is not. (If there is nothing to output - then nothing has to be done.)"))
    return parser.parse_args()
|
|
|
|
def exit_proc(arg):
    """Clean up temporary state and terminate the program.

    Closes the output stream (unless arg.output is being used as a
    directory name for per-item graph files, i.e. --loops/--alternatives
    together with --different) and removes the temporary directory.
    """
    tmp_dir = arg.tmp_dir
    output = arg.output

    writing_to_dir = (arg.loops or arg.alternatives) and arg.different
    if (output != None) and (not writing_to_dir):
        output.close()
    if os.path.isdir(tmp_dir):
        shutil.rmtree(tmp_dir)
    exit(0)
|
|
|
|
def CheckURL(url, arg):
    """Verify that *url* is reachable; abort via exit_proc() otherwise."""
    try:
        urllib2.urlopen(url, None, timeout)
    except:
        # Any failure (DNS, HTTP error, timeout) means the URL is unusable.
        print(_("Error: URL to repository \"%s\" is incorrect") % url)
        exit_proc(arg)
|
|
|
|
def CheckURLPATH(urlpath, arg):
    """Normalize and validate a repository location.

    Accepts an http/ftp URL, a local path (optionally with a file://
    prefix) or an urpmi media name, and returns the corresponding
    ".../media_info/" location.  Aborts via exit_proc() on failure.
    """
    if (urlpath.startswith("http://") or urlpath.startswith("ftp://")):
        if not urlpath.endswith('/'):
            urlpath = urlpath + '/'
        urlpath = urlpath + "media_info/"
        CheckURL(urlpath, arg)
    elif (os.path.isdir(urlpath)) or urlpath.startswith("file://"):
        # BUG FIX: strip the scheme prefix only when it is actually
        # present.  The original sliced 6 characters off plain local
        # paths too, mangling e.g. "/srv/repo" into "repo".
        if urlpath.startswith("file://./"):
            urlpath = urlpath[7:]   # keep the "./" relative form
        elif urlpath.startswith("file://"):
            urlpath = urlpath[6:]   # keep the leading '/'
        if not urlpath.endswith('/'):
            urlpath = urlpath + '/'
        urlpath = urlpath + "media_info/"
        if not os.path.isdir(urlpath):
            print(_("Error: directory %s does not exist") % urlpath)
            exit_proc(arg)
    else:
        # Not a URL or directory: try to resolve it as an urpmi media name.
        (e1,e2,urltmp) = urpmmisc.GetUrlFromRepoName(urlpath)
        if (urltmp):
            if not urltmp.endswith('/'):
                urltmp = urltmp + '/'
            urlpath = urltmp + "media_info/"
            CheckURL(urlpath, arg)
        else:
            print(_("Error: \"%s\" is not correct url, path or name of repository") % urlpath)
            exit_proc(arg)
    return urlpath
|
|
|
|
def CheckOptions(arg):
    """Normalize dependency-type options.

    When --suggests was not given, fall back to processing "requires"
    dependencies (the documented default).
    """
    if not arg.suggests:
        arg.requires = 1
|
|
|
|
def CheckOutput(arg):
    """Check the output destination and open it.

    Three cases:
      * "sys.stdout"/"stdout": write the graph to standard output;
      * --loops/--alternatives together with --different: arg.output is
        treated as a directory name, created here (must not pre-exist);
      * otherwise: arg.output is a single file, opened here for writing
        (must not pre-exist).
    Aborts via exit_proc() on any failure (arg.output is set to None
    first so exit_proc does not try to close it).

    NOTE(review): argparse supplies --output as a one-element list with
    default '' — this function compares against plain strings; verify
    how the caller unwraps the list before getting here.
    """
    file_output = arg.output
    ifloops = arg.loops
    ifalternatives = arg.alternatives
    ifdifferent = arg.different

    if (file_output == "sys.stdout") or (file_output == "stdout"):
        arg.output = sys.stdout
        return
    if((ifloops or ifalternatives) and ifdifferent): # check for dir
        if(os.path.isdir(file_output)):
            print _("Error: directory %s already exists") % file_output
            arg.output = None
            exit_proc(arg)
        else:
            file_output = os.path.realpath(file_output)
            if (os.path.isfile(file_output)):
                print _("Error: File %s already exists") % file_output
                arg.output = None
                exit_proc(arg)

            try:
                os.makedirs(file_output)
            except:
                print _("Error: directory %s was not created") % file_output
                arg.output = None
                exit_proc(arg)
            # Per-item graph files will be created inside this directory,
            # so hand back the path with a trailing slash.
            if not file_output.endswith('/'):
                file_output = file_output + '/'
            arg.output = file_output
    else:
        if(os.path.isfile(file_output)):
            print _("Error: File %s already exists") % file_output
            arg.output = None
            exit_proc(arg)
        else:
            dirname = os.path.dirname(file_output)
            if(dirname == '') or (os.path.exists(dirname)):
                try:
                    arg.output = open(file_output, "w")
                except IOError:
                    print _("Error: File %s cannot be created") % file_output
                    arg.output = None
                    exit_proc(arg)
            else:
                print _("Error: Path %s does not exist.") % dirname
                arg.output = None
                exit_proc(arg)
|
|
|
|
def GetFile(urlpath, filename, localdir, arg):
    """Fetch one repository file into *localdir*.

    Copies the file when *urlpath* is a local directory, otherwise
    downloads it over HTTP/FTP.  Aborts via exit_proc() on failure.
    """
    ifnotquiet = arg.quiet

    if not os.path.isdir(localdir):
        os.makedirs(os.path.realpath(localdir))
    if ifnotquiet:
        print((_("getting file %s from ") % filename) + "\n " + urlpath + filename)
    if os.path.isdir(urlpath):
        try:
            shutil.copyfile(urlpath + filename, localdir + filename)
        except:
            print(_("Error: file %s was not copied") % filename)
            exit_proc(arg)
    else:
        try:
            file_from = urllib2.urlopen(urllib2.Request(urlpath + filename), None, timeout)
            file_to = open(localdir + filename, "w")
            shutil.copyfileobj(file_from, file_to)
        except:
            # BUG FIX: the original referenced the undefined name
            # "filenam" here, raising NameError instead of reporting
            # the failed download.
            print(_("Error: file %(from)s was not downloaded to %(to)s") % {"from": urlpath + filename, "to": localdir + filename})
            exit_proc(arg)
        # Only the download branch opens these handles.
        file_from.close()
        file_to.close()
|
|
|
|
def RenameSynthFile(localdir, arg):
    """Rename the synthesis archive to a .gz name so gzip accepts it."""
    src = localdir + synthesis_arch
    dst = localdir + synthesis_arch_renamed

    if not os.path.isfile(src):
        print(_("Error: file not found: ") + src)
        exit_proc(arg)
    try:
        os.rename(src, dst)
    except OSError:
        print(_("Error: cannot rename file %(from)s to %(to)s") % {"from": synthesis_arch, "to": synthesis_arch_renamed})
        exit_proc(arg)
    if not os.path.isfile(dst):
        print(_("Error: file %s is missing.") % dst)
        exit_proc(arg)
    elif arg.quiet:
        # arg.quiet is store_false: True means "show progress messages".
        print(_("file %(from)s was renamed to %(to)s") % {"from": synthesis_arch, "to": synthesis_arch_renamed})
|
|
|
|
def UnpackSynthFile(localdir, arg):
    """Uncompress the renamed synthesis archive in place with gzip -df."""
    archive = localdir + synthesis_arch_renamed
    if arg.quiet:
        print(_("unpacking file ") + synthesis_arch_renamed)
    if not os.path.isfile(archive):
        print(_("Error: file %s is missing.") % archive)
        exit_proc(arg)
    subprocess.call(["gzip", "-df", archive])
|
|
|
|
def PrepareSynthFile(localdir, arg):
    """Rename then unpack the synthesis archive, ready for parsing."""
    for step in (RenameSynthFile, UnpackSynthFile):
        step(localdir, arg)
|
|
|
|
def ParseVersion(names_list):
    """Split version constraints out of dependency strings.

    Each entry like "name[>= 1:2.3-4]" becomes
    (name, sign, (epoch, version, release)); entries without a
    constraint get sign '' and an empty verrel triple.
    """
    parsed = []
    for raw in names_list:
        unver = re_search_unver.match(raw)
        entry = unver.group(1) if unver else raw

        verrel_match = re_search_verrel.search(raw)
        if not verrel_match:
            parsed.append((entry, '', ('', '', '')))
            continue

        sign = verrel_match.group(1)[:-1]           # drop trailing space
        epoch = verrel_match.group(2)
        epoch = epoch[:-1] if epoch else ''         # drop trailing ':'
        version = verrel_match.group(3)
        release = verrel_match.group(4)
        release = release[1:] if release else ''    # drop leading '-'
        parsed.append((entry, sign, (epoch, version, release)))
    return parsed
|
|
|
|
def TagEpoch(i):
    """Return the disttag+distepoch string for an '@info@' field list.

    4 elements means no disttag/distepoch (returns '-'); 5 adds a
    disttag; 6 adds a distepoch as well.  Anything else is reported.
    """
    if len(i) == 4:
        return '-'
    elif len(i) == 5:
        return i[4]
    elif len(i) == 6:
        return i[4] + i[5]
    else:
        # BUG FIX: the message said "REPODIFF-Warning" — a copy-paste
        # from the repodiff tool; report this tool's name instead.
        print(_("REPOGRAPH-Warning: strange <info>: ") + str(i))
|
|
|
|
def RPMNameFilter(rpmname, disttagepoch):
    """Strip version/release/disttag/arch fields from an rpm name.

    Heuristic: the last dash-field (minus its ".arch" suffix) is a
    release when it starts with a digit or does not start with the
    disttag+epoch string — then one extra field (the version) is
    dropped; otherwise two fields are dropped.
    """
    parts = rpmname.split('-')
    tail = parts.pop()
    # Remove the trailing ".arch" component of the last dash-field.
    tail = '.'.join(tail.split('.')[:-1])
    looks_like_release = (tail[0].isdigit() or
                          (not tail.startswith(disttagepoch))) and \
                         (not tail.isdigit())
    if looks_like_release:
        return '-'.join(parts[:-1])
    return '-'.join(parts[:-2])
|
|
|
|
def ParseSynthFile(dict_provides, dict_asks, localdir, arg):
    """Collect packages information from synthesis.hdlist.

    Fills and returns (dict_provides, dict_asks):
      dict_provides[phrase] = [(pkg_name, sign, verrel), ...] — packages
        providing *phrase*;
      dict_asks[pkg_name] = everything pkg_name requires/suggests (plus
        the filtered package name when --requires-recursive or
        --whatrequires is active).
    """
    ifnotquiet = arg.quiet
    ifrequires = arg.requires
    ifsuggests = arg.suggests
    ifverbose = arg.verbose
    iftagepoch = arg.requires_recursive or arg.whatrequires
    ifnothide = not iftagepoch

    if not os.path.isfile(localdir + synthesis_file):
        print(_("Error: Synthesis file %s was not found.") % (localdir + synthesis_file))
        # BUG FIX: was exit_proc(-1); exit_proc() expects the parsed
        # arguments object and crashed (AttributeError) on an int.
        exit_proc(arg)
    if ifnotquiet:
        print(_("Parsing synthesis."))
    try:
        synth = open(localdir + synthesis_file)
        tmp = ['', [], [], []]   # [name/info, requires, suggests, provides]
        for synthline in synth:
            if synthline.endswith('\n'):
                synthline = synthline[:-1]
            tmpline = synthline.split('@')
            tag = tmpline[1]
            if(tag == synthesis_search_field[1]) and ifrequires:
                tmp[1] = tmpline[2:]
            elif(tag == synthesis_search_field[2]) and ifsuggests:
                tmp[2] = tmpline[2:]
            elif tag == synthesis_search_field[3]:
                tmp[3] = tmpline[2:]
            elif tag == synthesis_search_field[0]:
                # '@info@' terminates one package record: flush it.
                if (iftagepoch):
                    tmp[0] = tmpline[2:]
                    disttagepoch = TagEpoch(tmp[0])
                    tmp[0] = tmp[0][0]
                else:
                    tmp[0] = tmpline[2]

                parsed_tmp = ParseVersion(tmp[3])
                for (phrase, sign, verrel) in parsed_tmp:
                    if ((ifverbose and ifnothide) and (sign != '==') and (sign != '')):
                        print(_("Warning: Unexpected sign %(sign)s in 'provides' section of %(of)s") %
                              {"sign": sign, "of": tmp[0]})
                    if (not phrase in dict_provides):
                        dict_provides[phrase] = [(tmp[0], sign, verrel)]
                    else:
                        dict_provides[phrase].append((tmp[0], sign, verrel))
                tmp_list = []
                tmp_list.extend(tmp[1])
                tmp_list.extend(tmp[2])
                if (iftagepoch):
                    dict_asks[tmp[0]] = (ParseVersion(tmp_list), RPMNameFilter(tmp[0], disttagepoch))
                else:
                    dict_asks[tmp[0]] = [ParseVersion(tmp_list)]
                tmp = ['', [], [], []]
        synth.close()
    except IOError:
        print(_("Error: Failed to open synthesis file ") + localdir + synthesis_file)
        # BUG FIX: was exit_proc(-1) here as well.
        exit_proc(arg)
    return (dict_provides, dict_asks)
|
|
|
|
def compare_verrel(verrel1, sign, verrel2):
    """Return 1 when verrel1 satisfies (sign, verrel2), else 0.

    Uses rpm label ordering; a missing version on either side matches
    anything, and missing epoch/release parts are neutralized on both
    sides before comparing.
    """
    (e1, v1, r1) = verrel1
    (e2, v2, r2) = verrel2
    if (v2 == '') or (v1 == ''):
        return 1
    if (e1 == '') or (e2 == ''):
        e1 = e2 = '0'
    if (r1 == '') or (r2 == ''):
        r1 = r2 = '0'
    order = rpm.labelCompare((e1, v1, r1), (e2, v2, r2))
    satisfied = {
        "==": order == 0,
        ">": order == 1,
        "<": order == -1,
        ">=": order > -1,
        "<=": order < 1,
    }
    return 1 if satisfied.get(sign, False) else 0
|
|
|
|
def compare_2signs_verrel(provide_verrel, provide_sign, verrel, sign):
    """Return 1 when two sign-constrained version ranges can intersect."""
    (e1, v1, r1) = provide_verrel
    (e2, v2, r2) = verrel
    greater = ('>', '>=')
    lesser = ('<', '<=')
    # Two open-ended ranges pointing the same way always intersect.
    if (sign in greater) and (provide_sign in greater):
        return 1
    if (sign in lesser) and (provide_sign in lesser):
        return 1
    # A missing version matches anything.
    if (v1 == '') or (v2 == ''):
        return 1
    if (e1 == '') or (e2 == ''):
        e1 = e2 = '0'
    if (r1 == '') or (r2 == ''):
        r1 = r2 = '0'
    order = rpm.labelCompare((e1, v1, r1), (e2, v2, r2))
    if (order == 0):
        return 1
    if (provide_sign in lesser) and (order == 1):
        return 1
    if (provide_sign in greater) and (order == -1):
        return 1
    return 0
|
|
|
|
def print_verrel(verrel):
    """Format (epoch, version, release) as "epoch:version-release".

    Empty components are omitted together with their separators.
    """
    (epoch, version, release) = verrel
    pieces = []
    if epoch != '':
        pieces.append(epoch + ':')
    if version != '':
        pieces.append(version)
    if release != '':
        pieces.append('-' + release)
    return ''.join(pieces)
|
|
|
|
def unpack_fileslist(localdir, arg):
    """Uncompress files.xml.lzma in place using the lzma tool."""
    archive = localdir + fileslist_arch
    if arg.quiet:
        print(_("unpacking file ") + fileslist_arch)
    if not os.path.isfile(archive):
        print(_("Error: file %s is missing.") % archive)
        exit_proc(arg)
    subprocess.call(["lzma", "-df", archive])
|
|
|
|
def parse_fileslist(filename_check, filename_found, count_depend, dict_depend, localdir, ifcry, arg):
    """Parse files.xml and resolve file-path dependencies.

    For every <files> element, match the files it lists against the
    paths collected in filename_check and record the providing package
    in dict_depend/count_depend, remembering resolved paths in
    filename_found.  *ifcry* is None for the main repository, otherwise
    the index of the cross-repository being processed.

    NOTE(review): the "ifcry > 0" test below is False both for None and
    for cross-repo index 0 — presumably "ifcry != None" was intended;
    confirm against the cross-repo handling in FindDepend.
    """
    ifnotquiet = arg.quiet
    ifverbose = arg.verbose
    ifnothide = (not arg.requires_recursive) and (not arg.whatrequires)

    if ifnotquiet:
        print _("Reading fileslist")
    if not os.path.isfile(localdir + fileslist_file):
        print _("Error: Can't find fileslist ") + localdir + fileslist_file
        exit_proc(arg)
    doc = libxml2.parseFile(localdir + fileslist_file)
    if (not doc):
        print _("Error: Can't read fileslist ") + localdir + fileslist_file
        exit_proc(arg)
    root = doc.children
    if root.name != "media_info":
        print _("Error: Wrong fileslist.")
        doc.freeDoc()
        exit_proc(arg)
    tag_package = root.children
    while(tag_package):
        if(tag_package.name != "files"):
            tag_package = tag_package.next
            continue

        # Locate the "fn" property holding the providing package's name.
        tag_property = tag_package.properties
        while(tag_property) and (tag_property.name != "fn"):
            tag_property = tag_property.next
        if not tag_property:
            print _("Error: Corrupted fileslist")
            doc.freeDoc()
            exit_proc(arg)
        name = tag_property.content
        files = tag_package.content.split('\n')
        for filename in files:
            if filename in filename_check:
                for packagename in filename_check[filename]:
                    if (packagename != name):
                        if (ifcry > 0):
                            # Cross-repo pass: entries flagged 1 already
                            # came from a cross-repo — skip them here.
                            if (filename_check[filename][packagename] == 1):
                                continue
                            else:
                                isdotted = 1    # dotted edge: cross-repo
                        else:
                            if (filename_check[filename][packagename] == 1):
                                isdotted = 1
                            else:
                                isdotted = 0    # solid edge: same repo
                        if packagename not in dict_depend:
                            dict_depend[packagename]={}
                        if name not in dict_depend[packagename]:
                            dict_depend[packagename][name] = isdotted
                            if packagename not in count_depend:
                                count_depend[packagename] = 1
                            else:
                                count_depend[packagename] = count_depend[packagename] + 1
                            if filename not in filename_found:
                                filename_found.append(filename)
                            if (ifverbose and ifnothide) and (ifcry == None):
                                print _("Warning: cross-repository dependency: ") + packagename +\
                                    "\n -> " + name
                    else:
                        # A package listing one of its own required files.
                        if (ifverbose and ifnothide):
                            print _("Warning: package has self-dependecies: ") + packagename +\
                                "\n <" + filename + ">"
        tag_package = tag_package.next
    doc.freeDoc()
    #found!!! update count_depend dict_depend add to filename_found
|
|
#found!!! update count_depend dict_depend add to filename_found
|
|
|
|
def process_fileslist(filename_check, filename_found, count_depend, dict_depend, localdir, ifcry, arg):
    """Ensure files.xml is available locally, unpack it and parse it.

    *ifcry* is None for the main repository, otherwise the index of the
    cross-repository whose fileslist should be processed.
    """
    path = arg.repository if (ifcry == None) else arg.crossurl[ifcry]
    if (not os.path.isfile(localdir + fileslist_file)):
        # Not unpacked yet: fetch the archive and decompress it first.
        GetFile(path, fileslist_arch, localdir, arg)
        unpack_fileslist(localdir, arg)
    parse_fileslist(filename_check, filename_found, count_depend, dict_depend, localdir, ifcry, arg)
|
|
|
|
def remake_count_depend(count_depend):
    """Convert per-package dependency counts into a histogram.

    Used with --file, where count_depend maps package -> number of
    dependencies; the result maps that number -> how many packages
    have it.
    """
    histogram = {}
    for count in count_depend.values():
        histogram[count] = histogram.get(count, 0) + 1
    return histogram
|
|
|
|
def AddDepend(provides, temp_dict, packagename, asked, mode, dict_cross_error, ifshow):
    """Record one resolved dependency of *packagename* in *temp_dict*.

    mode 0 marks a same-repository edge (value 0); any other mode marks
    a cross-repository edge (value 1, also noted in dict_cross_error).
    Self-dependencies are skipped, optionally with a warning.
    """
    if provides == packagename:
        # The package provides its own requirement.
        if (ifshow):
            print(_("Warning: package has self-dependecies: ") + packagename +
                  "\n <" + asked + ">")
        return
    if provides in temp_dict:
        return
    if mode == 0:
        temp_dict[provides] = 0
    else:
        temp_dict[provides] = 1
        dict_cross_error[packagename] = ""
        if (ifshow):
            print(_("Warning: cross-repository dependency:\n package %(pkg)s is dependent from\n <- %(from)s located in another repository") %
                  {"pkg": packagename, "from": provides})
|
|
|
|
def FillDepend(dict_tmp_provides, asked, temp_dict, packagename, sign, verrel,
               dict_error, dict_cross_error, mode, ifshow, ifshowunprovided):
    """Match one requirement against a provides dictionary.

    Scans dict_tmp_provides[asked] for providers whose version
    constraint is compatible with (sign, verrel) and records each match
    via AddDepend().  When nothing matches, the package is recorded in
    dict_error (and, with --unprovided/--broken, the unresolved phrase
    is added to temp_dict with marker value 2).

    *mode* is forwarded to AddDepend: 0 = same repository,
    non-zero = cross-repository edge.
    """
    found = 0
    tmp = 0     # NOTE(review): written in each branch but never read afterwards
    for (provides, provide_sign, provide_verrel) in dict_tmp_provides[asked]:
        if (sign == '') or (provide_sign == ''):
            # No constraint on one side: always compatible.
            AddDepend(provides, temp_dict, packagename, asked, mode, dict_cross_error, ifshow)
            tmp = 1
            found = 1
        elif (provide_sign == '=='):
            # Exact provide version: compare it directly against the ask.
            if compare_verrel(provide_verrel, sign, verrel):
                AddDepend(provides, temp_dict, packagename, asked, mode, dict_cross_error, ifshow)
                tmp = 2
                found = 1
        else:
            # Both sides are ranges: check that the ranges can intersect.
            if compare_2signs_verrel(provide_verrel, provide_sign, verrel, sign):
                AddDepend(provides, temp_dict, packagename, asked, mode, dict_cross_error, ifshow)
                tmp = 3
                found = 1
    if found == 0:
        dict_error[packagename] = ''
        if (ifshow):
            print _("Warning: needed version is absent <%(ver)s> %(rel)s required by package") %\
                {"ver": asked, "rel": print_verrel(verrel)} + "\n <%s>" % packagename
        if (ifshowunprovided):
            if asked not in temp_dict:
                temp_dict[asked] = 2
|
|
|
|
def generate_error_dict(filename_check, filename_found, dict_error, dict_depend, count_depend, ifshow, ifshowunprovided):
    """Report file dependencies that no fileslist provided.

    Every entry of filename_check that never showed up in
    filename_found is an unprovided file dependency: warn about it,
    optionally add a "missing" node (marker 2) to the graph data, and
    record the asking package in dict_error.
    """
    for filename in filename_check:
        if filename not in filename_found:
            for packagename in filename_check[filename]:
                # Flag 1 means the ask came from a cross-repository;
                # those are not reported against this repository.
                if (filename_check[filename][packagename] == 1):
                    continue
                if (ifshow):
                    print _("Warning: Package %(pkg)s unprovided by %(by)s") %{'pkg': packagename, 'by': filename}
                if (ifshowunprovided):
                    if filename not in dict_depend[packagename]:
                        dict_depend[packagename][filename] = 2
                        if packagename not in count_depend:
                            count_depend[packagename] = 1
                        else:
                            count_depend[packagename] = count_depend[packagename] + 1
                if packagename not in dict_error:
                    dict_error[packagename] = ''
    #if in filename_check but not in filename_found then update dict_error by contents of filename_check
|
|
#if in filename_check but not in filename_found then update dict_error by contents of filename_check
|
|
|
|
def FindDepend(dict_provides, dict_asks, dict_cross_provides, dict_cross_asks, arg):
    """Resolve every package's requirements against the provides data.

    Builds and returns (dict_depend, count_depend):
      dict_depend[pkg][provider] = 0 (same repo), 1 (cross-repo) or
        2 (unprovided, only with --unprovided/--broken);
      count_depend = histogram of dependency counts (with --file it is
        first built per-package and remade into a histogram at the end).
    File-path requirements are deferred to the files.xml pass.
    """
    ifnotquiet = arg.quiet
    ifcheckfiles = arg.file
    ifcross = arg.cross
    ifverbose = arg.verbose
    ifnothide = (not arg.requires_recursive) and (not arg.whatrequires)
    ifshow = ifverbose and ifnothide
    ifshowunprovided = arg.unprovided or arg.broken

    dict_error = {}         # packages with unresolvable asks
    dict_cross_error = {}   # packages depending on another repository
    dict_depend = {}        # pkg -> {provider: edge kind}
    count_depend = {}       # see docstring
    filename_check = {}     # file path -> {asking package: origin flag}
    filename_found = []     # file paths already resolved
    if (ifnotquiet and ifnothide):
        print _("Finding dependencies.")
    for packagename in dict_asks:
        temp_dict = {}
        for (asked, sign, verrel) in dict_asks[packagename][0]:
            if asked not in dict_provides:
                if asked not in dict_cross_provides:
                    if not asked.startswith('/'):
                        # Plain capability nobody provides: an error.
                        dict_error[packagename] = ''
                        if (ifshow):
                            print _("Warning: can't find <%(ask)s> required by package\n <%(pkg)s>") %\
                                {'ask': asked, 'pkg': packagename}
                        if (ifshowunprovided):
                            if asked not in temp_dict:
                                temp_dict[asked] = 2
                    elif ifcheckfiles:
                        # File path: defer resolution to files.xml pass.
                        if asked not in filename_check:
                            filename_check[asked] = {}
                        filename_check[asked][packagename] = 0 # usual
                else:
                    # Provided only by a cross-repository package.
                    FillDepend(dict_cross_provides, asked, temp_dict, packagename,
                               sign, verrel, dict_error, dict_cross_error, 1, ifshow, ifshowunprovided)
            else:
                FillDepend(dict_provides, asked, temp_dict, packagename,
                           sign, verrel, dict_error, dict_cross_error, 0, ifshow, ifshowunprovided)
        dict_depend[packagename] = temp_dict
        if not ifcheckfiles:
            # Histogram of dependency counts...
            length = len(temp_dict)
            if length not in count_depend:
                count_depend[length] = 1
            else:
                count_depend[length] = count_depend[length] + 1
        else:
            # ...or raw per-package counts, remade into a histogram later.
            count_depend[packagename] = len(temp_dict)

    for packagename in dict_cross_asks: # cross-rep dependency
        if packagename in dict_depend:
            continue
        temp_dict = {}
        for (asked, sign, verrel) in dict_cross_asks[packagename][0]:
            if asked in dict_provides:
                FillDepend(dict_provides, asked, temp_dict, packagename,
                           sign, verrel, dict_error, dict_cross_error, 2, ifshow, ifshowunprovided)
            else:
                if (asked not in dict_cross_provides) and (asked.startswith('/')) and (ifcheckfiles):
                    if (asked not in filename_check):
                        filename_check[asked] = {}
                    filename_check[asked][packagename] = 1 # from cross-repo

        if packagename not in dict_depend:
            dict_depend[packagename] = temp_dict
        else:
            # NOTE(review): unreachable given the "continue" above — confirm.
            temp_dict.update(dict_depend[packagename])
            dict_depend[packagename] = temp_dict
        if not ifcheckfiles:
            length = len(temp_dict)
            if length not in count_depend:
                count_depend[length] = 1
            else:
                count_depend[length] = count_depend[length] + 1
        else:
            count_depend[packagename] = len(temp_dict)

    if ifcheckfiles:
        # Resolve the deferred file-path asks from each repo's files.xml.
        process_fileslist(filename_check, filename_found, count_depend, dict_depend, arg.tmp_dir, None, arg)
        if ifcross:
            for i in range(len(ifcross)):
                process_fileslist(filename_check, filename_found, count_depend, dict_depend, get_temp(i, arg), i, arg)
        generate_error_dict(filename_check, filename_found, dict_error, dict_depend, count_depend, ifshow, ifshowunprovided)
        count_depend = remake_count_depend(count_depend)
    if (ifshow):
        if (ifcross):
            sorted_tmp = sorted(dict_cross_error)
            print "\n" + _("Total cross-referenced packages: ") + str(len(sorted_tmp))
            for tmp_ent in sorted_tmp:
                print tmp_ent
        sorted_tmp = sorted(dict_error)
        print "\n" + _("Total unprovided packages: ") + str(len(sorted_tmp))
        for tmp_ent in sorted_tmp:
            print tmp_ent
    return dict_depend, count_depend
|
|
|
|
def AssignColors(dict_depend, count_depend, arg):
    """Assign an HSB color per package for the DOT output.

    Hue encodes how many dependencies a package has (its rank in the
    sorted count histogram); saturation spreads packages that share the
    same dependency count.  With --whatrequires the roles of hue and
    saturation are swapped.  Returns {package_name: (h, s, b)}.
    """
    ifnotquiet = arg.quiet
    ifchangecolors = arg.whatrequires

    dict_colors = {}
    dict_count = {}     # remaining (not yet colored) packages per count

    if ifnotquiet:
        print _("Calculating colors.")
    sorted_count = sorted(count_depend)
    length = len(count_depend)
    normalized_count = {}   # dependency count -> rank in [0, 1)
    i = 0
    for number in sorted_count:
        normalized_count[number] = float(i) / length
        dict_count[number] = count_depend[number]
        i = i + 1
    for package_name in dict_depend:
        number = len(dict_depend[package_name])
        if (ifchangecolors):
            h = float(dict_count[number]) / count_depend[number]
            s = 0.6 + 0.4 * normalized_count[number]
        else:
            h = normalized_count[number]
            s = 0.6 + (0.4 * dict_count[number]) / count_depend[number]
        b = 1.0
        dict_colors[package_name] = (h, s, b)
        # Consume one slot so packages with equal counts get distinct shades.
        dict_count[number] = dict_count[number] - 1
    return dict_colors
|
|
|
|
def OutputGraphHead(file_output):
    """Write the fixed DOT preamble (page size, layout, node defaults)."""
    preamble = ('\n\ndigraph packages {\n'
                'size="20.69,25.52";\n'
                'ratio="fill";\n'
                'rankdir="TB";\n'
                'node[style="filled"];\n'
                'node[shape="box"];\n\n')
    file_output.write(preamble)
|
|
|
|
def print_color(color_tuple):
    """Render an (h, s, b) triple as a space-separated DOT color string."""
    return ' '.join([str(color_tuple[0]),
                     str(color_tuple[1]),
                     str(color_tuple[2])])
|
|
|
|
def OutputGraphLoopBody(loop, loop_color, file_output):
    """Write one dependency loop as a DOT edge chain "a" -> "b" -> ..."""
    chain = ' -> '.join('"' + pkg + '"' for pkg in loop)
    file_output.write(chain + ' [color="' + str(loop_color) + ' 1.0 1.0"];\n')
|
|
|
|
def OutputGraphAltBody(phrase, alt, alt_color, file_output):
    """Write one alternatives group: phrase -> { sorted providers }."""
    pieces = ['"' + phrase + '" -> {\n']
    for provider in sorted(alt):
        pieces.append('"' + provider + '"\n')
    pieces.append('} [color="' + str(alt_color) + ' 1.0 1.0"];\n\n')
    file_output.write(''.join(pieces))
|
|
|
|
def OutputGraphBody(some_list, dict_color, file_output, packagename, node_type):
    """Write the DOT edges from *packagename* to everything in *some_list*.

    node_type 0: normal edges colored from dict_color;
    node_type 1: dotted cross-repository edges;
    node_type 2: red ellipse nodes for unprovided dependencies.
    """
    parts = ['"' + packagename + '" -> {\n']
    if (node_type == 2):
        parts.append('node[shape="ellipse", fillcolor="0.0 1.0 1.0"];\n')
    for dependfrom in sorted(some_list):
        parts.append('"' + dependfrom + '"\n')
    if node_type in (0, 1):
        arrow_style = ', style="dotted"' if node_type == 1 else ''
        parts.append('} [color="' + print_color(dict_color[packagename]) +
                     '"' + arrow_style + '];\n\n')
    elif (node_type == 2):
        parts.append('};\n\n')
    file_output.write(''.join(parts))
|
|
|
|
|
|
def OutputGraphTail(file_output):
    """Close the DOT digraph block opened by OutputGraphHead()."""
    file_output.write('}\n')
|
|
|
|
def OutputGraph(dict_depend, dict_color, arg):
    """Write the full dependency graph in DOT format to arg.output."""
    file_output = arg.output
    if arg.whatrequires:
        selected_node = arg.whatrequires[0]
    elif arg.requires_recursive:
        selected_node = arg.requires_recursive[0]
    else:
        selected_node = None
    OutputGraphHead(file_output)

    if (selected_node):
        # Highlight the package the user asked about.
        file_output.write('"' + selected_node + '" [color="0.4 1.0 1.0"];\n')
    for packagename in sorted(dict_depend):
        if not dict_depend[packagename]:
            continue
        # Split this package's edges by kind:
        # 0 normal, 1 cross-repo (dotted), 2 unprovided (ellipse).
        buckets = {0: [], 1: [], 2: []}
        for pkg in dict_depend[packagename]:
            mode = dict_depend[packagename][pkg]
            if mode in buckets:
                buckets[mode].append(pkg)

        if buckets[0]:
            OutputGraphBody(buckets[0], dict_color, file_output, packagename, 0)
        if buckets[1]:
            OutputGraphBody(buckets[1], dict_color, file_output, packagename, 1)
        if buckets[2]:
            OutputGraphBody(buckets[2], None, file_output, packagename, 2)

    OutputGraphTail(file_output)
|
|
|
|
def CountPor(number):
    """Return the decimal order of ``number`` (digit count minus one).

    E.g. 0-9 -> 0, 10-99 -> 1, 100-999 -> 2.  Used to decide how many
    leading zeroes numbered output files need.

    Uses floor division ``//`` so the loop works correctly under both
    Python 2 and Python 3: with true division ``number / 10`` the value
    becomes a float, it stays truthy for hundreds of iterations and the
    returned order is wrong.
    """
    tmp = number // 10
    por = 0
    while tmp:
        tmp = tmp // 10
        por = por + 1
    return por
|
|
|
|
def LeadingZeroes(number, por):
    """Return ``number`` as a string zero-padded up to order ``por``."""
    pad = por - CountPor(number)
    return '0' * pad + str(number)
|
|
|
|
def OutputLoopGraph(loops, colors, arg):
    """Output graph(s) of dependency loops.

    With -d/--different every loop is written to its own numbered .dot
    file (arg.output is then a path prefix); otherwise all loops share
    the single already-open stream in arg.output.
    """
    ifdifferent = arg.different
    if arg.whatrequires:
        selected_node = arg.whatrequires[0]
    elif arg.requires_recursive:
        selected_node = arg.requires_recursive[0]
    else:
        selected_node = None

    output = arg.output
    file_output = output
    if not ifdifferent:
        # Shared graph: one header/footer wraps every loop.
        OutputGraphHead(file_output)
        if selected_node:
            file_output.write('"' + selected_node + '" [color="0.4 1.0 1.0"];\n')

    total = len(colors)
    por = CountPor(total)
    for i in range(total):
        if ifdifferent:
            # Self-contained .dot file per loop, zero-padded so the
            # files sort in numeric order.
            filename = output + loopdotfile + LeadingZeroes(i, por) + '.dot'
            file_output = open(filename, 'w')
            OutputGraphHead(file_output)
            if selected_node:
                file_output.write('"' + selected_node + '" [color="0.4 1.0 1.0"];\n')
        OutputGraphLoopBody(loops[i], colors[i], file_output)
        if ifdifferent:
            OutputGraphTail(file_output)
            file_output.close()

    if not ifdifferent:
        OutputGraphTail(file_output)
|
|
|
|
def OutputAltGraph(alternatives, colors, arg):
    """Output graph(s) of alternatives.

    With -d/--different every alternative is written to its own numbered
    .dot file (arg.output is then a path prefix); otherwise everything
    goes to the single already-open stream in arg.output.
    """
    ifdifferent = arg.different
    if arg.whatrequires:
        selected_node = arg.whatrequires[0]
    elif arg.requires_recursive:
        selected_node = arg.requires_recursive[0]
    else:
        selected_node = None

    output = arg.output
    file_output = output
    if not ifdifferent:
        # Shared graph: single header/footer around all alternatives.
        OutputGraphHead(file_output)
        if selected_node:
            file_output.write('"' + selected_node + '" [color="0.4 1.0 1.0"];\n')

    por = CountPor(len(colors))
    for i, phrase in enumerate(alternatives):
        if ifdifferent:
            # Separate zero-padded .dot file per alternative.
            filename = output + altdotfile + LeadingZeroes(i, por) + '.dot'
            file_output = open(filename, 'w')
            OutputGraphHead(file_output)
            if selected_node:
                file_output.write('"' + selected_node + '" [color="0.4 1.0 1.0"];\n')
        OutputGraphAltBody(phrase, alternatives[phrase], colors[i], file_output)
        if ifdifferent:
            OutputGraphTail(file_output)
            file_output.close()

    if not ifdifferent:
        OutputGraphTail(file_output)
|
|
|
|
def BuildGraph(dict_depend):
    """Build forward/reverse adjacency maps from the dependency dict.

    Returns a tuple ``(dict_in, dict_out)`` where
    ``dict_in[p]`` lists the packages p depends on and
    ``dict_out[p]`` lists the packages that depend on p.
    Used by the loop-finding algorithm and by the --pkg-... options.
    """
    dict_out = {}
    dict_in = {}
    for packagename in dict_depend:
        for pkg2 in dict_depend[packagename]:
            dict_out.setdefault(pkg2, []).append(packagename)
            dict_in.setdefault(packagename, []).append(pkg2)
    return (dict_in, dict_out)
|
|
|
|
def RemoveNonCycle(dict_in, dict_out, arg):
    """Remove non-cycle nodes from graph.

    Remove all nodes that are not present in any loop.
    Linear algorithm. On each step it checks all marked nodes.
    If node hasn't got any nodes dependent from it or it's not
    dependent on any node, then this node cannot be present in any loop.
    So we exlude this node and mark all nodes that are connected to this node.
    Because only for them the situation has been changed a little.
    All remained nodes are included in some loop.

    dict_in  -- node -> list of nodes it depends on (pruned in place)
    dict_out -- node -> list of nodes depending on it (pruned in place)
    arg      -- parsed command-line namespace.
                NOTE(review): despite the local name, a truthy arg.quiet
                enables printing here -- it appears to hold the
                "not quiet" flag; confirm against ParseCommandLine.
    """
    ifnotquiet = arg.quiet

    check = [] #items for further checks
    to_remove = [] #items for remove
    # Seed the check list with every node of the graph.
    for pkg in dict_in:
        check.append(pkg)
    for pkg in dict_out:
        if pkg not in check:
            check.append(pkg)

    ischanged = 1
    removed = 0
    # Repeat until a full pass removes nothing.
    while(ischanged):
        ischanged = 0
        # A node absent from either dict has no incoming or no outgoing
        # edges left, hence it cannot lie on any cycle.
        for pkg in check:
            if (pkg not in dict_in) or (pkg not in dict_out):
                to_remove.append(pkg)
                removed = removed + 1
                ischanged = 1
        check = []
        # Detach every removed node and re-check only its neighbours --
        # they are the only nodes whose degree just changed.
        for pkg in to_remove:
            if (pkg in dict_in):
                for pkg2 in dict_in[pkg]:
                    dict_out[pkg2].remove(pkg)
                    # Drop entries whose edge list became empty so the
                    # membership tests above keep working.
                    if (len(dict_out[pkg2]) == 0):
                        dict_out.pop(pkg2)
                    if pkg2 not in check:
                        check.append(pkg2)
                dict_in.pop(pkg)
            if (pkg in dict_out):
                for pkg2 in dict_out[pkg]:
                    dict_in[pkg2].remove(pkg)
                    if (len(dict_in[pkg2]) == 0):
                        dict_in.pop(pkg2)
                    if pkg2 not in check:
                        check.append(pkg2)
                dict_out.pop(pkg)
        to_remove = []
    if ifnotquiet:
        print _("Non-cycle nodes removed: ") + str(removed)
        print _("Cyclic packages: ") + str(len(dict_in))
|
|
|
|
def FindLoops(dict_depend, arg):
    """Find all simple loops in oriented graph.

    First, remove all nodes, that are not present in any loop.
    Then search for all loops in what has remained.

    Returns the list of loops produced by simple_cycles(); each loop is
    a list of package names.
    """
    ifnotquiet = arg.quiet
    ifverbose = arg.verbose
    file_output = arg.output

    benchtime = time.clock()
    # Shrink the graph first: stripping nodes that cannot be on a cycle
    # keeps the (expensive) cycle enumeration tractable.
    (dict_in, dict_out) = BuildGraph(dict_depend)
    RemoveNonCycle(dict_in, dict_out, arg)
    if ifnotquiet:
        benchtime1 = time.clock() - benchtime
        print _("Worktime: %s seconds") % str(benchtime1)
    # Rebuild the reduced graph as a DiGraph for simple_cycles().
    G = DiGraph()
    for pkg1 in dict_in:
        for pkg2 in dict_in[pkg1]:
            G.add_edge(pkg1, pkg2)
    if ifnotquiet:
        print _("Searching loops.")
    loops = simple_cycles(G)
    if ifnotquiet:
        benchtime2 = time.clock() - benchtime
        print _("End of search.")
        print _("Loops search: %s seconds") % str(benchtime2)

    if ifverbose:
        # Print every loop as "Loop N: a -> b -> c".
        i = 1
        print _("Total: %s loops.") % str(len(loops))
        for loop in loops:
            beg = 1
            for pkg in loop:
                if beg:
                    beg = 0
                    tmpstr = _("Loop ") + str(i) + ": " + pkg
                else:
                    tmpstr = tmpstr + " -> " + pkg
            print tmpstr
            i = i + 1

    return loops
|
|
|
|
def FindAlternatives(dict_provides, arg):
    """Find Alternatives.

    Select all phrases that are provided by more than one package.

    Returns a dict mapping each such phrase to the list of package
    names providing it.
    NOTE(review): as elsewhere in this file, a truthy arg.quiet enables
    progress output -- it appears to hold the "not quiet" flag.
    """
    ifverbose = arg.verbose
    ifnotquiet = arg.quiet

    if (ifnotquiet):
        print _("Searching alternatives.")
    altlist = {}
    # A phrase with two or more providers is an "alternative".
    for phrase in dict_provides:
        if len(dict_provides[phrase]) > 1:
            altlist[phrase] = []
            # Each provides entry is a (packagename, ...) tuple; only
            # the package name is kept here.
            for (packagename, r1, r2) in dict_provides[phrase]:
                altlist[phrase].append(packagename)

    if ifverbose:
        # List every alternative and its providers, sorted by phrase.
        length = len(altlist)
        i = 1
        sorted_list = sorted(altlist)
        print _("Total: %d alternatives.") % length
        for phrase in sorted_list:
            print _("Alternative ") + str(i) + ": " + phrase + _(" is provided by:")
            for packagename in altlist[phrase]:
                print " -> " + packagename
            i = i + 1

    if (ifnotquiet):
        print _("End of search.")
    return altlist
|
|
|
|
def FindBroken(dict_depend, count_depend, dict_asks, dict_provides, dict_cross_asks, dict_cross_provides, arg):
    """Find broken packages.

    Collects every package that has at least one unprovided dependency
    (mode flag 2) and rebuilds the dictionaries around that seed set via
    RemakeDicts, so packages transitively depending on broken ones are
    included as well.
    """
    startlist = []
    for packagename in dict_depend:
        modes = dict_depend[packagename]
        # Mode 2 marks a dependency nobody provides.
        if any(modes[pkg] == 2 for pkg in modes):
            startlist.append(packagename)
    return RemakeDicts(dict_depend, count_depend, dict_asks, dict_provides, dict_cross_asks, dict_cross_provides, arg, startlist)
|
|
|
|
def AssignDictColors(tmpdict):
    """Assign an evenly spaced hue in [0, 1) to every entry of ``tmpdict``.

    Returns a list of floats, one per entry, used to colour each loop or
    alternative distinctly.
    """
    length = len(tmpdict)
    return [float(i) / length for i in range(length)]
|
|
|
|
def get_temp(i, arg):
    """Return the path of the i-th temporary cross-repository directory."""
    return '%s%s%s/' % (arg.tmp_dir, tmp_cross_path, str(i))
|
|
|
|
def PkgCheck(pkgname, dict_asks, dict_cross_asks):
    """Resolve a PKG given to --pkg-require/--pkg-provide to a file name.

    Accepts either a repository file name (a dictionary key) or a package
    name (stored as the second element of each entry).  The main
    repository is searched before the cross-repositories.  Returns the
    matching file name, or None when nothing matches.
    """
    for asks in (dict_asks, dict_cross_asks):
        if pkgname in asks:
            return pkgname
        for filename in asks:
            if pkgname == asks[filename][1]:
                return filename
    return None
|
|
|
|
def RemakeAsks(startlist, dict_asks, dict_depend, dict_cross_asks, arg, ifbroken):
    """Select needed packages, so we can rebuild everything else.

    Starting from ``startlist``, walks the dependency graph level by
    level and collects every reachable package: the packages that
    require the seed (broken search / --whatrequires) or the packages
    the seed requires (--requires-recursive).  Returns the pair
    (new_dict_asks, new_dict_cross_asks) restricted to the selection.
    """
    ifwhatrequires = arg.whatrequires
    ifrequires_recursive = arg.requires_recursive
    ifnotquite = arg.quiet
    ifverbose = arg.verbose

    # dict_out: who depends on a package; dict_in: what a package depends on.
    (dict_in, dict_out) = BuildGraph(dict_depend)
    # Pick the traversal direction.
    # NOTE(review): if none of ifbroken / whatrequires / requires_recursive
    # is set, dict_tmp stays unbound (NameError below) -- callers must
    # guarantee one of them.
    if (ifbroken != None):
        dict_tmp = dict_out
    elif (ifwhatrequires):
        dict_tmp = dict_out
    elif (ifrequires_recursive):
        dict_tmp = dict_in

    # list_selected: everything chosen so far; list_append: current frontier.
    list_selected = []
    list_selected.extend(startlist)
    list_append = []
    list_append.extend(startlist)
    if (ifnotquite):
        if (ifbroken != None):
            print _("Searching for broken packages.")
            if (ifverbose):
                sorted_list = sorted(startlist)
                for pkgname in sorted_list:
                    print " -> " + pkgname
        elif (ifrequires_recursive):
            print _("Searching for packages REQUIRED by ") + startlist[0]
        elif (ifwhatrequires):
            print _("Searching for packages that REQUIRE ") + startlist[0]
    #select what we need, show what we have found (if --verbose option is used)
    level_cnt = 0
    ischanged = 1
    # Breadth-first expansion: each pass adds the next dependency level.
    while (ischanged == 1):
        if (ifverbose):
            if (level_cnt > 0):
                if (ifnotquite):
                    print _("Level %d dependency.") % level_cnt
                    for tmppkg in list_append:
                        print " -> " + tmppkg

        ischanged = 0
        tmp_append = []
        #check for every filename in custody if it in list_selected.
        for name in list_append:
            if name in dict_tmp:
                for tmpname in dict_tmp[name]:
                    #if we haven't met it yet - put it undet custody
                    if (tmpname not in list_selected) and (tmpname not in tmp_append):
                        tmp_append.append(tmpname)
                        ischanged = 1

        list_selected.extend(list_append)
        list_append = tmp_append
        level_cnt = level_cnt + 1
    #remove what has remained unselected
    new_dict_asks = {}
    new_dict_cross_asks = {}
    for filename in list_selected:
        if filename in dict_asks:
            new_dict_asks[filename] = dict_asks[filename]
        else:
            if not filename in dict_cross_asks:
                # Name known to the graph but not to either asks dict:
                # keep a placeholder so the output still has a node.
                new_dict_asks[filename] = [[], ""]
            else:
                new_dict_cross_asks[filename] = dict_cross_asks[filename]
    return (new_dict_asks, new_dict_cross_asks)
|
|
|
|
def RemoveExternal(dict_asks, dict_provides, dict_cross_asks, dict_cross_provides, ifshow):
    """Drop dependencies that point outside the selected group.

    For every file in dict_asks, keeps only the requirements satisfied
    inside the group or by a cross-repository; unsatisfied requirements
    are kept only when ``ifshow`` is true (-u/-b options).  Also rebuilds
    the provides dictionary restricted to providers that are themselves
    part of the group.  Returns (new_dict_asks, new_dict_provides).
    """
    new_dict_asks = {}
    new_dict_provides = {}

    def record_providers(phrase):
        # Copy every in-group provider of `phrase` into new_dict_provides
        # (no duplicates); report whether at least one was found.
        found = False
        for pkg in dict_provides[phrase]:
            if pkg[0] in dict_asks:
                found = True
                kept = new_dict_provides.setdefault(phrase, [])
                if pkg not in kept:
                    kept.append(pkg)
        return found

    for filename in dict_asks:
        kept_asks = []
        new_dict_asks[filename] = (kept_asks, filename)
        for asks in dict_asks[filename][0]:
            if asks[0] in dict_provides:
                # Keep the requirement only when someone in the group
                # actually provides it.
                if record_providers(asks[0]):
                    kept_asks.append(asks)
            elif asks[0] in dict_cross_provides or ifshow:
                kept_asks.append(asks)

    # Cross-repository packages may also pull group providers into the
    # rebuilt provides dictionary; their own asks are not kept.
    for filename in dict_cross_asks:
        for asks in dict_cross_asks[filename][0]:
            if asks[0] in dict_provides:
                record_providers(asks[0])

    return (new_dict_asks, new_dict_provides)
|
|
|
|
def RemakeDicts(dict_depend, count_depend, dict_asks, dict_provides, dict_cross_asks, dict_cross_provides, arg, brokenlist=None):
    """Procedure for rebuilding packages lists.

    for --whatrequires and --requires-recursive options
    and for --broken option

    When ``brokenlist`` is None the traversal is seeded with the single
    package named on the command line; otherwise ``brokenlist`` is the
    seed.  Returns the rebuilt tuple
    (dict_depend, count_depend, asks, provides, cross_asks, cross_provides).
    """
    ifnotquiet = arg.quiet
    whatrequires = arg.whatrequires
    requires_recursive = arg.requires_recursive
    ifshow = arg.unprovided or arg.broken

    if (ifnotquiet):
        print _("Remaking structures.")
    if (brokenlist == None):
        # Resolve the user-supplied name to a repository file name and
        # write the canonical name back into the arg namespace.
        if (whatrequires):
            pkgname = whatrequires[0]
        else:
            pkgname = requires_recursive[0]
        filename = PkgCheck(pkgname, dict_asks, dict_cross_asks)
        if (whatrequires):
            arg.whatrequires[0] = filename
        else:
            arg.requires_recursive[0] = filename
        if (not filename):
            print _("Error: can't find package name or filename \"") + pkgname + "\"."
            exit_proc(arg)
        startlist = [filename]
    else:
        startlist = brokenlist

    # Narrow the asks dicts to the reachable set, strip dependencies
    # external to that set, then recompute the dependency graph.
    (dict_asks, dict_cross_asks) = RemakeAsks(startlist, dict_asks, dict_depend, dict_cross_asks, arg, brokenlist)
    (new_dict_asks, new_dict_provides) = RemoveExternal(dict_asks, dict_provides, dict_cross_asks, dict_cross_provides, ifshow)
    (new_dict_cross_asks, new_dict_cross_provides) = RemoveExternal(dict_cross_asks, dict_cross_provides, dict_asks, dict_provides, ifshow)
    (dict_depend, count_depend) = FindDepend(new_dict_provides, new_dict_asks, new_dict_cross_provides, new_dict_cross_asks, arg)
    return (dict_depend, count_depend, new_dict_asks, new_dict_provides, new_dict_cross_asks, new_dict_cross_provides)
|
|
|
|
def main(args):
    """Entry point: parse options, fetch and parse repository metadata,
    then build and output the dependency graph (or the loops /
    alternatives / broken-package subsets selected by options)."""
    #define arguments namespace
    arg = ParseCommandLine()
    ifnotquiet = arg.quiet
    ifverbose = arg.verbose
    ifnograph = arg.nograph
    ifrequires_recursive = arg.requires_recursive
    ifwhatrequires = arg.whatrequires
    ifloops = arg.loops
    ifalternatives = arg.alternatives
    ifbroken = arg.broken
    # -l/-a/-b are mutually exclusive "special action" modes.
    ifoptact = ifloops or ifalternatives or ifbroken
    ifunprovided = arg.unprovided

    arg.crossurl = []
    arg.tmp_dir = ""
    # Remember the requested output target; arg.output stays None until
    # the graph is actually about to be written (CheckOutput opens it).
    if (arg.output):
        file_output = arg.output[0]
    else:
        file_output = default_output
    arg.output = None
    # Reject option combinations that would produce no output at all.
    if (not ifnotquiet) and (not ifverbose) and (ifnograph):
        print _("Do not use -q/--quiet and -n/--nograph without -v/--verbose together.")
        print _("That way there is no information to output anywhere. Nothing will be done.")
        exit_proc(arg)
    if (ifunprovided and ifbroken):
        print _("Do not use -u/--unprovided and -b/--broken options together.")
        print _("-b does everything that do -u and a little more.")
        exit_proc(arg)
    # Normalize repository URLs/paths (main repo plus any --cross repos).
    arg.repository = arg.repository[0]
    arg.repository = CheckURLPATH(arg.repository, arg)
    if (arg.cross):
        crossrange = range(len(arg.cross))
        for i in crossrange:
            arg.crossurl.append(CheckURLPATH(arg.cross[i], arg))
    CheckOptions(arg)
    arg.tmp_dir = tempfile.mkdtemp() + '/'
    #get all needed files
    GetFile(arg.repository, synthesis_arch, arg.tmp_dir, arg)
    PrepareSynthFile(arg.tmp_dir, arg)
    if (arg.cross):
        # Each cross-repository gets its own numbered temp sub-directory.
        for i in crossrange:
            temp_subdir = get_temp(i, arg)
            GetFile(arg.crossurl[i], synthesis_arch, temp_subdir, arg)
            PrepareSynthFile(temp_subdir, arg)

    #generate dictionaries
    dict_provides = {}
    dict_asks = {}
    dict_cross_provides = {}
    dict_cross_asks = {}
    ParseSynthFile(dict_provides, dict_asks, arg.tmp_dir, arg)
    if (arg.cross):
        for i in crossrange:
            temp_subdir = get_temp(i, arg)
            ParseSynthFile(dict_cross_provides, dict_cross_asks, temp_subdir, arg)
    (dict_depend, count_depend) = FindDepend(dict_provides, dict_asks, dict_cross_provides, dict_cross_asks, arg)

    # Restrict the graph to one package's requirers/requirements if asked.
    if (ifrequires_recursive or ifwhatrequires):
        answer = RemakeDicts(dict_depend, count_depend, dict_asks, dict_provides, dict_cross_asks, dict_cross_provides, arg)
        if (answer):
            (dict_depend, count_depend, dict_asks, dict_provides, dict_cross_asks, dict_cross_provides) = answer

    arg.output = file_output
    CheckOutput(arg)
    if (ifoptact): ##REMAKE (MUTUALLY EXCLUSIVE)
        if (ifloops):
            loops = FindLoops(dict_depend, arg)
            if (ifnograph):
                exit_proc(arg)
            colors = AssignDictColors(loops)
            OutputLoopGraph(loops, colors, arg)
        elif (ifalternatives):
            alternatives = FindAlternatives(dict_provides, arg)
            if ifnograph:
                exit_proc(arg)
            colors = AssignDictColors(alternatives)
            OutputAltGraph(alternatives, colors, arg)
        elif (ifbroken):
            brokengraph = FindBroken(dict_depend, count_depend, dict_asks, dict_provides, dict_cross_asks, dict_cross_provides, arg)
            if ifnograph:
                exit_proc(arg)
            dict_color = AssignColors(brokengraph[0], brokengraph[1], arg)
            OutputGraph(brokengraph[0], dict_color, arg)
    else:
        if ifnograph:
            exit_proc(arg)
        # Default mode: colour and emit the full dependency graph.
        dict_color = AssignColors(dict_depend, count_depend, arg)
        OutputGraph(dict_depend, dict_color, arg)

    exit_proc(arg)
|
|
|
|
# Script entry point: delegate to main() with the raw argv.
if __name__ == "__main__":
    main(sys.argv)
|