Mirror of https://abf.rosa.ru/djam/repo-analyzer.git, synced 2025-02-23 10:02:54 +00:00
DOT graph render; dependency loopback check
parent 38966cb244
commit 4e4efb831d

1 changed file with 121 additions and 2 deletions
@@ -10,8 +10,18 @@ import string
import rpm
import re

gettext.install('urpm-tools')

DB = 'repo.db'

def parseargs():
    parser = argparse.ArgumentParser(description=_('analyze repositories metadata '
                                                   ' from repo.db'))
    parser.add_argument('-d', '--dot-graphs', action='store_true',
                        help=_('visualize dependencies in .DOT graphs'))
    opts = parser.parse_args()
    return opts

def detect_broken_dependencies(dbc):
    def print_broken_packages():
        for rpb_name in sorted(repo_packages_broken.keys()):
@@ -90,13 +100,11 @@ SELECT packages.id, nvra, repodir_id, repodirs.name,
            dep_package_id IN (%(pids)s) AND
            packages.id NOT IN (%(pids)s)
            ORDER BY repodir_id, nvra""" % {'pids': pids}).fetchall()
#        print len(packages_broken_recurs)
        for packb in packages_broken_recurs:
            all_broken[packb[0]] = {'repo': packb[2], 'nvra': packb[1],
                                    'reqname': packb[4], 'build_arch': packb[5],
                                    'depid': packb[6]}
            broken_recursive.append(packb[0])
#        print len(all_broken.keys())

    if broken_recursive:
        print 'Recursive broken dependencies:'
@@ -122,6 +130,113 @@ SELECT DISTINCT build_arch FROM rpm_requires
    print_broken_packages()


def OutputGraphHead(file_output, dg_name):
    """Output Graph head.

    Static information about graph.
    """
    file_output.write('\n\ndigraph "%s" {\n' % dg_name + \
                      'size="20.69,25.52";\nratio="fill";\n' + \
                      'rankdir="TB";\nnode[style="filled"];\nnode[shape="box"];\n\n')

def OutputGraphTail(file_output):
    """Finish the graph.
    """
    file_output.write('}\n')

def render_dot_graphs(dbc):
    repodirs = dbc.execute("""
SELECT id, name, sources, path FROM repodirs ORDER BY id
""").fetchall()
    for repodir in repodirs:
        (rd_id, rd_name) = (repodir[0], repodir[1])

        dot_file = open('repo-%d.dot' % rd_id, 'w')
        packages_processed = {}
        low_level_pkgs = dbc.execute("""
SELECT packages.id, packages.nvra FROM packages
    WHERE repodir_id = ? AND
        NOT EXISTS (SELECT 1 FROM package_depend_res, packages dp
            WHERE package_id = packages.id AND
                dp.id = dep_package_id AND dp.repodir_id = ?)
    ORDER BY packages.id""", [rd_id, rd_id]).fetchall()
        for pkg_rec in low_level_pkgs:
            packages_processed[pkg_rec[0]] = pkg_rec[1]

        OutputGraphHead(dot_file, rd_name)
        pkg_linked = {}
        level = 0
        curr_level_pkgs = [pkg_rec[0] for pkg_rec in low_level_pkgs]
        while len(curr_level_pkgs) > 0:
            in_curr_pkgs = ','.join([str(pkg_id)
                                     for pkg_id in curr_level_pkgs])
            depend_pkgs = dbc.execute("""
SELECT DISTINCT packages.id, packages.nvra, package_depend_res.dep_package_id
    FROM package_depend_res, packages
    WHERE repodir_id = ? AND package_depend_res.dep_package_id IN (%s)
        AND package_depend_res.package_id = packages.id
    ORDER BY packages.id""" % in_curr_pkgs, [rd_id]).fetchall()
            next_level_pkgs = []
            for pkg_rec in depend_pkgs:
                if level == 0:
                    pkg_linked[pkg_rec[2]] = True
                if pkg_rec[0] not in packages_processed:
                    packages_processed[pkg_rec[0]] = pkg_rec[1]
                    next_level_pkgs.append(pkg_rec[0])
                if pkg_rec[0] != pkg_rec[2]:
                    dot_file.write('"%s" -> "%s" [color="0.66 1 0.66"];\n' %
                                   (packages_processed[pkg_rec[0]],
                                    packages_processed[pkg_rec[2]]))
            if level == 0:
                for ll_rec in low_level_pkgs:
                    if ll_rec[0] not in pkg_linked:
                        dot_file.write('"%s" [color="0.66 0.66 1"];\n' %
                                       packages_processed[ll_rec[0]])
            curr_level_pkgs = next_level_pkgs
            level += 1

        OutputGraphTail(dot_file)

def detect_loops(dbc):
    header = '===\n' \
             'Loopbacks:'
    repodirs = dbc.execute("""
SELECT id, name, sources, path FROM repodirs ORDER BY id
""").fetchall()
    for repodir in repodirs:
        (rd_id, rd_name) = (repodir[0], repodir[1])
        loopbacks = dbc.execute("""
SELECT p.id, p.nvra, rpm_requires.name
    FROM package_depend_res pdr, packages p, rpm_requires
    WHERE pdr.package_id = p.id AND pdr.package_id = dep_package_id AND
        rpm_requires.id = pdr.requires_id and p.repodir_id = ?
    ORDER BY p.nvra, rpm_requires.name
""", [rd_id]).fetchall()
        if loopbacks:
            if header:
                print header
                header = None
            print '%d) %s' % (rd_id, rd_name)
            pre_pkg_id = None
            pre_pkg_name = None
            requires = []
            cnt = 0
            for lb_rec in loopbacks:
                pkg_id = lb_rec[0]
                pkg_name = lb_rec[1]
                if pkg_id != pre_pkg_id:
                    cnt += 1
                    if pre_pkg_id is not None:
                        print '\t%s (%s)' % (pre_pkg_name, ','.join(requires))
                        requires = []
                    pre_pkg_id = pkg_id
                    pre_pkg_name = pkg_name
                requires.append(lb_rec[2])
            if pre_pkg_id is not None:
                print '\t%s (%s)' % (pre_pkg_name, ','.join(requires))
            print 'Total: %d' % cnt


def detect_lost_sources(dbc):
    print '==='
    print 'Lost sources:'
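For reference, the level-by-level traversal that render_dot_graphs() drives from the repo.db tables can be sketched on a hypothetical in-memory dependency map (toy data, not part of the repository code): level 0 holds the packages with no in-repository dependencies, each following level holds the packages that depend on the previous one, and every dependent/dependency pair becomes one DOT edge.

# Toy sketch of the traversal behind render_dot_graphs(); all data is made up.
deps = {                      # package -> packages it depends on (hypothetical)
    'glibc': set(),
    'bash': {'glibc'},
    'coreutils': {'glibc'},
    'make': {'bash', 'glibc'},
}

# Reverse map: package -> packages that depend on it.
rdeps = {}
for pkg, reqs in deps.items():
    for req in reqs:
        rdeps.setdefault(req, set()).add(pkg)

lines = ['digraph "toy" {']
processed = set(p for p, reqs in deps.items() if not reqs)  # "low level" packages
level_pkgs = set(processed)
while level_pkgs:
    next_level = set()
    for dep in sorted(level_pkgs):
        for pkg in sorted(rdeps.get(dep, ())):
            lines.append('"%s" -> "%s";' % (pkg, dep))      # dependent -> dependency
            if pkg not in processed:
                processed.add(pkg)
                next_level.add(pkg)
    level_pkgs = next_level
lines.append('}')
print('\n'.join(lines))

Since a package enters exactly one level, each dependent -> dependency edge is emitted once, which mirrors how the real code only expands packages not yet in packages_processed.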
@@ -387,10 +502,14 @@ SELECT packages.nvra, package_files.path, obj_symbols.name
    print 'Total: %d' % len(symbols_not_resolved)

def main(args):
    options = parseargs()

    conn = sqlite3.connect(DB)
    dbc = conn.cursor()
    detect_broken_dependencies(dbc)
    if options.dot_graphs:
        render_dot_graphs(dbc)
    #detect_loops(dbc)
    detect_lost_sources(dbc)
    analyze_partitioning(dbc)
    detect_lost_object_files(dbc)
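The repo-<id>.dot files written by render_dot_graphs() are plain Graphviz sources. A possible post-processing step, not part of this commit and assuming Graphviz's dot tool is installed, is to render them to SVG:

# Hypothetical helper, not in the repository: render every repo-*.dot file
# produced by render_dot_graphs() to SVG via Graphviz's `dot`.
import glob
import subprocess

for dot_path in glob.glob('repo-*.dot'):
    svg_path = dot_path[:-len('.dot')] + '.svg'
    subprocess.check_call(['dot', '-Tsvg', dot_path, '-o', svg_path])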