mirror of https://abf.rosa.ru/djam/abf-console-client-src.git
synced 2025-02-23 18:02:50 +00:00

new features: locate, abfcd

parent ea917ec1b0
commit dd6727e016

8 changed files with 541 additions and 164 deletions
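For reference, a rough usage sketch of the two new features introduced here (the paths and project names below are placeholders, not taken from this commit):

    # remember the locations of already-cloned projects
    abf locate update-recursive -d ~/abf-projects
    # print the stored location of a single project
    abf locate -p mygroup/myproject
    # cd into that project (shell function installed by abfcd.sh)
    abfcd mygroup/myproject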
Makefile (2 changes)

@ -34,7 +34,9 @@ install:
mkdir -p $(DESTDIR)$(USRSHAREDIR)/bash-completion
mkdir -p $(DESTDIR)$(ETCDIR)/bash_completion.d
mkdir -p $(DESTDIR)$(ETCDIR)/profile.d
cp "bash_autocomplete" $(DESTDIR)$(USRSHAREDIR)/bash-completion/abf
cp "abfcd.sh" $(DESTDIR)$(ETCDIR)/profile.d/abfcd.sh
@ -1,6 +1,6 @@
Name: abf-console-client
Version: 1.1
Release: 2
Version: 1.3
Release: 0
Summary: Python API to ABF (https://abf.rosalinux.ru)
Group: System/Configuration/Packaging
License: GPLv2

@ -8,10 +8,11 @@ URL: http://wiki.rosalab.ru/en/index.php/ABF_Console_Client
Source0: %{name}-%{version}.tar.gz
BuildArch: noarch

Requires: python-abf >= 1.1-2
Requires: python-abf >= 1.2-0
Requires: python-beaker
Requires: python-rpm
Requires: git
Requires: python-yaml

%description
Console client for ABF (https://abf.rosalinux.ru).

@ -35,3 +36,4 @@ ln -s %{_datadir}/bash-completion/abf %{buildroot}/%{_sysconfdir}/bash_completio
#bash_completion files
%{_datadir}/bash-completion/abf
%{_sysconfdir}/bash_completion.d/abf
%{_sysconfdir}/profile.d/abfcd.sh
abf.py (304 changes)

@ -6,7 +6,6 @@ import argparse
|
|||
from argparse import RawDescriptionHelpFormatter
|
||||
import os
|
||||
import shutil
|
||||
from datetime import datetime
|
||||
import pdb
|
||||
|
||||
|
||||
|
@ -14,6 +13,7 @@ import pdb
|
|||
from abf.console.config import Config
|
||||
from abf.console.log import Log
|
||||
cfg = Config()
|
||||
projects_cfg = Config(conf_path='~/.abf_projects', main_conf=False)
|
||||
log = Log('abf')
|
||||
|
||||
|
||||
|
@ -30,11 +30,13 @@ default_build_platform = cfg['user']['default_build_platform']
|
|||
|
||||
#models = Models(domain, login, password)
|
||||
|
||||
#r = Platform(models, init_data={'id':64, 'name': 'AAAA'})
|
||||
#r = models.platforms[64]
|
||||
#r = models.repositories[1]
|
||||
#r = models.projects[('abf_core', 'abf_core')]
|
||||
#r = models.projects['akirilenko/libqb']
|
||||
#r = models.projects['akirilenko/fba']
|
||||
#r = Platform(models, ID=64)
|
||||
#r = Repository(models, ID=71)
|
||||
|
||||
#r = Project(models, 'akirilenko/mock-urpm')
|
||||
#r = BuildList(models, ID=750988)
|
||||
#r = models.buildlists['715552']
|
||||
#r = models.arches['1']
|
||||
|
||||
|
@ -44,9 +46,22 @@ default_build_platform = cfg['user']['default_build_platform']
|
|||
|
||||
#r = models.get_arches()
|
||||
|
||||
#print r
|
||||
|
||||
#print r.name
|
||||
#print r.owner
|
||||
#print r.visibility
|
||||
#print r.repositories
|
||||
|
||||
#print r
|
||||
#print r.platform.params_dict
|
||||
|
||||
#r = models.repositories[1]
|
||||
#print 'WELL DONE'
|
||||
#print r
|
||||
#print r.owner
|
||||
#print r.owner.email
|
||||
|
||||
#print r.repositories[0].platform.repositories[2].platform
|
||||
|
||||
#exit()
|
||||
|
||||
|
@ -61,9 +76,9 @@ def parse_command_line():
|
|||
subparsers = parser.add_subparsers()
|
||||
|
||||
# help
|
||||
parser_get = subparsers.add_parser('help', help='show a help for command')
|
||||
parser_get.add_argument('command', action='store', nargs='?', help='a command to show help for')
|
||||
parser_get.set_defaults(func=help)
|
||||
parser_help = subparsers.add_parser('help', help='show a help for command')
|
||||
parser_help.add_argument('command', action='store', nargs='?', help='a command to show help for')
|
||||
parser_help.set_defaults(func=help)
|
||||
|
||||
# get
|
||||
parser_get = subparsers.add_parser('get', help='clone a project from ABF')
|
||||
|
@ -73,9 +88,27 @@ def parse_command_line():
|
|||
parser_get.set_defaults(func=get)
|
||||
|
||||
# put
|
||||
parser_get = subparsers.add_parser('put', help='run "git add --all", "git commit -m <your message>", "git push"')
|
||||
parser_get.add_argument('message', action='store', help='a message to commit with')
|
||||
parser_get.set_defaults(func=put)
|
||||
parser_put = subparsers.add_parser('put', help='Execute "git add --all", "git commit -m <your message>", "git push"')
|
||||
parser_put.add_argument('message', action='store', help='a message to commit with')
|
||||
parser_put.set_defaults(func=put)
|
||||
|
||||
# show
|
||||
parser_show = subparsers.add_parser('show', help='show some general information. Bash autocomplete uses it.')
|
||||
show_choices = ['build-repos', 'build-platforms', 'save-to-repos', 'save-to-platforms']
|
||||
parser_show.add_argument('type', action='store', choices=show_choices,help='The type of information to show')
|
||||
parser_show.add_argument('-p', '--project', action='store', help='Project to show information for (if needed). Format: '
|
||||
'"[group/]name". If no group specified, default group will be used.')
|
||||
parser_show.set_defaults(func=show)
|
||||
|
||||
# locate
parser_locate = subparsers.add_parser('locate', help='remember the location of a project working copy and reuse it later (e.g. by "abfcd").')
locate_choices = ['update', 'update-recursive']
parser_locate.add_argument('action', action='store', choices=locate_choices, nargs='?', help='The action to perform. If not specified, print the stored location of the project.')
parser_locate.add_argument('-p', '--project', action='store', help='Project to show the location for (if needed). Format: '
'"[group/]name". If no group is specified, the default group will be used.')
parser_locate.add_argument('-d', '--directory', action='store', help='Directory to update locations for. It should be a '
'git repository for "update" and any directory for "update-recursive". If not specified, the current directory will be used.')
parser_locate.set_defaults(func=locate)
|
||||
|
||||
# build
|
||||
parser_build = subparsers.add_parser('build', help='Initiate a build task on ABF.', formatter_class=RawDescriptionHelpFormatter,
|
||||
|
@ -107,24 +140,31 @@ def parse_command_line():
|
|||
upd_types = ['security', 'bugfix', 'enhancement', 'recommended', 'newpackage']
|
||||
parser_build.add_argument('--update-type', action='store', choices=upd_types, help='Update type. Default is "%s".' %
|
||||
(BuildList.update_types[0]) )
|
||||
parser_build.add_argument('--skip-spec-check', action='store_true', help='Do not check spec file.' )
|
||||
parser_build.set_defaults(func=build)
|
||||
|
||||
# publish
|
||||
parser_build = subparsers.add_parser('publish', help='Publish the task that have already been built.')
|
||||
parser_build.add_argument('task_ids', action='store', nargs="+", help='The IDs of the tasks to publish.')
|
||||
parser_build.set_defaults(func=publish)
|
||||
parser_publish = subparsers.add_parser('publish', help='Publish tasks that have already been built.')
|
||||
parser_publish.add_argument('task_ids', action='store', nargs="+", help='The IDs of tasks to publish.')
|
||||
parser_publish.set_defaults(func=publish)
|
||||
|
||||
# backport
|
||||
parser_build = subparsers.add_parser('backport', help='Copy all the files from SRC_BRANCH to DST_BRANCH')
|
||||
parser_build.add_argument('src_branch', action='store', help='source branch')
|
||||
parser_build.add_argument('dst_branch', action='store', nargs='?', help='destination branch. If not specified, it\'s assumed to be the current branch')
|
||||
parser_build.add_argument('-p', '--pack', action='store_true', help='Create a tar.gz from the src_branch and put this archive and spec file to dst_branch')
|
||||
parser_build.set_defaults(func=backport)
|
||||
parser_backport = subparsers.add_parser('backport', help='Copy all the files from SRC_BRANCH to DST_BRANCH')
|
||||
parser_backport.add_argument('src_branch', action='store', help='source branch')
|
||||
parser_backport.add_argument('dst_branch', action='store', nargs='?', help='destination branch. If not specified, it\'s assumed to be the current branch')
|
||||
parser_backport.add_argument('-p', '--pack', action='store_true', help='Create a tar.gz from the src_branch and put this archive and spec file to dst_branch')
|
||||
parser_backport.set_defaults(func=backport)
|
||||
|
||||
# buildstatus
|
||||
parser_build = subparsers.add_parser('buildstatus', help='get a build-task status')
|
||||
parser_build.add_argument('ID', action='store', nargs='?', help='build list ID')
|
||||
parser_build.set_defaults(func=buildstatus)
|
||||
parser_buildstatus = subparsers.add_parser('buildstatus', help='get a build-task status')
parser_buildstatus.add_argument('ID', action='store', nargs='?', help='build list ID')
parser_buildstatus.set_defaults(func=buildstatus)
|
||||
|
||||
# clean
|
||||
parser_clean = subparsers.add_parser('clean', help='Analyze spec file and show missing and unnecessary files from '
|
||||
'the current git repository directory.')
|
||||
parser_clean.add_argument('--auto-remove', action='store_true', help='automatically remove all the unnecessary files')
|
||||
parser_clean.set_defaults(func=clean)
|
||||
|
||||
command_line = parser.parse_args(sys.argv[1:])
|
||||
|
||||
|
@ -135,6 +175,42 @@ def help():
|
|||
sys.argv = [sys.argv[0], '-h']
|
||||
parse_command_line()
|
||||
|
||||
def get_project(models, must_exist=True, name=None):
|
||||
if name:
|
||||
tmp = name.split('/')
|
||||
if len(tmp) > 2:
|
||||
log.error('The project format is "[owner_name/]project_name"')
|
||||
exit(1)
|
||||
elif len(tmp) == 1:
|
||||
project_name = tmp[0]
|
||||
log.info("The project group is assumed to be " + default_group)
|
||||
owner_name = default_group
|
||||
else: # len == 2
|
||||
owner_name = tmp[0]
|
||||
project_name = tmp[1]
|
||||
else:
|
||||
owner_name, project_name = get_project_name()
|
||||
if not project_name:
|
||||
if must_exist:
|
||||
log.error('You are not in a git repository directory. Please specify a project name!')
|
||||
exit(1)
|
||||
else:
|
||||
return None
|
||||
_update_location()
|
||||
|
||||
try:
|
||||
proj = Project(models, '%s/%s' % (owner_name, project_name))
|
||||
except PageNotFoundError:
|
||||
log.error('The project %s/%s does not exist!' % (owner_name, project_name))
|
||||
exit(1)
|
||||
except ForbiddenError:
|
||||
log.error('You do not have access to the project %s/%s!' % (owner_name, project_name))
|
||||
exit(1)
|
||||
|
||||
log.debug('Project: %s' % proj)
|
||||
return proj
|
||||
|
||||
|
||||
def get():
|
||||
log.debug('GET started')
|
||||
proj = command_line.project
|
||||
|
@ -142,26 +218,34 @@ def get():
|
|||
if len(tmp) > 2:
|
||||
log.error('Specify a project name as "group_name/project_name" or just "project_name"')
|
||||
exit(1)
|
||||
if len(tmp) == 1:
|
||||
elif len(tmp) == 1:
|
||||
project_name = proj
|
||||
proj = '%s/%s' % (cfg['user']['default_group'], proj)
|
||||
elif len(tmp) == 2:
|
||||
project_name = tmp[1]
|
||||
|
||||
uri = "%s/%s.git" % (cfg['user']['git_uri'], proj)
|
||||
cmd = ['git', 'clone', uri]
|
||||
if command_line.branch:
|
||||
cmd += ['-b', command_line.branch]
|
||||
execute_command(cmd, log=log, print_to_stdout=True, exit_on_error=True)
|
||||
execute_command(cmd, print_to_stdout=True, exit_on_error=True)
|
||||
|
||||
projects_cfg[proj]['location'] = os.path.join(os.getcwd(), project_name)
|
||||
|
||||
def put():
|
||||
log.debug('PUT started')
|
||||
|
||||
_update_location()
|
||||
|
||||
cmd = ['git', 'add', '--all']
|
||||
execute_command(cmd, log=log, print_to_stdout=True, exit_on_error=True)
|
||||
execute_command(cmd, print_to_stdout=True, exit_on_error=True)
|
||||
|
||||
cmd = ['git', 'commit', '-m', command_line.message]
|
||||
execute_command(cmd, log=log, print_to_stdout=True, exit_on_error=True)
|
||||
execute_command(cmd, print_to_stdout=True, exit_on_error=True)
|
||||
|
||||
log.info('Committed.')
|
||||
cmd = ['git', 'push']
|
||||
execute_command(cmd, log=log, print_to_stdout=True, exit_on_error=True)
|
||||
execute_command(cmd, print_to_stdout=True, exit_on_error=True)
|
||||
log.info('Pushed')
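For reference, a minimal usage sketch of the "put" command (the commit message below is a placeholder):

    # runs "git add --all", "git commit -m <message>" and "git push" in the current repository
    abf put "update abfcd completion"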
|
||||
|
||||
|
||||
|
@ -185,22 +269,24 @@ def backport():
|
|||
path = get_root_git_dir()
|
||||
log.debug("Repository root folder is " + path)
|
||||
|
||||
_update_location(path=path)
|
||||
|
||||
stage = 0
|
||||
try:
|
||||
if start_branch != dbrn:
|
||||
cmd = ['git', 'checkout', dbrn]
|
||||
execute_command(cmd, log, print_to_stdout=True, cwd=path)
|
||||
execute_command(cmd, print_to_stdout=True, cwd=path)
|
||||
stage = 1
|
||||
cmd = ['rm', '-rf', './*']
|
||||
execute_command(cmd, log=log, print_to_stdout=True, cwd=path)
|
||||
execute_command(cmd, print_to_stdout=True, cwd=path)
|
||||
stage = 2
|
||||
cmd = ['git', 'checkout', sbrn, '*']
|
||||
execute_command(cmd, log=log, print_to_stdout=True, cwd=path)
|
||||
execute_command(cmd, print_to_stdout=True, cwd=path)
|
||||
stage = 3
|
||||
if command_line.pack:
|
||||
pack_project(log, path)
|
||||
pack_project(path)
|
||||
cmd = ['git', 'reset']
|
||||
execute_command(cmd, log=log, print_to_stdout=True, cwd=path)
|
||||
execute_command(cmd, print_to_stdout=True, cwd=path)
|
||||
except Exception, ex:
|
||||
if type(ex) == ReturnCodeNotZero:
|
||||
log.error(str(ex))
|
||||
|
@ -210,7 +296,7 @@ def backport():
|
|||
if stage == 1 or stage == 2:
|
||||
log.info("Checking out the initial branch (%s)" % start_branch)
|
||||
cmd = ['git', 'reset', '--hard', start_branch]
|
||||
execute_command(cmd, log=log, print_to_stdout=True, cwd=path)
|
||||
execute_command(cmd, print_to_stdout=True, cwd=path)
|
||||
|
||||
log.info('Done')
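A rough usage sketch of "backport" (the branch names are placeholders): it copies all files from the source branch into the destination branch, and with --pack it also puts a tar.gz of the source branch plus the spec file into the destination branch:

    abf backport master stable --pack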
|
||||
|
||||
|
@ -228,33 +314,9 @@ def build():
|
|||
|
||||
|
||||
# get project
|
||||
if command_line.project:
|
||||
tmp = command_line.project.split('/')
|
||||
if len(tmp) > 2:
|
||||
log.error('The project format is "[owner_name/]project_name"')
|
||||
exit(1)
|
||||
elif len(tmp) == 1:
|
||||
project_name = tmp[0]
|
||||
log.info("The project group is assumed to be " + default_group)
|
||||
owner_name = default_group
|
||||
else: # len == 2
|
||||
owner_name = tmp[0]
|
||||
project_name = tmp[1]
|
||||
else:
|
||||
owner_name, project_name = get_project_name()
|
||||
if not project_name:
|
||||
log.error('You are not in git repository directory. Specify the project name please!')
|
||||
exit(1)
|
||||
try:
|
||||
proj = models.projects['%s/%s' % (owner_name, project_name)]
|
||||
except PageNotFoundError:
|
||||
log.error('The project %s/%s does not exist!' % (owner_name, project_name))
|
||||
exit(1)
|
||||
except ForbiddenError:
|
||||
log.error('You do not have acces to the project %s/%s!' % (owner_name, project_name))
|
||||
exit(1)
|
||||
|
||||
log.debug('Project: %s' % proj)
|
||||
proj = get_project(models, must_exist=True, name=command_line.project)
|
||||
if not command_line.project and not command_line.skip_spec_check: # local git repository
|
||||
find_spec_problems()
|
||||
if not proj.is_package:
|
||||
log.error('The project %s is not a package and can not be built.' % proj)
|
||||
exit(1)
|
||||
|
@ -262,7 +324,7 @@ def build():
|
|||
|
||||
# get architectures
|
||||
arches = []
|
||||
all_arches = models.get_arches()
|
||||
all_arches = Arch.get_arches(models)
|
||||
if command_line.arch:
|
||||
for arch in command_line.arch:
|
||||
a = models.arches.get_string_key(arch)
|
||||
|
@ -301,6 +363,7 @@ def build():
|
|||
ref_type = (branch_def and 'commit') or (tag_def and 'tag')
|
||||
refs = proj.get_refs_list(models)
|
||||
for ref in refs:
|
||||
|
||||
if ref['ref'] == to_resolve and ref['object']['type'] == ref_type:
|
||||
commit_hash = ref['object']['sha']
|
||||
if commit_hash == None:
|
||||
|
@ -359,7 +422,7 @@ def build():
|
|||
|
||||
|
||||
# get the list of build repositories
|
||||
build_platforms = models.get_build_platforms()
|
||||
build_platforms = Platform.get_build_platforms(models)
|
||||
build_platform_names = [x.name for x in build_platforms]
|
||||
build_repositories = []
|
||||
if command_line.repository:
|
||||
|
@ -419,11 +482,11 @@ def publish():
|
|||
models = Models(domain, login, password)
|
||||
for task_id in command_line.task_ids:
|
||||
try:
|
||||
bl = models.buildlists[task_id]
|
||||
bl = BuildList(models, task_id)
|
||||
if bl.status != 0:
|
||||
log.error("The status of build task %s is \"%s\", can not publish it!" % (bl.id, bl.status_by_id[bl.status]))
|
||||
log.error("The status of build task %s is \"%s\", can not publish it!" % (bl.id, bl.status_string))
|
||||
continue
|
||||
res = bl.publish(models)
|
||||
res = bl.publish()
|
||||
except AbfApiException, ex:
|
||||
log.error('Could not publish task %s: %s' %(task_id, str(ex)))
|
||||
|
||||
|
@ -435,22 +498,121 @@ def buildstatus():
|
|||
exit(1)
|
||||
try:
|
||||
models = Models(domain, login, password)
|
||||
bl = models.buildlists[command_line.ID]
|
||||
bl = BuildList(models, command_line.ID)
|
||||
except AbfApiException, ex:
|
||||
log.error(str(ex))
|
||||
exit(3)
|
||||
print '%-20s%s' %('Owner:', bl.owner['name'])
|
||||
print '%-20s%s' %('Status:', BuildList.status_by_id[bl.status])
|
||||
print '%-20s%s' %('Owner:', bl.owner.name)
|
||||
print '%-20s%s' %('Status:', bl.status_string)
|
||||
print '%-20s%s' %('Build for platform:', bl.build_for_platform)
|
||||
print '%-20s%s' %('Save to repository:', bl.save_to_repository)
|
||||
print '%-20s%s' %('Build repositories:', bl.include_repos)
|
||||
print '%-20s%s' %('Architecture:', bl.arch.name)
|
||||
print '%-20s%s' %('Created at:', datetime.fromtimestamp(float(bl.created_at)))
|
||||
print '%-20s%s' %('Updated at:', datetime.fromtimestamp(float(bl.updated_at)))
|
||||
print '%-20s%s' %('Created at:', bl.created_at)
|
||||
print '%-20s%s' %('Updated at:', bl.updated_at)
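A rough sketch of the build workflow these commands implement (the task ID below is a placeholder):

    # start a build task from inside a cloned project directory
    abf build
    # check the status of the task later
    abf buildstatus 123456
    # publish it once it has been built
    abf publish 123456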
|
||||
|
||||
def _update_location(path=None):
|
||||
try:
|
||||
if not path:
|
||||
path = os.getcwd()
|
||||
log.debug("Updating project location for %s" % path)
|
||||
group, name = get_project_name(path)
|
||||
if group:
|
||||
proj = '%s/%s' % (group, name)
|
||||
projects_cfg[proj]['location'] = path
|
||||
log.info("Project %s has been located in %s" % (proj, path))
|
||||
except:
|
||||
pass
|
||||
|
||||
def _update_location_recursive(path):
|
||||
items = os.listdir(path)
|
||||
if '.git' in items: # it's a git directory!
|
||||
_update_location(path)
|
||||
return
|
||||
|
||||
for item in items:
|
||||
item_path = os.path.join(path, item)
|
||||
if not os.path.isdir(item_path) or os.path.islink(item_path):
|
||||
continue
|
||||
_update_location_recursive(item_path)
|
||||
|
||||
|
||||
def locate():
|
||||
log.debug('LOCATE started')
|
||||
|
||||
if not command_line.action: # show location
|
||||
if not command_line.project:
|
||||
print "To show a project location, you have to specify a project name ('-p' option)"
|
||||
return
|
||||
|
||||
tmp = command_line.project.split('/')
|
||||
if len(tmp) > 2:
|
||||
log.error('error: the project format is "[owner_name/]project_name"')
|
||||
exit(1)
|
||||
elif len(tmp) == 1:
|
||||
proj = '%s/%s' % (default_group, tmp[0])
|
||||
else: # len == 2
|
||||
proj = command_line.project
|
||||
|
||||
if proj not in projects_cfg or 'location' not in projects_cfg[proj] or not projects_cfg[proj]['location']:
|
||||
print 'error: project %s can not be located' % proj
|
||||
exit(1)
|
||||
path = projects_cfg[proj]['location']
|
||||
if not os.path.isdir(path):
|
||||
print 'error: project is no longer located at "%s"' % path
|
||||
projects_cfg[proj]['location'] = ''
|
||||
exit(1)
|
||||
print path
|
||||
return
|
||||
else:
|
||||
if command_line.action == 'update':
|
||||
path = command_line.directory or os.getcwd()
|
||||
_update_location(path)
|
||||
elif command_line.action == 'update-recursive':
|
||||
path = command_line.directory or os.getcwd()
|
||||
_update_location_recursive(path)
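Project locations are stored through the Config class (a thin ConfigParser wrapper) in ~/.abf_projects, so the file should look roughly like the sketch below; the project name and path are made-up examples:

    $ cat ~/.abf_projects
    [mygroup/myproject]
    location = /home/user/abf-projects/myproject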
|
||||
|
||||
def show():
|
||||
log.debug('SHOW started')
|
||||
Log.set_silent()
|
||||
t = command_line.type
|
||||
models = Models(domain, login, password)
|
||||
|
||||
if t in ['build-platforms', 'build-repos']:
|
||||
build_platforms = Platform.get_build_platforms(models)
|
||||
platform_names = []
|
||||
repo_names = []
|
||||
for plat in build_platforms:
|
||||
if plat.repositories:
|
||||
platform_names.append(plat.name)
|
||||
for repo in plat.repositories:
|
||||
repo_names.append(str(repo))
|
||||
out = (t == 'build-platforms' and platform_names) or (t == 'build-repos' and repo_names)
|
||||
|
||||
if t in ['save-to-platforms', 'save-to-repos']:
|
||||
proj = get_project(models, must_exist=True, name=command_line.project)
|
||||
repos = proj.repositories
|
||||
platform_names = []
|
||||
repo_names = []
|
||||
for repo in repos:
|
||||
platform_names.append(repo.platform.name)
|
||||
repo_names.append(str(repo))
|
||||
platform_names = list(set(platform_names))
|
||||
out = (t == 'save-to-platforms' and platform_names) or (t == 'save-to-repos' and repo_names)
|
||||
print ' '.join(out)
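A short usage sketch of "show" (the project name is a placeholder); it prints space-separated lists, which is exactly what the bash completion consumes:

    abf show build-platforms
    abf show save-to-repos -p mygroup/myproject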
|
||||
|
||||
|
||||
|
||||
|
||||
def clean():
|
||||
log.debug("CLEAN started")
|
||||
_update_location()
|
||||
find_spec_problems(auto_remove=command_line.auto_remove)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
parse_command_line()
|
||||
|
||||
if command_line.verbose:
|
||||
Log.set_verbose()
|
||||
if command_line.quiet:
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
import ConfigParser
|
||||
import os
|
||||
from abf.console.misc import mkdirs, ask_user
|
||||
import sys
|
||||
import getpass
|
||||
|
||||
#####################################################
|
||||
# USAGE:
|
||||
|
@ -16,6 +17,36 @@ from abf.console.misc import mkdirs, ask_user
|
|||
|
||||
VERSION = 1
|
||||
|
||||
def mkdirs(path):
|
||||
''' the equivalent of mkdir -p path'''
|
||||
if os.path.exists(path):
|
||||
return
|
||||
path = os.path.normpath(path)
|
||||
items = path.split('/')
|
||||
p = ''
|
||||
for item in items:
|
||||
p += '/' + item
|
||||
if not os.path.isdir(p):
|
||||
os.mkdir(p)
|
||||
|
||||
def ask_user(prompt, can_be_empty=False, variants=None):
|
||||
while True:
|
||||
sys.stdout.write(prompt)
|
||||
sys.stdout.flush()
|
||||
res = sys.stdin.readline()
|
||||
res = res.strip()
|
||||
if not can_be_empty and not res:
|
||||
continue
|
||||
|
||||
if variants:
|
||||
if res in variants:
|
||||
break
|
||||
else:
|
||||
continue
|
||||
break
|
||||
|
||||
return res
|
||||
|
||||
class Section(dict):
|
||||
def __init__(self, config, conf_path, section):
|
||||
self.section = section
|
||||
|
@ -55,8 +86,9 @@ class Section(dict):
|
|||
class Config(dict):
|
||||
default_url = 'https://abf.rosalinux.ru'
|
||||
default_log_path = '/var/log/abf.log'
|
||||
def __init__(self, conf_path='~/.abfcfg'):
|
||||
def __init__(self, conf_path='~/.abfcfg', main_conf=True):
|
||||
self.conf_path = os.path.expanduser(conf_path)
|
||||
self.main_conf = main_conf
|
||||
init = False
|
||||
if not os.path.isfile(self.conf_path):
|
||||
mkdirs(os.path.dirname(self.conf_path))
|
||||
|
@ -76,11 +108,11 @@ class Config(dict):
|
|||
self[section][opt] = self.config.get(section, opt)
|
||||
|
||||
|
||||
if 'config_version' not in self['main'] or int(self['main']['config_version']) != VERSION:
|
||||
if main_conf and ('config_version' not in self['main'] or int(self['main']['config_version']) != VERSION):
|
||||
print "Sorry, the configuration schema has changed or the config file has been corrupted, so you need to reinitialize the configuration."
|
||||
init = True
|
||||
|
||||
if init:
|
||||
if init and main_conf:
|
||||
self.first_start()
|
||||
|
||||
|
||||
|
@ -121,10 +153,12 @@ class Config(dict):
|
|||
|
||||
self['main']['domain'] = domain
|
||||
|
||||
user = ask_user('User: ', can_be_empty=False)
|
||||
self['user']['login'] = user
|
||||
user_default = getpass.getuser()
|
||||
user = ask_user('User [%s]: ' % user_default, can_be_empty=True)
|
||||
self['user']['login'] = user or user_default
|
||||
|
||||
password = ask_user('Password: ', can_be_empty=False)
|
||||
#password = ask_user('Password: ', can_be_empty=False)
|
||||
password = getpass.getpass()
|
||||
self['user']['password'] = password
|
||||
|
||||
git_uri = "%(protocol)s//%(user)s@%(domain)s" % \
|
||||
|
@ -175,7 +209,8 @@ class Config(dict):
|
|||
|
||||
self['main']['config_version'] = VERSION
|
||||
print('Initial configuration has been completed')
|
||||
exit()
|
||||
print 'Now you can execute "abf locate update-recursive -d PATH", where PATH is your directory with ' + \
'cloned ABF projects. It will let you use the "abfcd <project>" command to simply cd to a project directory.\n\n'
|
||||
|
||||
|
||||
|
||||
|
|
|
@ -18,6 +18,11 @@ class Log:
        logging.getLogger("abf").handlers[0].setLevel(logging.ERROR)
        logging.getLogger("models").handlers[0].setLevel(logging.ERROR)

    @staticmethod
    def set_silent():
        Log.set_verbose()
        logging.getLogger("").handlers[0].setLevel(logging.CRITICAL)

    def __init__(self, name=''):
        logging.config.fileConfig(os.path.expanduser('~/.abfcfg'))
        self.log = logging.getLogger(name)

@ -4,41 +4,17 @@ import time
|
|||
import select
|
||||
import subprocess
|
||||
import fcntl
|
||||
import rpm
|
||||
|
||||
from glob import glob
|
||||
import shutil
|
||||
import re
|
||||
import yaml
|
||||
import tempfile
|
||||
|
||||
def mkdirs(path):
|
||||
''' the equivalent of mkdir -p path'''
|
||||
if os.path.exists(path):
|
||||
return
|
||||
path = os.path.normpath(path)
|
||||
items = path.split('/')
|
||||
p = ''
|
||||
for item in items:
|
||||
p += '/' + item
|
||||
if not os.path.isdir(p):
|
||||
os.mkdir(p)
|
||||
from abf.console.log import Log
|
||||
log = Log('models')
|
||||
|
||||
def ask_user(prompt, can_be_empty=False, variants=None):
|
||||
while True:
|
||||
sys.stdout.write(prompt)
|
||||
sys.stdout.flush()
|
||||
res = sys.stdin.readline()
|
||||
res = res.strip()
|
||||
if not can_be_empty and not res:
|
||||
continue
|
||||
|
||||
if variants:
|
||||
if res in variants:
|
||||
break
|
||||
else:
|
||||
continue
|
||||
break
|
||||
|
||||
return res
|
||||
|
||||
class CommandTimeoutExpired(Exception):
|
||||
pass
|
||||
|
@ -48,9 +24,9 @@ class ReturnCodeNotZero(Exception):
|
|||
super(ReturnCodeNotZero, self).__init__(message)
|
||||
self.code = code
|
||||
|
||||
def get_project_name():
|
||||
def get_project_name(path=None):
|
||||
try:
|
||||
output = execute_command(['git', 'remote', 'show', 'origin', '-n'])
|
||||
output = execute_command(['git', 'remote', 'show', 'origin', '-n'], cwd=path)
|
||||
|
||||
for line in output.split('\n'):
|
||||
if line.startswith(' Fetch URL:'):
|
||||
|
@ -63,6 +39,7 @@ def get_project_name():
|
|||
|
||||
def get_project_name_version(spec_path):
|
||||
try:
|
||||
rpm = __import__('rpm') # its initialization takes too long to import it at the top of the file
|
||||
ts = rpm.TransactionSet()
|
||||
rpm_spec = ts.parseSpec(spec_path)
|
||||
name = rpm.expandMacro("%{name}")
|
||||
|
@ -71,6 +48,25 @@ def get_project_name_version(spec_path):
|
|||
except:
|
||||
return None
|
||||
|
||||
def get_project_data(spec_path):
|
||||
rpm = __import__('rpm') # its initialization takes too long to import it at the top of the file
|
||||
ts = rpm.TransactionSet()
|
||||
rpm_spec = ts.parseSpec(spec_path)
|
||||
name = rpm.expandMacro("%{name}")
|
||||
version = rpm.expandMacro("%{version}")
|
||||
sources_all = rpm_spec.sources()
|
||||
|
||||
sources = []
|
||||
patches = []
|
||||
for src in sources_all:
|
||||
name, number, flag = src
|
||||
if flag & 65536: # source file
|
||||
sources.append((name, number))
|
||||
elif flag & 131072:
|
||||
patches.append((name, number))
|
||||
return {'name': name, 'version': version, 'sources': sources, 'patches': patches}
|
||||
|
||||
|
||||
def get_branch_name():
|
||||
try:
|
||||
output = execute_command(['git', 'branch'])
|
||||
|
@ -115,13 +111,12 @@ def get_tag_hash(tag, cwd=None):
|
|||
return h
|
||||
return None
|
||||
|
||||
def clone_git_repo_tmp(uri, log=None, depth=None):
|
||||
if log:
|
||||
log.info('Cloning git repository (temporary workaround)')
|
||||
def clone_git_repo_tmp(uri, depth=None):
|
||||
log.info('Cloning git repository (temporary workaround)')
|
||||
tmp_dir = tempfile.mkdtemp(prefix='tmp_abf_')
|
||||
log.info("Temporary directory is " + tmp_dir)
|
||||
cmd = ['git', 'clone', uri, tmp_dir]
|
||||
execute_command(cmd, log=log, print_to_stdout=True, exit_on_error=True)
|
||||
execute_command(cmd, print_to_stdout=True, exit_on_error=True)
|
||||
return tmp_dir
|
||||
|
||||
|
||||
|
@ -139,15 +134,117 @@ def get_root_git_dir(path=None):
|
|||
else:
|
||||
return p
|
||||
|
||||
def pack_project(log, root_path):
|
||||
# look for a spec file
|
||||
def get_spec_file(root_path):
|
||||
specs = glob(os.path.join(root_path, '*.spec'))
|
||||
log.debug("Spec files found: " + str(specs))
|
||||
if len(specs) == 1:
|
||||
spec = specs[0]
|
||||
return spec
|
||||
else:
|
||||
log.error("Could not find single spec file")
|
||||
return
|
||||
raise Exception("Could not find single spec file")
|
||||
|
||||
def find_spec_problems(exit_on_error=True, strict=False, auto_remove=False):
|
||||
path = get_root_git_dir()
|
||||
files = os.listdir(path)
|
||||
|
||||
files_present = []
|
||||
specs_present = []
|
||||
dirs_present = []
|
||||
yaml_files = []
|
||||
for fl in files:
|
||||
if fl.startswith('.'):
|
||||
continue
|
||||
if os.path.isdir(fl):
|
||||
dirs_present.append(fl)
|
||||
continue
|
||||
if fl.endswith('.spec'):
|
||||
specs_present.append(fl)
|
||||
continue
|
||||
files_present.append(fl)
|
||||
|
||||
yaml_path = os.path.join(path, '.abf.yml')
|
||||
if os.path.isfile(yaml_path):
|
||||
with open(yaml_path, 'r') as fd:
|
||||
yaml_data = yaml.load(fd)
|
||||
if not 'sources' in yaml_data:
|
||||
log.error("Incorrect .abf.yml file: no 'sources' key")
|
||||
exit(1)
|
||||
for fl in yaml_data['sources']:
|
||||
yaml_files.append(fl)
|
||||
|
||||
if len(specs_present) == 0:
|
||||
raise Exception("No spec files found!")
|
||||
elif len(specs_present) > 1:
|
||||
raise Exception("More than one spec file found!")
|
||||
|
||||
spec_path = specs_present[0]
|
||||
|
||||
for d in dirs_present:
|
||||
log.info("warning: directory '%s' was found" % d)
|
||||
if auto_remove:
|
||||
shutil.rmtree(os.path.join(path,d) )
|
||||
|
||||
res = get_project_data(spec_path)
|
||||
|
||||
errors = False
|
||||
warnings = False
|
||||
files_required = []
|
||||
for fl in res['sources'] + res['patches']:
|
||||
fname, n = fl
|
||||
fname_base = os.path.basename(fname)
|
||||
|
||||
files_required.append(fname_base)
|
||||
|
||||
is_url = fname.startswith('http://')
|
||||
presents = fname_base in files_present
|
||||
in_yaml = fname_base in yaml_files
|
||||
|
||||
if is_url and in_yaml:
|
||||
warnings = True
|
||||
log.info('warning: file "%s" is present in the spec (as a URL) and in .abf.yml' % fname_base)
|
||||
|
||||
if is_url and not presents:
|
||||
warnings = True
|
||||
log.info('warning: file "%s" is listed in the spec as a URL, but is not present in the current directory or in the .abf.yml file' % fname_base)
|
||||
|
||||
if presents and in_yaml:
|
||||
warnings = True
|
||||
log.info('warning: file "%s" is present in the git directory and in .abf.yml' % fname_base)
|
||||
|
||||
if not presents and not in_yaml and not is_url:
|
||||
errors = True
|
||||
log.info("error: missing file %s" % fname)
|
||||
|
||||
remove_from_yaml = []
|
||||
for fl in set(files_present + yaml_files):
|
||||
if fl in files_required:
|
||||
continue # file has already been processed
|
||||
presents = fl in files_present
|
||||
in_yaml = fl in yaml_files
|
||||
if presents:
|
||||
warnings = True
|
||||
log.info('warning: unnecessary file "%s"' % fl)
|
||||
if auto_remove:
|
||||
os.remove( os.path.join(path, fl) )
|
||||
|
||||
if in_yaml:
|
||||
warnings = True
|
||||
log.info('warning: unnecessary file "%s" in .abf.yml' % fl)
|
||||
remove_from_yaml.append(fl)
|
||||
|
||||
if auto_remove:
|
||||
for fl in remove_from_yaml:
|
||||
yaml_data['sources'].pop(fl)
|
||||
with open(yaml_path, 'w') as fd:
|
||||
yaml.dump(yaml_data, fd, default_flow_style=False)
|
||||
log.info('.abf.yml file was rewritten')
|
||||
|
||||
if exit_on_error and (errors or (strict and warnings)):
|
||||
exit(1)
|
||||
|
||||
def pack_project(root_path):
|
||||
# look for a spec file
|
||||
spec = get_spec_file(root_path)
|
||||
|
||||
if spec:
|
||||
name, version = get_project_name_version(spec)
|
||||
|
@ -167,7 +264,7 @@ def pack_project(log, root_path):
|
|||
#open(full_tarball_path, 'w').close()
|
||||
cmd = ['tar', 'czf', full_tarball_path, '--exclude-vcs', os.path.basename(root_path)]
|
||||
try:
|
||||
execute_command(cmd, log=log, cwd=os.path.dirname(root_path), exit_on_error=False)
|
||||
execute_command(cmd, cwd=os.path.dirname(root_path), exit_on_error=False)
|
||||
except ReturnCodeNotZero, ex:
|
||||
if ex.code != 1:
|
||||
raise
|
||||
|
@ -188,13 +285,12 @@ def pack_project(log, root_path):
|
|||
|
||||
|
||||
|
||||
def execute_command(command, log=None, shell=False, cwd=None, timeout=0, raiseExc=True, print_to_stdout=False, exit_on_error=False):
|
||||
def execute_command(command, shell=False, cwd=None, timeout=0, raiseExc=True, print_to_stdout=False, exit_on_error=False):
|
||||
output = ""
|
||||
start = time.time()
|
||||
try:
|
||||
child = None
|
||||
if log:
|
||||
log.debug("Executing command: %s" % command)
|
||||
log.debug("Executing command: %s" % command)
|
||||
child = subprocess.Popen(
|
||||
command,
|
||||
shell=shell,
|
||||
|
@ -230,8 +326,7 @@ def execute_command(command, log=None, shell=False, cwd=None, timeout=0, raiseEx
|
|||
if not niceExit and raiseExc:
|
||||
raise CommandTimeoutExpired("Timeout(%s) expired for command:\n # %s\n%s" % (timeout, command, output))
|
||||
|
||||
if log:
|
||||
log.debug("Child returncode was: %s" % str(child.returncode))
|
||||
log.debug("Child returncode was: %s" % str(child.returncode))
|
||||
if child.returncode:
|
||||
if exit_on_error:
|
||||
exit(child.returncode)
|
||||
|
|
abfcd.sh (new file, 14 lines)

@ -0,0 +1,14 @@
abfcd()
{
    if [ -n "$2" ] || [ -z "$1" ] ; then
        echo "Syntax: abfcd [group/]project"
        return 1
    fi
    output=`abf locate -p "$1"`
    if [[ $output == error* ]] || [[ -z $output ]] ; then
        echo "$output";
        return;
    fi

    cd "$output"
}
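Once abfcd.sh is installed as /etc/profile.d/abfcd.sh (as packaged here), or sourced manually, the function resolves a project through "abf locate" and cd's into it; a quick sketch with a placeholder project name:

    source /etc/profile.d/abfcd.sh
    abfcd mygroup/myproject    # cd's to the stored location, or echoes the "error: ..." message from abf locate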
@ -2,7 +2,7 @@
|
|||
# bash-completion add-on for rpmlint
|
||||
# http://bash-completion.alioth.debian.org/
|
||||
|
||||
_opts()
|
||||
__abf_opts()
|
||||
{
|
||||
|
||||
if [[ ${cur} == -* ]] ; then
|
||||
|
@ -11,71 +11,133 @@ _opts()
|
|||
fi
|
||||
}
|
||||
|
||||
_get()
|
||||
__abf_get()
|
||||
{
|
||||
_opts "--branch"
|
||||
__abf_opts "--branch"
|
||||
}
|
||||
|
||||
_put()
|
||||
__abf_put()
|
||||
{
|
||||
_opts ""
|
||||
__abf_opts ""
|
||||
}
|
||||
|
||||
_build()
|
||||
__abf_show()
|
||||
{
|
||||
_opts "--branch --tag --target-platform --arches --repository"
|
||||
__abf_opts "--project"
|
||||
shows="build-repos build-platforms save-to-repos save-to-platforms"
|
||||
if [[ ${cur} != -* ]] ; then
|
||||
if [[ ${prev} == -* ]] ; then
|
||||
return 0;
|
||||
fi
|
||||
COMPREPLY=( $(compgen -W "${shows}" -- "${cur}") )
|
||||
return 0
|
||||
fi
|
||||
}
|
||||
|
||||
_backport()
|
||||
__abf_locate()
|
||||
{
|
||||
_opts "--pack"
|
||||
__abf_opts "--project --directory"
|
||||
actions="update update-recursive"
|
||||
if [[ ${cur} != -* ]] ; then
|
||||
if [[ ${prev} == -* ]] ; then
|
||||
return 0;
|
||||
fi
|
||||
COMPREPLY=( $(compgen -W "${actions}" -- "${cur}") )
|
||||
return 0
|
||||
fi
|
||||
}
|
||||
|
||||
_buildstatus()
|
||||
__abf_build()
|
||||
{
|
||||
_opts "--logs"
|
||||
__abf_opts "--branch --tag --commit --target-platform --arch --repository --save-to-repository --auto-publish --update-type --skip-spec-check"
|
||||
update_types="security bugfix enhancement recommended newpackage"
|
||||
if [ ${prev} == -r ] || [ ${prev} == --repository ] ; then
|
||||
COMPREPLY=( $(compgen -W "`abf-local show build-repos`" -- "${cur}") )
|
||||
return 0
|
||||
fi
|
||||
if [ ${prev} == -s ] || [ ${prev} == --save-to-repository ] ; then
|
||||
proj=""
|
||||
next=0
|
||||
for i in ${COMP_WORDS[@]}
|
||||
do
|
||||
|
||||
if [[ $next == 1 ]] ; then
|
||||
proj=$i;
|
||||
next=0;
|
||||
fi
|
||||
if [[ "$i" == "-p" || "$i" == "--project" ]] ; then
|
||||
next=1;
|
||||
fi;
|
||||
|
||||
done
|
||||
|
||||
if [ -n "${proj}" ] ; then
|
||||
COMPREPLY=( $(compgen -W "`abf-local show save-to-repos -p ${proj}`" -- "${cur}") )
|
||||
fi
|
||||
return 0
|
||||
fi
|
||||
|
||||
if [ ${prev} == --update-type ] ; then
|
||||
|
||||
COMPREPLY=( $(compgen -W "${update_types}" -- "${cur}") )
|
||||
return 0
|
||||
fi
|
||||
|
||||
}
|
||||
|
||||
__abf_publish()
|
||||
{
|
||||
__abf_opts "--pack"
|
||||
}
|
||||
|
||||
__abf_backport()
|
||||
{
|
||||
__abf_opts "--pack"
|
||||
|
||||
|
||||
if [[ ${cur} != -* ]] ; then
|
||||
branches=`git branch --no-color | sed 's/^..//' | xargs echo`
|
||||
COMPREPLY=( $(compgen -W "${branches}" -- "${cur}") )
|
||||
return 0
|
||||
fi
|
||||
}
|
||||
|
||||
__abf_buildstatus()
|
||||
{
|
||||
__abf_opts ""
|
||||
}
|
||||
|
||||
__abf_help()
|
||||
{
|
||||
__abf_opts ""
|
||||
}
|
||||
|
||||
|
||||
_abf()
|
||||
|
||||
__abf()
|
||||
{
|
||||
local opts modes
|
||||
modes="help get put build backport buildstatus"
|
||||
modes="help get put show build publish backport buildstatus locate"
|
||||
COMPREPLY=()
|
||||
mode="${COMP_WORDS[1]}"
|
||||
prev="${COMP_WORDS[COMP_CWORD-1]}"
|
||||
cur="${COMP_WORDS[COMP_CWORD]}"
|
||||
|
||||
if [ "$COMP_CWORD" == "1" ]; then
|
||||
if [ "$COMP_CWORD" == "1" ] || ( [ "$COMP_CWORD" == "2" ] && [ "$mode" == "help" ] ); then
|
||||
COMPREPLY=( $(compgen -W "${modes}" -- ${cur}) )
|
||||
return 0
|
||||
fi
|
||||
|
||||
if [ "$mode" == "get" ]; then
|
||||
_get
|
||||
return 0
|
||||
fi
|
||||
if [ "$mode" == "put" ]; then
|
||||
_put
|
||||
return 0
|
||||
fi
|
||||
if [ "$mode" == "build" ]; then
|
||||
_build
|
||||
return 0
|
||||
fi
|
||||
if [ "$mode" == "backport" ]; then
|
||||
_backport
|
||||
return 0
|
||||
fi
|
||||
if [ "$mode" == "buildstatus" ]; then
|
||||
_buildstatus
|
||||
return 0
|
||||
fi
|
||||
|
||||
for i in ${modes}
|
||||
do
|
||||
if [[ $i == $mode ]] ; then
|
||||
eval __abf_${i};
|
||||
fi
|
||||
done
|
||||
|
||||
}
|
||||
|
||||
|
||||
complete -F _abf abf
|
||||
complete -F __abf abf