abf-console-client-src/abf.py

1058 lines
44 KiB
Python
Raw Normal View History

2012-09-18 13:26:44 +04:00
#!/usr/bin/python -tt
# -*- coding: UTF-8 -*-
import sys
import argparse
from argparse import RawDescriptionHelpFormatter
2012-09-18 13:26:44 +04:00
import os
2012-09-19 13:38:52 +04:00
import shutil
2012-12-11 17:08:55 +04:00
import platform
from glob import glob
2013-05-17 12:11:20 +04:00
import shlex
2012-09-18 13:26:44 +04:00
2012-12-11 17:08:55 +04:00
from abf.console.config import Config, mkdirs
2012-09-18 13:26:44 +04:00
from abf.console.log import Log
cfg = Config()
2012-10-25 15:26:22 +04:00
projects_cfg = Config(conf_path='~/.abf_projects', main_conf=False)
2012-09-18 13:26:44 +04:00
log = Log('abf')
from abf.console.misc import *
from abf.api.exceptions import *
from abf.model import *
2012-09-18 13:26:44 +04:00
2012-11-14 15:04:19 +04:00
abf_url = cfg['main']['abf_url']
file_store_url = cfg['main']['file_store_url']
2012-09-18 13:26:44 +04:00
login = cfg['user']['login']
password = cfg['user']['password']
2012-09-25 16:21:45 +04:00
default_group = cfg['user']['default_group']
default_build_platform = cfg['user']['default_build_platform']
2012-11-14 15:04:19 +04:00
models_params = ((abf_url, file_store_url, login, password))
2012-09-18 13:26:44 +04:00
2012-11-14 15:04:19 +04:00
models = Models(*models_params)
2012-12-11 17:08:55 +04:00
configs_dir = '/etc/abf/mock-urpm/configs/'
2012-11-23 14:20:54 +04:00
def test():
    """Smoke-test the data model against the live ABF service.

    Runs searches, by-ID lookups and lazy attribute loads for every model
    type and asserts they are mutually consistent. Wired to 'abf test'.
    Requires network access and valid credentials.
    """
    log.debug('TEST started')
    # search for well-known entities of each model type
    pls_import_personal = Platform.search(models, 'import_personal')
    grs_import = Group.search(models, 'import')
    prs_abfcc = Project.search(models, 'abf-console-client')
    uss_akirilenko = User.search(models, 'akirilenko')
    assert pls_import_personal
    assert grs_import
    assert prs_abfcc
    assert uss_akirilenko
    assert pls_import_personal[0].repositories[0].platform == pls_import_personal[0]
    # check last items: fetching by ID must agree with the search results
    assert Platform(models, ID=pls_import_personal[0].id).name == pls_import_personal[0].name
    assert Group(models, ID=grs_import[0].id).uname == grs_import[0].uname
    assert Project(models, ID=prs_abfcc[0].id).name == prs_abfcc[0].name
    assert User(models, ID=uss_akirilenko[0].id).uname == uss_akirilenko[0].uname
    # make models load the whole object (attribute access triggers a full fetch)
    pls_import_personal[0].description
    grs_import[0].description
    prs_abfcc[0].description
    uss_akirilenko[0].professional_experience
    pr_abfcc = Project.get_by_name(models, 'akirilenko/abf-console-client')
    assert pr_abfcc in prs_abfcc
    #bl = BuildList(models, ID=750988)
    # platform/arch helpers should work without raising
    Platform.get_user_platforms_main(models)
    Platform.get_user_platforms_personal(models)
    Platform.get_build_platforms(models)
    arches = Arch.get_arches(models)
    arch_x86_64 = Arch.get_arch_by_name(models, 'x86_64')
    assert arch_x86_64 in arches
    log.info('Datamodel seems to work fine')
2012-10-11 18:13:59 +04:00
2012-09-18 13:26:44 +04:00
2013-05-17 12:11:20 +04:00
def apply_aliases():
    """Expand user-configured aliases found in sys.argv, in place.

    Does nothing when the user is running the 'alias' management command
    itself, i.e. when 'alias' is the first non-flag argument.
    """
    if 'alias' in sys.argv:
        ind = sys.argv.index('alias')
        # 'alias' is the subcommand itself only if every token before it
        # is a global flag or the 'help' word - in that case do not expand.
        passthrough = ['-h', '-v', '--help', '--verbose', 'help']
        if all(sys.argv[i] in passthrough for i in range(1, ind)):
            return
    for alias_name in cfg['alias']:
        expansion = shlex.split(cfg['alias'][alias_name])
        if alias_name in sys.argv:
            pos = sys.argv.index(alias_name)
            # replace the alias token with its expansion, preserving order
            sys.argv[pos:pos + 1] = expansion
2012-09-18 13:26:44 +04:00
def parse_command_line():
    """Build the abf CLI, parse sys.argv and store the parsed namespace.

    The result is stored in the module-level ``command_line`` variable;
    every subcommand registers its handler through ``set_defaults(func=...)``
    and the caller is expected to invoke ``command_line.func()`` afterwards.
    """
    global command_line

    parser = argparse.ArgumentParser(description='ABF Console Client')
    parser.add_argument('-v', '--verbose', action='store_true', help='be verbose, display even debug messages')
    parser.add_argument('-c', '--clear-cache', action='store_true', help='clear cached information about repositories, platforms, projects, etc.')
    parser.add_argument('-q', '--quiet', action='store_true', help='Do not display info messages')
    subparsers = parser.add_subparsers(title='command')

    # help
    parser_help = subparsers.add_parser('help', help='show a help for command')
    parser_help.add_argument('command', action='store', nargs='?', help='a command to show help for')
    parser_help.set_defaults(func=help)

    # alias
    parser_alias = subparsers.add_parser('alias', help='Manage aliases')
    alias_commands = ['list', 'add', 'remove']
    parser_alias.add_argument('command', action='store', choices=alias_commands)
    parser_alias.add_argument('options', action='store', nargs='*', help='name and alias (not quoted, e. g. "abf alias add sg search groups") for adding, only name for removing.')
    parser_alias.set_defaults(func=alias)

    # get
    parser_get = subparsers.add_parser('get', help='clone a project from ABF')
    parser_get.add_argument('project', action='store', help='project name. ([group/]project). If no group specified, '
        'it\'s assumed to be your default group.')
    parser_get.add_argument('-b', '--branch', action='store', help='branch to checkout')
    parser_get.set_defaults(func=get)

    # put
    parser_put = subparsers.add_parser('put', help='Upload large binary files to File-Store and update (or create) .abf.yml file. Can also commit and push changes.')
    parser_put.add_argument('-m', '--message', action='store', help='With this option specified, "git add --all", "git commit -m MSG" and "git push" will be executed.')
    parser_put.add_argument('-s', '--minimal-file-size', default='0', action='store', help='The minimal file size to upload to File-Store. '
        'Default is 0B.')
    parser_put.add_argument('-n', '--do-not-remove-files', action='store_true', help='By default files are being removed on uploading. Override this behavior.')
    parser_put.add_argument('-u', '--upload-only', action='store_true', help='Deprecated! Affects nothing. Saved for compatibility reasons and will be removed later.')
    parser_put.set_defaults(func=put)

    # store
    parser_store = subparsers.add_parser('store', help='Upload a given file to File-Store. Prints a sha1 hash or error message (with non-zero return code).')
    parser_store.add_argument('path', action='store', help='Path to file')
    parser_store.set_defaults(func=store)

    # fetch
    parser_fetch = subparsers.add_parser('fetch', help='Download all the files listed in .abf.yml from File-Store to local directory.')
    parser_fetch.add_argument('-o', '--only', action='append', help='Limit the list of downloaded files to this file name(s). This option can be specified more than once.')
    parser_fetch.set_defaults(func=fetch)

    # show
    parser_show = subparsers.add_parser('show', help='show some general information. Bash autocomplete uses it.')
    show_choices = ['build-repos', 'build-platforms', 'save-to-repos', 'save-to-platforms']
    parser_show.add_argument('type', action='store', choices=show_choices, help='The type of information to show')
    parser_show.add_argument('-p', '--project', action='store', help='Project to show information for (if needed). Format: '
        '"[group/]name". If no group specified, default group will be used.')
    parser_show.set_defaults(func=show)

    # locate
    parser_locate = subparsers.add_parser('locate', help='tool can remember the project location and use it for some reasons (abfcd, etc.).',
        epilog='Every interaction with git repository (build, get, put, etc.) updates the cached location of the project (overriding '
        'an existing one if needed). For any cached project you can execute "abfcd <project>" and you will cd to the project directory.')
    locate_choices = ['update', 'update-recursive']
    parser_locate.add_argument('action', action='store', choices=locate_choices, nargs='?', help='The type of information to show')
    parser_locate.add_argument('-p', '--project', action='store', help='Project to show information for (if needed). Format: '
        '"[group/]name". If no group specified, default group will be used.')
    parser_locate.add_argument('-d', '--directory', action='store', help='Directory to update locations for. It should be a '
        'git repository for "update" and any directory for "update-recursive". If not specified - the current directory will be used')
    parser_locate.set_defaults(func=locate)

    # build
    parser_build = subparsers.add_parser('build', help='Initiate a build task on ABF.', formatter_class=RawDescriptionHelpFormatter,
        epilog='NOTES:\n'
        'API takes git commit hash to build. So client have to resolve it.\n'
        '1) If you\'ve specified commit hash - it will be used "as is".\n'
        '2) If you\'ve specified branch or tag name - it will be resolved automatically\n'
        'using ABF API. (the hash of top commit will be used for branch)\n'
        '3) If you\'ve specified no git commit related options and you\'ve\n'
        ' specified a project name - this project\'s default branch will be used.\n'
        '4) If you\'ve specified no git commit related options and you\'ve\n'
        'not specified a project name (you have to be in a git repository) -\n'
        'the top remote commit of your current branch will be used.\n')
    parser_build.add_argument('-p', '--project', action='store', help='project name ([group/]project). If no group '
        'specified, it is assumed to be your default group. If the option is not specified and you are in a git '
        'repository directory - resolve a project name from it.')
    parser_build.add_argument('-b', '--branch', action='store', help='branch to build.')
    parser_build.add_argument('-t', '--tag', action='store', help='tag to build.')
    parser_build.add_argument('-c', '--commit', action='store', help='commit sha hash to build.')
    parser_build.add_argument('-s', '--save-to-repository', action='store', help='repository to save results to '
        '([platform/]repository). If no platform part specified, it is assumed to be "<default_group>_personal". '
        'If this option is not specified at all, "<default_group>_personal/main" will be used.')
    parser_build.add_argument('-a', '--arch', action='append', help='architectures to build, '
        'can be set more than once. If not set - use all the available architectures.')
    parser_build.add_argument('-r', '--repository', action='append', help='repositories to build with ([platform/]repository). '
        'Can be set more than once. If no platform part specified, it is assumed to be your "<default_build_platform>".'
        ' If no repositories were specified at all, use the "main" repository from save-to platform.')
    parser_build.add_argument('--auto-publish', action='store_true', help='enable automatic publishing.')
    parser_build.add_argument('--skip-personal', action='store_true', help='do not use personal repository to resolve dependencies.')
    upd_types = ['security', 'bugfix', 'enhancement', 'recommended', 'newpackage']
    parser_build.add_argument('--update-type', action='store', choices=upd_types, help='Update type. Default is "%s".' %
        (BuildList.update_types[0]))
    parser_build.add_argument('--skip-spec-check', action='store_true', help='Do not check spec file.')
    parser_build.set_defaults(func=build)

    # mock-urpm
    parser_mock_urpm = subparsers.add_parser('mock-urpm', help='Build a project locally using mock-urpm.', epilog='No checkouts will be made,'
        'the current git repository state will be used')
    parser_mock_urpm.add_argument('-c', '--config', action='store', help='A config template to use. Specify one of the config names '
        'from %s. Directory path should be omitted. If no config specified, "default.cfg" will be used' % configs_dir)
    parser_mock_urpm.set_defaults(func=localbuild_mock_urpm)

    # rpmbuild
    parser_rpmbuild = subparsers.add_parser('rpmbuild', help='Build a project locally using rpmbuild.', epilog='No checkouts will be made,'
        'the current git repository state will be used')
    parser_rpmbuild.add_argument('-b', '--build', action='store', choices=['b', 's', 'a'], default='a', help='Build src.rpm (s), rpm (b) or both (a)')
    parser_rpmbuild.set_defaults(func=localbuild_rpmbuild)

    # publish
    parser_publish = subparsers.add_parser('publish', help='Publish the task that have already been built.')
    parser_publish.add_argument('task_ids', action='store', nargs="+", help='The IDs of tasks to publish.')
    parser_publish.set_defaults(func=publish)

    # copy
    parser_copy = subparsers.add_parser('copy', help='Copy all the files from SRC_BRANCH to DST_BRANCH')
    parser_copy.add_argument('src_branch', action='store', help='source branch')
    parser_copy.add_argument('dst_branch', action='store', nargs='?', help='destination branch. If not specified, it\'s assumed to be the current branch')
    parser_copy.add_argument('-p', '--pack', action='store_true', help='Create a tar.gz from the src_branch and put this archive and spec file to dst_branch')
    parser_copy.set_defaults(func=copy)

    # status
    parser_status = subparsers.add_parser('status', help='get a build-task status', epilog='If a project specified '
        ' or you are in a git repository - try to get the IDs from the last build task sent for this project. If you are not'
        ' in a git repository directory and project is not specified - try to get build IDs from the last build you\'ve done '
        'with console client.')
    parser_status.add_argument('ID', action='store', nargs='*', help='build list ID')
    parser_status.add_argument('-p', '--project', action='store', help='Project. If last IDs for this project can be found - use them')
    parser_status.add_argument('-s', '--short', action='store_true', help='Show one-line information including id, project, '
        'arch and status')
    parser_status.set_defaults(func=status)

    # clean
    parser_clean = subparsers.add_parser('clean', help='Analyze spec file and show missing and unnecessary files from '
        'the current git repository directory.')
    parser_clean.add_argument('--auto-remove', action='store_true', help='automatically remove all the unnecessary files')
    parser_clean.set_defaults(func=clean)

    # search
    parser_search = subparsers.add_parser('search', help='Search for something on ABF.', epilog='NOTE: only first 100 results of any request will be shown')
    search_choices = ['users', 'groups', 'platforms', 'projects']
    parser_search.add_argument('type', action='store', choices=search_choices, help='what to search for')
    parser_search.add_argument('query', action='store', help='a string to search for')
    parser_search.set_defaults(func=search)

    # test
    parser_test = subparsers.add_parser('test', help='Execute a set of internal datamodel tests')
    parser_test.set_defaults(func=test)

    # every subcommand also accepts its own -v/--verbose flag
    for s in subparsers._name_parser_map:
        subparsers._name_parser_map[s].add_argument('-v', '--verbose', action='store_true', help='be verbose, display even debug messages')

    command_line = parser.parse_args(sys.argv[1:])
2012-12-13 14:58:28 +04:00
def fix_default_config():
    """Make sure /etc/abf/mock-urpm/configs/default.cfg exists.

    If the symlink is missing, interactively ask the user to pick one of
    the available *.cfg configurations and create a 'default.cfg' symlink
    to it. Exits with code 1 when not running as root, since the configs
    directory is not writable otherwise.
    """
    if not os.path.exists('/etc/abf/mock-urpm/configs/default.cfg'):
        if os.getuid() != 0:
            print("To set up a default configuration file, symbolic link in " +
                  "/etc/abf/mock-urpm/configs have to be created. I need sudo rights to do it.")
            exit(1)
        files = os.listdir('/etc/abf/mock-urpm/configs')
        print('Available configurations: ')
        out = []
        for f in files:
            # offer only real build configs; site-defaults.cfg is mock-urpm's own settings file
            if not f.endswith('.cfg'):
                continue
            if f == 'site-defaults.cfg':
                continue
            out.append(f[:-4])
        print(', '.join(out))
        res = None
        while res not in out:
            if res is not None:
                print('"%s" is not a valid configuration.' % res)
            res = raw_input('Select one (it will be remembered): ')
        os.symlink('/etc/abf/mock-urpm/configs/%s.cfg' % res, '/etc/abf/mock-urpm/configs/default.cfg')
2012-12-11 17:08:55 +04:00
def run_mock_urpm(binary=True):
    """Build the current git checkout with mock-urpm.

    Builds a src.rpm from the spec and sources found in the repository
    root and moves it to the current directory; when ``binary`` is True,
    additionally rebuilds it into binary RPMs and moves those too.
    Relies on the module-level ``command_line`` namespace for options.
    """
    fix_default_config()
    # normalize the --config value: no extension, resolved inside configs_dir
    if not command_line.config:
        command_line.config = 'default.cfg'
    if command_line.config.endswith('.cfg'):
        command_line.config = command_line.config[:-4]
    config_path = os.path.join(configs_dir, command_line.config + '.cfg')
    if not os.path.exists(config_path):
        log.error("Config file %s can not be found." % config_path)
        if os.path.basename(config_path) == 'default.cfg':
            log.error("You should create this file or a symbolic link to another config in order to execute 'abf mock-urpm' without --config")
        exit(1)
    # defaults that the executed config file may override via config_opts
    config_opts = {'plugins': [], 'scm_opts': {}}
    config_opts['plugin_conf'] = {'ccache_opts': {}, 'root_cache_opts': {}, 'bind_mount_opts': {'dirs': []}, 'tmpfs_opts': {}, 'selinux_opts': {}}
    try:
        execfile(config_path)
    except Exception as ex:
        log.error("Could not read the contents of '%s': %s" % (config_path, str(ex)))
        exit(2)
    basedir = ('basedir' in config_opts and config_opts['basedir']) or '/var/lib/abf/mock-urpm'
    root = config_opts['root']
    resultsdir = ('resultdir' in config_opts and config_opts['resultdir']) or '%s/%s/result' % (basedir, root)
    src_dir = basedir + '/src'
    if os.path.exists(src_dir):
        shutil.rmtree(src_dir)
    src = get_root_git_dir()
    # fetch File-Store binaries first so they are present in the copied sources
    if os.path.exists(os.path.join(src, '.abf.yml')):
        cmd = ['abf', 'fetch']
        if command_line.verbose:
            cmd.append('-v')
        execute_command(cmd, print_to_stdout=True, exit_on_error=True, cwd=src)
    shutil.copytree(src, src_dir, symlinks=True)

    spec_path = find_spec(src_dir)
    if not spec_path:
        log.error('Can not locate a spec file in %s' % src_dir)
        exit(1)
    spec_path = os.path.join(src_dir, spec_path)
    cmd = ['mock-urpm', '-r', command_line.config, '--buildsrpm', '--spec', spec_path, '--sources', src_dir, '--configdir', configs_dir]
    if command_line.verbose:
        cmd.append('-v')
    log.info('Executing mock-urpm...')
    try:
        res = execute_command(cmd, print_to_stdout=True, exit_on_error=False, shell=False)
    except OSError as ex:
        log.error("Can not execute mock-urpm (%s). Maybe it is not installed?" % str(ex))
        exit(1)
    finally:
        # the copied source tree is temporary - remove it whatever happened
        shutil.rmtree(src_dir)
    srpm_path = glob(os.path.join(resultsdir, '*.src.rpm'))
    if len(srpm_path) != 1:
        log.error('Could not find a single src.rpm file in %s' % resultsdir)
        exit(1)
    srpm_path = srpm_path[0]
    srpm_path_new = os.path.join(os.getcwd(), os.path.basename(srpm_path))
    if os.path.exists(srpm_path_new):
        os.remove(srpm_path_new)
    shutil.move(srpm_path, os.getcwd())
    log.info('\nSRPM: %s\n' % srpm_path_new)
    if binary:
        cmd = ['mock-urpm', '-r', command_line.config, '--configdir', configs_dir, srpm_path_new]
        if command_line.verbose:
            cmd.append('-v')
        log.info('Executing mock-urpm...')
        res = execute_command(cmd, print_to_stdout=True, exit_on_error=False, shell=False)
        # presumably the rebuild leaves a fresh src.rpm in resultsdir; drop it
        # so only the binary packages are moved below - TODO confirm
        os.remove(srpm_path)
        rpms = glob(os.path.join(resultsdir, '*.rpm'))
        print('')
        for rpm in rpms:
            new_path = os.path.join(os.getcwd(), os.path.basename(rpm))
            if os.path.exists(new_path):
                os.remove(new_path)
            shutil.move(rpm, os.getcwd())
            print('RPM: ' + os.path.join(os.getcwd(), os.path.basename(rpm)))
def localbuild_mock_urpm():
    """Handler for 'abf mock-urpm': build the current checkout locally."""
    # resolve the project from the current git checkout; exits if absent
    project = get_project(models, must_exist=True)
    find_spec_problems()
    try:
        run_mock_urpm(binary=True)
    except OSError as err:
        log.error(str(err))
        exit(1)
2013-05-17 12:11:20 +04:00
def alias():
    """Handler for 'abf alias': list, add or remove command aliases.

    Aliases live in the user configuration under the [alias] section;
    'add' overwrites an existing alias with a warning.
    """
    log.debug('ALIAS started')
    action = command_line.command
    if action == 'list':
        if not cfg['alias']:
            log.info('No aliases found')
            return
        for name in cfg['alias']:
            print('%10s: %s' % (name, cfg['alias'][name]))
    elif action == 'add':
        if len(command_line.options) < 2:
            log.error('Not enough options. Use it like "abf alias add <alias_name> opt1 [opt2 ...]"')
            exit(1)
        name = command_line.options[0]
        if ' ' in name or '=' in name:
            log.error('Do not use " " or "=" for alias name!')
            exit(1)
        # quote tokens containing spaces; each token keeps a trailing space
        expansion = ''
        for token in command_line.options[1:]:
            expansion += ('"%s" ' % token) if ' ' in token else (token + ' ')
        if name in cfg['alias']:
            log.warning('Alias "%s" already exists and will be overwritten.' % name)
        cfg['alias'][name] = expansion
        log.info('Done')
    elif action == 'remove':
        if not command_line.options:
            log.error("Enter the alias name!")
            exit(1)
        name = command_line.options[0]
        if name not in cfg['alias']:
            log.error('Alias "%s" not found' % name)
            exit(1)
        cfg['alias'].pop(name)
        log.info('Done')
2012-11-14 18:29:38 +04:00
def localbuild_rpmbuild():
    """Handler for 'abf rpmbuild': build the current checkout with rpmbuild.

    Copies the repository to /tmp/abf/rpmbuild (fetching File-Store
    binaries first), runs rpmbuild according to command_line.build
    ('s' = src.rpm, 'b' = binary rpm, 'a' = both) and moves the produced
    packages into the current directory.
    """
    log.debug('RPMBUILD started')
    src_dir = '/tmp/abf/rpmbuild'
    mkdirs('/tmp/abf')
    if os.path.exists(src_dir):
        shutil.rmtree(src_dir)
    src = get_root_git_dir()
    # fetch File-Store binaries so the copied tree contains real sources
    cmd = ['abf', 'fetch']
    if command_line.verbose:
        cmd.append('-v')
    execute_command(cmd, print_to_stdout=True, exit_on_error=True)
    shutil.copytree(src, src_dir, symlinks=True)
    spec_path = find_spec(src_dir)
    if not spec_path:
        log.error('Can not locate a spec file in %s' % src_dir)
        exit(1)
    spec_path = os.path.join(src_dir, spec_path)
    cmd = ['rpmbuild', '-b' + command_line.build, '--define', '_topdir ' + src_dir, '--define', '_sourcedir ' + src_dir, spec_path]
    if command_line.verbose:
        cmd.append('-v')
    log.info('Executing rpmbuild...')
    try:
        res = execute_command(cmd, print_to_stdout=True, exit_on_error=False, shell=False)
    except OSError as ex:
        log.error("Can not execute rpmbuild (%s). Maybe it is not installed?" % str(ex))
        exit(1)
    log.info('Moving files to current directory...')
    items = list(os.walk(src_dir + '/SRPMS')) + list(os.walk(src_dir + '/RPMS'))
    for path, dirs, files in items:
        for f in files:
            if not f.endswith('.rpm'):
                continue
            ff = os.path.join(path, f)
            new_ff = os.path.join(os.getcwd(), f)
            if os.path.exists(new_ff):
                os.remove(new_ff)
            shutil.move(ff, os.getcwd())
            if new_ff.endswith('.src.rpm'):
                log.info('SRPM: ' + new_ff)
            else:
                log.info('RPM: ' + new_ff)
    shutil.rmtree(src_dir)
2012-11-14 18:29:38 +04:00
2012-09-19 14:02:56 +04:00
def help():
    """Handler for 'abf help': re-run the parser with -h for the command."""
    args = [sys.argv[0]]
    if command_line.command:
        args.append(command_line.command)
    args.append('-h')
    # argparse prints the help text and exits during parsing
    sys.argv = args
    parse_command_line()
2012-10-25 18:26:51 +04:00
2012-11-23 14:20:54 +04:00
def search():
    """Handler for 'abf search': query ABF and print every match."""
    log.debug('SEARCH started')
    model_by_type = {
        'groups': Group,
        'users': User,
        'platforms': Platform,
        'projects': Project,
    }
    model = model_by_type[command_line.type]
    for found in model.search(models, command_line.query):
        print(found)
2012-10-25 18:26:51 +04:00
def get_project_name_only(must_exist=True, name=None):
    """Resolve (owner_name, project_name) from *name* or the current git repo.

    *name* may be "project" (the owner defaults to the configured default
    group) or "owner/project". With name=None the pair is derived from the
    current git repository; when that fails and must_exist is True the
    process exits with an error, otherwise None is returned.
    """
    if name:
        tmp = name.split('/')
        if len(tmp) > 2:
            log.error('The project format is "[owner_name/]project_name"')
            exit(1)
        elif len(tmp) == 1:
            project_name = tmp[0]
            log.info("The project group is assumed to be " + default_group)
            owner_name = default_group
        else:  # len == 2
            owner_name = tmp[0]
            project_name = tmp[1]
    else:
        owner_name, project_name = get_project_name()
        if not project_name:
            if must_exist:
                log.error('You are not in a git repository directory. Specify the project name please!')
                exit(1)
            else:
                return None
        # remember where this project lives for 'abfcd' and friends
        _update_location()
    return (owner_name, project_name)
2012-10-25 15:26:22 +04:00
2012-10-25 18:26:51 +04:00
def get_project(models, must_exist=True, name=None):
    """Return the Project model for *name* (or for the current git repository).

    Exits with a friendly error message when the project does not exist
    or the current user has no access to it.
    """
    owner_name, project_name = get_project_name_only(must_exist, name)
    try:
        proj = Project.get_by_name(models, '%s/%s' % (owner_name, project_name))
    except PageNotFoundError:
        log.error('The project %s/%s does not exist!' % (owner_name, project_name))
        exit(1)
    except ForbiddenError:
        log.error('You do not have access to the project %s/%s!' % (owner_name, project_name))
        exit(1)
    log.debug('Project: %s' % proj)
    return proj
2012-09-18 13:26:44 +04:00
def get():
    """Handler for 'abf get': clone a project from ABF and cache its location."""
    log.debug('GET started')
    proj = command_line.project
    tmp = proj.split('/')
    if len(tmp) > 2:
        log.error('Specify a project name as "group_name/project_name" or just "project_name"')
        exit(1)
    elif len(tmp) == 1:
        project_name = proj
        # no group given - prepend the user's default group
        proj = '%s/%s' % (cfg['user']['default_group'], proj)
    elif len(tmp) == 2:
        project_name = tmp[1]
    uri = "%s/%s.git" % (cfg['user']['git_uri'], proj)
    cmd = ['git', 'clone', uri]
    if command_line.branch:
        cmd += ['-b', command_line.branch]
    execute_command(cmd, print_to_stdout=True, exit_on_error=True)
    # cache the clone location for 'abfcd' and location-aware commands
    projects_cfg[proj]['location'] = os.path.join(os.getcwd(), project_name)
2012-11-14 15:04:19 +04:00
2012-09-18 13:26:44 +04:00
def put():
    """Handler for 'abf put': upload large binary files to File-Store.

    Files bigger than --minimal-file-size are uploaded and tracked in
    .abf.yml. When -m/--message is given, the changes are additionally
    committed and pushed.
    """
    log.debug('PUT started')
    path = get_root_git_dir()
    # check before building any path from it: path is None outside a git repo
    # (previously os.path.join(None, ...) raised TypeError before this message)
    if not path:
        log.error("You have to be in a git repository directory")
        exit(1)
    yaml_path = os.path.join(path, '.abf.yml')
    _update_location()

    try:
        min_size = human2bytes(command_line.minimal_file_size)
    except ValueError:
        log.error('Incorrect "--minimal-file-size" value: %s' % command_line.minimal_file_size)
        exit(1)
    error_count = upload_files(models, min_size, remove_files=not command_line.do_not_remove_files, path=path)
    if error_count:
        log.info('There were errors while uploading, stopping.')
        exit(1)
    if not command_line.message:
        return
    cmd = ['git', 'add', '--all']
    execute_command(cmd, print_to_stdout=True, exit_on_error=True)
    if os.path.isfile(yaml_path):
        # force-add .abf.yml in case it is excluded by gitignore rules
        cmd = ['git', 'add', '-f', yaml_path]
        execute_command(cmd, print_to_stdout=True, exit_on_error=True)
    cmd = ['git', 'commit', '-m', command_line.message]
    execute_command(cmd, print_to_stdout=True, exit_on_error=True)
    log.info('Committed.')
    cmd = ['git', 'push']
    execute_command(cmd, print_to_stdout=True, exit_on_error=True)
    log.info('Pushed')
2012-11-14 15:04:19 +04:00
def fetch():
    """Handler for 'abf fetch': download files listed in .abf.yml from File-Store."""
    log.debug('FETCH started')
    path = get_root_git_dir()
    if not path:
        log.error("You have to be in a git repository directory")
        exit(1)
    path = os.path.join(path, '.abf.yml')
    if not os.path.isfile(path):
        log.error('File "%s" can not be found' % path)
        exit(1)
    try:
        fetch_files(models, path, command_line.only)
    except yaml.scanner.ScannerError as ex:
        log.error('Invalid yml file %s!\nProblem in line %d column %d: %s' % (path, ex.problem_mark.line, ex.problem_mark.column, ex.problem))
    except yaml.composer.ComposerError as ex:
        log.error('Invalid yml file %s!\n%s' % (path, ex))
2012-09-19 13:38:52 +04:00
2013-05-17 12:11:20 +04:00
def store():
    """Handler for 'abf store': upload one file to File-Store, print its hash."""
    log.debug('STORE started')
    target = os.path.expanduser(command_line.path)
    if not os.path.exists(target):
        log.error('File "%s" does not exist!' % target)
        exit(1)
    if not os.path.isfile(target):
        log.error('"%s" is not a regular file!' % target)
        exit(1)
    print(models.jsn.upload_file(target, silent=True))
2012-12-11 17:46:03 +04:00
def copy():
    """Handler for 'abf copy': copy all files from src_branch into dst_branch.

    Checks out the destination branch, wipes its working tree, checks out
    every file from the source branch, optionally packs the sources into
    a tarball (-p/--pack) and leaves the changes unstaged. On failure the
    initial branch state is restored.
    """
    log.debug('COPY started')
    sbrn = command_line.src_branch
    start_branch = get_branch_name()
    if not start_branch:
        log.error("You are not in a git directory")
        exit(1)
    log.debug("Current branch is " + start_branch)
    if command_line.dst_branch:
        dbrn = command_line.dst_branch
    else:
        dbrn = start_branch
    if sbrn == dbrn:
        log.error("Source and destination branches should be different branches!")
        exit(1)
    path = get_root_git_dir()
    log.debug("Repository root folder is " + path)
    _update_location(path=path)

    # 'stage' records how far we got so the except-branch can roll back
    stage = 0
    try:
        if start_branch != dbrn:
            cmd = ['git', 'checkout', dbrn]
            execute_command(cmd, print_to_stdout=True, cwd=path)
        stage = 1
        cmd = ['rm', '-rf', './*']
        execute_command(cmd, print_to_stdout=True, cwd=path)
        stage = 2
        cmd = ['git', 'checkout', sbrn, '*']
        execute_command(cmd, print_to_stdout=True, cwd=path)
        stage = 3
        if command_line.pack:
            pack_project(path)
        cmd = ['git', 'reset']
        execute_command(cmd, print_to_stdout=True, cwd=path)
    except Exception as ex:
        if isinstance(ex, ReturnCodeNotZero):
            log.error(str(ex))
        else:
            log.exception(ex)
        if stage == 1 or stage == 2:
            # working tree was modified but the copy did not finish - restore it
            log.info("Checking out the initial branch (%s)" % start_branch)
            cmd = ['git', 'reset', '--hard', start_branch]
            execute_command(cmd, print_to_stdout=True, cwd=path)
    log.info('Done')
2012-09-18 13:26:44 +04:00
def build():
log.debug('BUILD started')
2012-12-11 17:08:55 +04:00
if command_line.project and not (command_line.branch or command_line.tag or command_line.commit):
log.error("You've specified a project name without brnach, tag or commit (-b, -t or -c)")
exit(1)
tag_def = bool(command_line.tag)
branch_def = bool(command_line.branch)
commit_def = bool(command_line.commit)
tmp = tag_def + branch_def + commit_def
if tmp > 1:
log.error("You should specify ONLY ONE of the following options: branch, tag or commit.")
exit(1)
2012-09-18 13:26:44 +04:00
IDs = {
'arches':[],
'version':None,
'target_platform':None,
'repositories':[],
}
# get project
2012-10-25 15:26:22 +04:00
proj = get_project(models, must_exist=True, name=command_line.project)
if not command_line.project and not command_line.skip_spec_check: # local git repository
find_spec_problems()
if not proj.is_package:
log.error('The project %s is not a package and can not be built.' % proj)
exit(1)
2012-09-18 13:26:44 +04:00
# get architectures
arches = []
2012-10-25 15:26:22 +04:00
all_arches = Arch.get_arches(models)
if command_line.arch:
for arch in command_line.arch:
2012-11-22 17:18:41 +04:00
a = Arch.get_arch_by_name(models, arch)
if not a:
log.error("Invalid architecture: %s" % arch)
2012-09-18 13:26:44 +04:00
exit(1)
arches.append(a)
2012-09-18 13:26:44 +04:00
else:
2013-05-17 11:41:10 +04:00
# arches = all_arches
2013-05-17 12:11:20 +04:00
for arch in ['i586','i686','x86_64']:
2013-05-17 11:41:10 +04:00
a = Arch.get_arch_by_name(models, arch)
2013-05-17 12:11:20 +04:00
if a:
arches.append(a)
2012-09-25 16:21:45 +04:00
log.info("Arches are assumed to be " + str(arches))
2012-09-18 13:26:44 +04:00
log.debug('Architectures: %s' % arches)
2012-12-11 17:08:55 +04:00
# try to automatically resolve all the options, discarding all the other options except --branch
# If we couldn't - use only user-given options. If we could, but user specified other parameters -
# reject averything we've resolved and use only user-given options.
auto_resolved = True
def auto_resolve():
as_branch = None
as_commit = None
as_saveto = None
if not command_line.project: # we are in a git repository and it is the project we are building
as_branch = get_branch_name()
if command_line.branch:
as_branch = command_line.branch
#log.debug('Auto resolved branch: ' + as_branch)
if not as_branch:
log.info('You\'ve specified a project without a branch.')
return (None, None, None)
2013-05-17 12:11:20 +04:00
2012-12-11 17:08:55 +04:00
for ref in proj.get_refs_list(models):
if ref['ref'] == as_branch and ref['object']['type'] == 'commit':
as_commit = ref['object']['sha']
break
if not as_commit:
log.error("Could not resolve hash for branch '%s'" % (as_branch))
return (None, None, None)
2013-05-17 12:11:20 +04:00
2012-12-11 17:08:55 +04:00
for repo in proj.repositories:
if repo.platform.name == as_branch or (as_branch == 'master' and repo.platform.name == 'cooker'):
2012-12-11 17:08:55 +04:00
as_saveto = repo
if not as_saveto:
log.info('Could not resolve a platform to save to from the branch name "%s".' % as_branch)
return (as_branch, as_commit, None)
2013-05-17 12:11:20 +04:00
2012-12-11 17:08:55 +04:00
return (as_branch, as_commit, as_saveto)
2013-05-17 12:11:20 +04:00
2012-12-11 17:08:55 +04:00
as_branch, as_commit, as_saveto = auto_resolve()
opts = 'Branch: %s, commit: %s, save-to-repo: %s' % (as_branch, as_commit, as_saveto)
log.debug('A list of options which could be resolved automatically: %s' % opts)
2013-05-17 12:11:20 +04:00
2012-12-11 17:08:55 +04:00
# get git commit hash
commit_hash = None
if tmp == 0:
2012-12-11 17:08:55 +04:00
if as_commit: # use autoresolved commit hash
commit_hash = as_commit
else:
log.error("Git branch, tag or commit can not be resolved automatically. Specify it by -b, -t or -c.")
exit(1)
2012-10-11 18:13:59 +04:00
if tmp == 1:
if commit_def:
commit_hash = command_line.commit
2012-09-18 13:26:44 +04:00
else:
to_resolve = command_line.branch or command_line.tag
ref_type = (branch_def and 'commit') or (tag_def and 'tag')
refs = proj.get_refs_list(models)
for ref in refs:
if ref['ref'] == to_resolve and ref['object']['type'] == ref_type:
commit_hash = ref['object']['sha']
if commit_hash == None:
log.error("Could not resolve hash for %s '%s'" % (ref_type, to_resolve))
2012-10-11 18:13:59 +04:00
exit(1)
2012-12-11 17:08:55 +04:00
if commit_hash != as_commit:
as_saveto = None
log.debug('Aitoresolved options were rejected.')
log.debug('Git commit hash: %s' % commit_hash)
2012-12-11 17:08:55 +04:00
# get save-to repository
save_to_repository = None
build_for_platform = None
2012-10-11 18:13:59 +04:00
available_repos = proj.repositories
if command_line.save_to_repository:
items = command_line.save_to_repository.split('/')
else:
items = []
2012-10-11 18:13:59 +04:00
if len(items) == 2:
repo_name = items[1]
pl_name = items[0]
elif len(items) == 1:
repo_name = items[0]
2012-12-11 17:08:55 +04:00
pl_name = default_build_platform
2012-10-11 18:13:59 +04:00
log.info("Save-to platform is assumed to be " + pl_name)
elif len(items) == 0:
2012-12-11 17:08:55 +04:00
if as_saveto and as_saveto in available_repos:
pl_name = as_saveto.platform.name
repo_name = as_saveto.name
else:
log.error("Save-to repository can not be resolved automatically. Specify it (-s).")
exit(1)
2012-10-11 18:13:59 +04:00
else:
log.error("save-to-repository option format: [platform/]repository")
exit(1)
2012-09-18 13:26:44 +04:00
2012-12-11 17:08:55 +04:00
if (as_saveto and as_saveto in available_repos and as_saveto.platform.name == pl_name
and as_saveto.name == repo_name):
save_to_repository = as_saveto
else:
pls = []
for repo in available_repos:
if repo.platform.name == pl_name:
build_for_platform = repo.platform
pls.append(repo.platform.name)
if not build_for_platform:
log.error("Can not build for platform %s. Select one of the following:\n%s" % (pl_name, ', '.join(pls)))
exit(1)
for repo in build_for_platform.repositories:
if repo.name == repo_name:
save_to_repository = repo
break
2012-09-18 13:26:44 +04:00
2012-12-11 17:08:55 +04:00
if not save_to_repository:
log.error("Incorrect save-to repository %s/%s.\nSelect one of the following:\n%s" % (pl_name, repo_name,
', '.join([str(x) for x in build_for_platform.repositories])))
exit(1)
2012-09-18 13:26:44 +04:00
2012-12-11 17:08:55 +04:00
log.debug('Save-to repository: ' + str(save_to_repository))
2012-10-11 18:13:59 +04:00
# get the list of build repositories
2012-10-25 15:26:22 +04:00
build_platforms = Platform.get_build_platforms(models)
build_platform_names = [x.name for x in build_platforms]
build_repositories = []
2012-09-18 13:26:44 +04:00
if command_line.repository:
2012-09-25 16:21:45 +04:00
for repo in command_line.repository:
items = repo.split('/')
if len(items) == 2:
repo_name = items[1]
pl_name = items[0]
elif len(items) == 1:
repo_name = items[0]
pl_name = default_build_platform
2012-12-11 17:08:55 +04:00
log.debug("Platform for selected repository %s is assumed to be %s" % (repo_name, pl_name))
else:
log.error("'repository' option format: [platform/]repository")
exit(1)
if pl_name not in build_platform_names:
2012-12-11 17:08:55 +04:00
log.error("Can not use build repositories from platform %s!\nSelect one of the following:\n%s" % (pl_name,
', '.join(build_platform_names)))
exit(1)
for pl in build_platforms:
if pl.name == pl_name:
build_platform = pl
break
build_repo = None
for repo in build_platform.repositories:
if repo.name == repo_name:
build_repo = repo
break
if not build_repo:
log.error("Platform %s does not have repository %s!\nSelect one of the following:\n%s" % (pl_name, repo_name,
', '.join([x.name for x in build_platform.repositories])))
exit(1)
build_repositories.append(build_repo)
2012-10-11 18:13:59 +04:00
else:
build_platform = save_to_repository.platform
if build_platform.name not in build_platform_names or not build_platform.repositories:
log.error("Could not resolve repositories to build with. Please specify it (-r option)")
exit(1)
2012-12-11 17:08:55 +04:00
build_repositories = []
2012-10-11 18:13:59 +04:00
for repo in build_platform.repositories:
2012-12-11 17:08:55 +04:00
if repo.name == 'main' or repo.name == save_to_repository.name:
build_repositories.append(repo)
log.info("Repositories to build with are assumed to be: " + str(build_repositories))
2012-10-11 18:13:59 +04:00
if not build_repositories:
log.error("You have to specify the repository(s) to build with (-r option)")
exit(1)
log.debug("Build repositories: " + str(build_repositories))
2012-12-11 17:08:55 +04:00
#exit()
2012-10-25 18:26:51 +04:00
build_ids = BuildList.new_build_task(models, proj, save_to_repository, build_repositories, commit_hash,
2013-05-29 15:33:07 +04:00
command_line.update_type or BuildList.update_types[0], command_line.auto_publish, arches, command_line.skip_personal)
2012-10-25 18:26:51 +04:00
ids = ','.join([str(i) for i in build_ids])
projects_cfg['main']['last_build_ids'] = ids
projects_cfg[str(proj)]['last_build_ids'] = ids
2012-10-11 18:13:59 +04:00
def publish():
    '''Publish the build lists given on the command line.

    Iterates over command_line.task_ids; a build list can only be published
    when its status is 0 (build complete), otherwise it is skipped with an
    error message.  API failures for one task do not stop the others.
    '''
    log.debug('PUBLISH started')
    for task_id in command_line.task_ids:
        try:
            bl = BuildList(models, task_id)
            if bl.status != 0:
                # only successfully built lists can be published
                log.error("The status of build task %s is \"%s\", can not be published!" % (bl.id, bl.status_string))
                continue
            res = bl.publish()
        except AbfApiException as ex:
            log.error('Could not publish task %s: %s' % (task_id, str(ex)))
2012-10-11 18:13:59 +04:00
2012-10-25 18:26:51 +04:00
def _print_build_status(models, ID):
try:
2012-10-25 18:26:51 +04:00
bl = BuildList(models, ID)
2012-10-11 18:13:59 +04:00
except AbfApiException, ex:
2012-10-26 12:14:30 +04:00
log.error("Can not read buildlist %s: %s" % (ID, ex))
exit(3)
2012-10-25 18:26:51 +04:00
if command_line.short:
print repr(bl)
else:
print '%-20s%s' %('Buildlist ID:', bl.id)
print '%-20s%s' %('Owner:', bl.owner.uname)
print '%-20s%s' %('Project:', bl.project.fullname)
print '%-20s%s' %('Status:', bl.status_string)
print '%-20s%s' %('Build for platform:', bl.build_for_platform)
print '%-20s%s' %('Save to repository:', bl.save_to_repository)
print '%-20s%s' %('Build repositories:', bl.include_repos)
2013-05-31 12:45:47 +04:00
print '%-20s%s' %('Extra repositories:', bl.extra_repositories)
2012-10-25 18:26:51 +04:00
print '%-20s%s' %('Architecture:', bl.arch.name)
print '%-20s%s' %('Created at:', bl.created_at)
print '%-20s%s' %('Updated at:', bl.updated_at)
print ''
2012-12-13 14:58:28 +04:00
def status():
    '''Show the status of build tasks.

    Task IDs come from (in priority order): explicit IDs on the command
    line, the last-build IDs recorded for the given project, or the
    globally recorded last-build IDs.  Non-numeric IDs are reported and
    skipped.
    '''
    log.debug('STATUS started')
    ids = []
    if command_line.ID:
        ids = command_line.ID
    else:
        res = get_project_name_only(must_exist=False, name=command_line.project)
        if res:
            proj = '%s/%s' % res
            ids += projects_cfg[proj]['last_build_ids'].split(',')
        else:
            # no project resolved: fall back to the most recent build
            # started from any project
            if 'main' not in projects_cfg or 'last_build_ids' not in projects_cfg['main']:
                log.error("Can not find last build IDs. Specify a project name or ID")
                exit(1)
            ids += projects_cfg['main']['last_build_ids'].split(',')

    ids = list(set(ids))
    for i in ids:
        try:
            i = int(i)
        except ValueError:
            log.error('"%s" is not a number' % i)
            continue
        _print_build_status(models, i)
2012-10-25 15:26:22 +04:00
2012-10-26 12:14:30 +04:00
def _update_location(path=None, silent=True):
2012-10-25 15:26:22 +04:00
try:
if not path:
path = os.getcwd()
log.debug("Updating project location for %s" % path)
group, name = get_project_name(path)
if group:
proj = '%s/%s' % (group, name)
projects_cfg[proj]['location'] = path
2012-10-26 12:14:30 +04:00
text = "Project %s has been located in %s" % (proj, path)
if silent:
log.debug(text)
else:
log.info(text)
2012-10-25 15:26:22 +04:00
except:
pass
def _update_location_recursive(path):
    '''Walk `path` depth-first and record the location of every git
    repository found; descent stops at the first repository on a branch.'''
    entries = os.listdir(path)
    if '.git' in entries:
        # this directory is itself a git checkout - locate it, don't descend
        _update_location(path, silent=False)
        return
    for entry in entries:
        child = os.path.join(path, entry)
        # recurse only into real (non-symlink) subdirectories
        if os.path.isdir(child) and not os.path.islink(child):
            _update_location_recursive(child)
def locate():
log.debug('LOCATE started')
if not command_line.action: # show location
if not command_line.project:
print "To show a project location, you have to specify a project name ('-p' option)"
return
tmp = command_line.project.split('/')
if len(tmp) > 2:
log.error('error: the project format is "[owner_name/]project_name"')
exit(1)
elif len(tmp) == 1:
proj = '%s/%s' % (default_group, tmp[0])
else: # len == 2
proj = command_line.project
if proj not in projects_cfg or 'location' not in projects_cfg[proj] or not projects_cfg[proj]['location']:
print 'error: project %s can not be located' % proj
exit(1)
path = projects_cfg[proj]['location']
if not os.path.isdir(path):
print 'error: project is not located in "%s" anymore' % path
projects_cfg[proj]['location'] = ''
exit(1)
print path
return
else:
if command_line.action == 'update':
path = command_line.directory or os.getcwd()
_update_location(path)
elif command_line.action == 'update-recursive':
path = command_line.directory or os.getcwd()
_update_location_recursive(path)
def show():
log.debug('SHOW started')
Log.set_silent()
t = command_line.type
if t in ['build-platforms', 'build-repos']:
build_platforms = Platform.get_build_platforms(models)
platform_names = []
repo_names = []
for plat in build_platforms:
if plat.repositories:
platform_names.append(plat.name)
for repo in plat.repositories:
repo_names.append(str(repo))
out = (t == 'build-platforms' and platform_names) or (t == 'build-repos' and repo_names)
if t in ['save-to-platforms', 'save-to-repos']:
proj = get_project(models, must_exist=True, name=command_line.project)
repos = proj.repositories
platform_names = []
repo_names = []
for repo in repos:
platform_names.append(repo.platform.name)
repo_names.append(str(repo))
platform_names = list(set(platform_names))
out = (t == 'save-to-platforms' and platform_names) or (t == 'save-to-repos' and repo_names)
print ' '.join(out)
def clean():
    '''Check the current git repository for spec-file problems and
    optionally remove redundant files (--auto-remove); also refreshes
    the recorded location of the project.'''
    log.debug("CLEAN started")
    _update_location()
    find_spec_problems(auto_remove=command_line.auto_remove)
2012-09-18 13:26:44 +04:00
if __name__ == '__main__':
    # Expand any user-defined command aliases before real parsing.
    apply_aliases()
    # Populates the module-level `command_line` namespace, including the
    # `func` attribute bound to the selected sub-command handler.
    parse_command_line()

    if command_line.verbose:
        Log.set_verbose()
    if command_line.quiet:
        Log.set_quiet()
    if command_line.clear_cache:
        Models.clear_cache()

    # Dispatch to the handler chosen by the sub-command parser.
    command_line.func()