Merge pull request #2 from din/abf-console-client:master

proposed new command 'info'
dsilakov 2014-07-16 13:15:55 +04:00
commit edbde7e144
3 changed files with 332 additions and 215 deletions

abf.py

@@ -44,38 +44,38 @@ def test():
grs_import = Group.search(models, 'import')
prs_abfcc = Project.search(models, 'abf-console-client')
uss_akirilenko = User.search(models, 'akirilenko')
assert pls_import_personal
assert grs_import
assert prs_abfcc
assert uss_akirilenko
assert pls_import_personal[0].repositories[0].platform == pls_import_personal[0]
# check last items
assert Platform(models, ID=pls_import_personal[0].id).name == pls_import_personal[0].name
assert Group(models, ID=grs_import[0].id).uname == grs_import[0].uname
assert Project(models, ID=prs_abfcc[0].id).name == prs_abfcc[0].name
assert User(models, ID=uss_akirilenko[0].id).uname == uss_akirilenko[0].uname
# make models load the whole object
pls_import_personal[0].description
grs_import[0].description
prs_abfcc[0].description
uss_akirilenko[0].professional_experience
pr_abfcc = Project.get_by_name(models, 'akirilenko/abf-console-client')
assert pr_abfcc in prs_abfcc
#bl = BuildList(models, ID=750988)
Platform.get_user_platforms_main(models)
Platform.get_user_platforms_personal(models)
Platform.get_build_platforms(models)
arches = Arch.get_arches(models)
arch_x86_64 = Arch.get_arch_by_name(models, 'x86_64')
assert arch_x86_64 in arches
log.info('Datamodel seems to work fine')
@@ -88,7 +88,7 @@ def apply_aliases():
if sys.argv[i] not in ['-h', '-v', '--help', '--verbose', 'help']:
found = True
if not found:
return
for alias_name in cfg['alias']:
alias = shlex.split(cfg['alias'][alias_name])
if alias_name in sys.argv:
@@ -97,7 +97,7 @@ def apply_aliases():
for item in alias:
sys.argv.insert(ind, item)
ind += 1
def parse_command_line():
global command_line
@@ -106,26 +106,26 @@ def parse_command_line():
parser.add_argument('-c', '--clear-cache', action='store_true', help='clear cached information about repositories, platforms, projects, etc.')
parser.add_argument('-q', '--quiet', action='store_true', help='Do not display info messages')
subparsers = parser.add_subparsers(title='command')
# help
parser_help = subparsers.add_parser('help', help='show a help for command')
parser_help.add_argument('command', action='store', nargs='?', help='a command to show help for')
parser_help.set_defaults(func=help)
# alias
parser_alias = subparsers.add_parser('alias', help='Manage aliases')
alias_commands = ['list', 'add', 'remove']
parser_alias.add_argument('command', action='store', choices=alias_commands)
parser_alias.add_argument('options', action='store', nargs='*', help='name and alias (not quoted, e. g. "abf alias add sg search groups") for adding, only name for removing.')
parser_alias.set_defaults(func=alias)
# get
parser_get = subparsers.add_parser('get', help='clone a project from ABF')
parser_get.add_argument('project', action='store', help='project name. ([group/]project). If no group specified, '
'it\'s assumed to be your default group.')
parser_get.add_argument('-b', '--branch', action='store', help='branch to checkout')
parser_get.set_defaults(func=get)
# put
parser_put = subparsers.add_parser('put', help='Upload large binary files to File-Store and update (or create) .abf.yml file. Can also commit and push changes.')
parser_put.add_argument('-m', '--message', action='store', help='With this option specified, "git add --all", "git commit -m MSG" and "git push" will be executed.')
@@ -134,17 +134,17 @@ def parse_command_line():
parser_put.add_argument('-n', '--do-not-remove-files', action='store_true', help='By default files are being removed on uploading. Override this behavior.')
parser_put.add_argument('-u', '--upload-only', action='store_true', help='Deprecated! Affects nothing. Saved for compatibility reasons and will be removed later.')
parser_put.set_defaults(func=put)
# store
parser_store = subparsers.add_parser('store', help='Upload a given file to File-Store. Prints a sha1 hash or error message (with non-zero return code).')
parser_store.add_argument('path', action='store', help='Path to file')
parser_store.set_defaults(func=store)
# fetch
parser_fetch = subparsers.add_parser('fetch', help='Download all the files listed in .abf.yml from File-Store to local directory.')
parser_fetch.add_argument('-o', '--only', action='append', help='Limit the list of downloaded files to the given file name(s). This option can be specified more than once.')
parser_fetch.set_defaults(func=fetch)
# show
parser_show = subparsers.add_parser('show', help='show some general information. Bash autocomplete uses it.')
show_choices = ['build-repos', 'build-platforms', 'save-to-repos', 'save-to-platforms']
@@ -152,7 +152,7 @@ def parse_command_line():
parser_show.add_argument('-p', '--project', action='store', help='Project to show information for (if needed). Format: '
'"[group/]name". If no group specified, default group will be used.')
parser_show.set_defaults(func=show)
# locate
parser_locate = subparsers.add_parser('locate', help='the tool can remember the project location and use it later (abfcd, etc.).',
epilog='Every interaction with git repository (build, get, put, etc.) updates the cached location of the project (overriding '
@@ -164,7 +164,7 @@ def parse_command_line():
parser_locate.add_argument('-d', '--directory', action='store', help='Directory to update locations for. It should be a '
'git repository for "update" and any directory for "update-recursive". If not specified - the current directory will be used')
parser_locate.set_defaults(func=locate)
# build
parser_build = subparsers.add_parser('build', help='Initiate a build task on ABF.', formatter_class=RawDescriptionHelpFormatter,
epilog= 'NOTES:\n'
@@ -187,7 +187,7 @@ def parse_command_line():
'([platform/]repository). If no platform part specified, it is assumed to be "<default_group>_personal". '
'If this option is not specified at all, "<default_group>_personal/main" will be used.')
parser_build.add_argument('-a', '--arch', action='append', help='architectures to build, '
'can be set more than once. If not set - use all the available architectures.')
parser_build.add_argument('-r', '--repository', action='append', help='repositories to build with ([platform/]repository). '
'Can be set more than once. If no platform part specified, it is assumed to be your "<default_build_platform>".'
' If no repositories were specified at all, use the "main" repository from save-to platform.')
@@ -200,25 +200,25 @@ def parse_command_line():
(BuildList.update_types[0]) )
parser_build.add_argument('--skip-spec-check', action='store_true', help='Do not check spec file.' )
parser_build.set_defaults(func=build)
# mock-urpm
parser_mock_urpm = subparsers.add_parser('mock-urpm', help='Build a project locally using mock-urpm.', epilog='No checkouts will be made, '
'the current git repository state will be used')
parser_mock_urpm.add_argument('-c', '--config', action='store', help='A config template to use. Specify one of the config names '
'from %s. Directory path should be omitted. If no config specified, "default.cfg" will be used' % configs_dir)
parser_mock_urpm.set_defaults(func=localbuild_mock_urpm)
# rpmbuild
parser_rpmbuild = subparsers.add_parser('rpmbuild', help='Build a project locally using rpmbuild.', epilog='No checkouts will be made, '
'the current git repository state will be used')
parser_rpmbuild.add_argument('-b', '--build', action='store', choices=['b', 's', 'a'], default='a', help='Build src.rpm (s), rpm (b) or both (a)')
parser_rpmbuild.set_defaults(func=localbuild_rpmbuild)
# publish
parser_publish = subparsers.add_parser('publish', help='Publish tasks that have already been built.')
parser_publish.add_argument('task_ids', action='store', nargs="+", help='The IDs of tasks to publish.')
parser_publish.set_defaults(func=publish)
# copy
parser_copy = subparsers.add_parser('copy', help='Copy all the files from SRC_BRANCH to DST_BRANCH')
parser_copy.add_argument('src_branch', action='store', help='source branch')
@@ -271,28 +271,71 @@ def parse_command_line():
parser_status.add_argument('-s', '--short', action='store_true', help='Show one-line information including id, project, '
'arch and status')
parser_status.set_defaults(func=status)
# clean
parser_clean = subparsers.add_parser('clean', help='Analyze spec file and show missing and unnecessary files from '
'the current git repository directory.')
parser_clean.add_argument('--auto-remove', action='store_true', help='automatically remove all the unnecessary files')
parser_clean.set_defaults(func=clean)
# search
parser_search = subparsers.add_parser('search', help='Search for something on ABF.', epilog='NOTE: only the first 100 results of any request will be shown')
search_choices = ['users', 'groups', 'platforms', 'projects']
parser_search.add_argument('type', action='store', choices=search_choices, help='what to search for')
parser_search.add_argument('query', action='store', help='a string to search for')
parser_search.set_defaults(func=search)
# info
parser_info = subparsers.add_parser('info', help='get information about instances of a given type')
info_choices = ['platforms', 'repositories', 'projects']
parser_info.add_argument('type', action='store', choices=info_choices, help='type of the instance')
parser_info.add_argument('-f', '--filter', action='store', help='Filter by one or more pairs <type>.<attribute>=<value> or <attribute>=<value>, where <type> is one of the positional arguments (%s), <attribute> is one of the instance fields or the special attribute "page" (used for pagination), and <value> is a string that may contain an asterisk (*). Example: abf info projects -f platforms.name=rosa2012lts page=*' % info_choices, nargs='*')
parser_info.add_argument('-o', '--output', action='store', help='output format', nargs='*')
parser_info.set_defaults(func=info_single)
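For illustration, a few hypothetical invocations of the new subcommand (the platform name and output fields are placeholders):

abf info platforms -f name=rosa2012lts -o name description
abf info repositories -f platforms.name=rosa2012lts
abf info projects -f platforms.name=rosa2012lts page=* -o name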
# test
parser_test = subparsers.add_parser('test', help='Execute a set of internal datamodel tests')
parser_test.set_defaults(func=test)
for s in subparsers._name_parser_map:
subparsers._name_parser_map[s].add_argument('-v', '--verbose', action='store_true', help='be verbose, display even debug messages')
command_line = parser.parse_args(sys.argv[1:])
def info_single():
st = command_line.type
cl = {'platforms': Platform, 'repositories': Repository, 'projects': Project}
if not command_line.filter:
log.debug('Filter can be specified with the following parameters:\n %s' % cl[st].filter_dict)
sf = None
else:
for param in command_line.filter:
if '.' in param and param.index('.') < param.index('='):
st, param = param.split('.', 1)
attr, value = param.split('=', 1)
cl[st].filter_dict[attr] = value
log.debug('Filter set up for instance %s' % st)
st = command_line.type
if not command_line.output:
log.debug('Output format can be specified with the following parameters:\n %s' % cl[st].required_fields)
so = [cl[st].required_fields[1]]
log.debug('Using default query format: %s' % so)
else:
so = command_line.output
res = cl[st].info(models)
info_out = []
for inst in res:
for param in so:
try:
print param + ':\t' + str(inst.params_dict[param])
except KeyError:
log.debug("Parameter %s not available" % param)
def fix_default_config():
if not os.path.exists('/etc/abf/mock-urpm/configs/default.cfg'):
@@ -300,7 +343,7 @@ def fix_default_config():
print "To set up a default configuration file, symbolic link in " +\
"/etc/abf/mock-urpm/configs have to be created. I need sudo rights to do it."
exit(1)
files = os.listdir('/etc/abf/mock-urpm/configs')
print 'Available configurations: '
out = []
@@ -310,7 +353,7 @@ def fix_default_config():
if f == 'site-defaults.cfg':
continue
out.append(f[:-4])
print ', '.join(out)
res = None
while res not in out:
@@ -331,19 +374,19 @@ def run_mock_urpm(binary=True):
if os.path.basename(config_path) == 'default.cfg':
log.error("You should create this file or a symbolic link to another config in order to execute 'abf mock-urpm' withow --config")
exit(1)
config_opts = {'plugins': [], 'scm_opts': {}}
config_opts['plugin_conf'] = {'ccache_opts': {}, 'root_cache_opts': {}, 'bind_mount_opts': {'dirs': []}, 'tmpfs_opts': {}, 'selinux_opts': {}}
try:
execfile(config_path)
except Exception, ex:
log.error("Could not read the contents of '%s': %s" % (config_path, str(ex)))
exit(2)
basedir = ('basedir' in config_opts and config_opts['basedir']) or '/var/lib/abf/mock-urpm'
root = config_opts['root']
resultsdir = ('resultdir' in config_opts and config_opts['resultdir']) or '%s/%s/result' % (basedir, root)
src_dir = basedir + '/src'
if os.path.exists(src_dir):
shutil.rmtree(src_dir)
src = get_root_git_dir()
@@ -370,7 +413,7 @@ def run_mock_urpm(binary=True):
exit(1)
finally:
shutil.rmtree(src_dir)
srpm_path = glob(os.path.join(resultsdir, '*.src.rpm'))
if len (srpm_path) != 1:
log.error('Could not find a single src.rpm file in %s' % resultsdir)
@@ -380,7 +423,7 @@ def run_mock_urpm(binary=True):
if os.path.exists(srpm_path_new):
os.remove(srpm_path_new)
shutil.move(srpm_path, os.getcwd())
log.info('\nSRPM: %s\n' % srpm_path_new)
if binary:
cmd = ['mock-urpm', '-r', command_line.config, '--configdir', configs_dir, srpm_path_new]
@@ -397,18 +440,18 @@ def run_mock_urpm(binary=True):
os.remove(new_path)
shutil.move(rpm, os.getcwd())
print('RPM: ' + os.path.join(os.getcwd(), os.path.basename(rpm)))
def localbuild_mock_urpm():
# get project
proj = get_project(models, must_exist=True)
find_spec_problems()
try:
run_mock_urpm(binary=True)
except OSError, ex:
log.error(str(ex))
exit(1)
def alias():
log.debug('ALIAS started')
if command_line.command == 'list':
@@ -445,9 +488,9 @@ def alias():
exit(1)
cfg['alias'].pop(al_name)
log.info('Done')
def localbuild_rpmbuild():
log.debug('RPMBUILD started')
src_dir = '/tmp/abf/rpmbuild'
@@ -460,7 +503,7 @@ def localbuild_rpmbuild():
cmd.append('-v')
execute_command(cmd, print_to_stdout=True, exit_on_error=True)
shutil.copytree(src, src_dir, symlinks=True)
spec_path = find_spec(src_dir)
if not spec_path:
log.error('Can not locate a spec file in %s' % src_dir)
@@ -491,7 +534,7 @@ def localbuild_rpmbuild():
log.info('SRPM: ' + new_ff)
else:
log.info('RPM: ' + new_ff)
shutil.rmtree(src_dir)
def help():
@@ -500,7 +543,7 @@ def help():
else:
sys.argv = [sys.argv[0], '-h']
parse_command_line()
def search():
log.debug('SEARCH started')
st = command_line.type
@@ -509,7 +552,7 @@ def search():
items = cl[st].search(models, sq)
for item in items:
print item
def get_project_name_only(must_exist=True, name=None):
if name:
tmp = name.split('/')
@@ -522,7 +565,7 @@ def get_project_name_only(must_exist=True, name=None):
owner_name = default_group
else: # len == 2
owner_name = tmp[0]
project_name = tmp[1]
else:
owner_name, project_name = get_project_name()
if not project_name:
@@ -535,7 +578,7 @@ def get_project_name_only(must_exist=True, name=None):
return (owner_name, project_name)
def get_project(models, must_exist=True, name=None):
owner_name, project_name = get_project_name_only(must_exist, name)
try:
proj = Project.get_by_name(models, '%s/%s' % (owner_name, project_name))
@@ -545,11 +588,11 @@ def get_project(models, must_exist=True, name=None):
except ForbiddenError:
log.error('You do not have access to the project %s/%s!' % (owner_name, project_name))
exit(1)
log.debug('Project: %s' % proj)
return proj
def get():
log.debug('GET started')
proj = command_line.project
@@ -562,7 +605,7 @@ def get():
proj = '%s/%s' % (cfg['user']['default_group'], proj)
elif len(tmp) == 2:
project_name = tmp[1]
uri = "%s/%s.git" % (cfg['user']['git_uri'], proj)
cmd = ['git', 'clone', uri]
if command_line.branch:
@@ -582,15 +625,15 @@ def get():
def put():
log.debug('PUT started')
path = get_root_git_dir()
yaml_path = os.path.join(path, '.abf.yml')
if not path:
log.error("You have to be in a git repository directory")
exit(1)
_update_location()
try:
min_size = human2bytes(command_line.minimal_file_size)
except ValueError, ex:
@@ -600,20 +643,20 @@ def put():
if error_count:
log.info('There were errors while uploading, stopping.')
exit(1)
if not command_line.message:
return
cmd = ['git', 'add', '--all']
execute_command(cmd, print_to_stdout=True, exit_on_error=True)
if os.path.isfile(yaml_path):
cmd = ['git', 'add', '-f', yaml_path]
execute_command(cmd, print_to_stdout=True, exit_on_error=True)
cmd = ['git', 'commit', '-m', command_line.message]
execute_command(cmd, print_to_stdout=True, exit_on_error=True)
log.info('Committed.')
cmd = ['git', 'push']
execute_command(cmd, print_to_stdout=True, exit_on_error=True)
@@ -645,10 +688,10 @@ def store():
if not os.path.isfile(p):
log.error('"%s" is not a regular file!' % p)
exit(1)
res = models.jsn.upload_file(p, silent=True)
print res
def copy():
log.debug('COPY started')
sbrn = command_line.src_branch
@@ -661,14 +704,14 @@ def copy():
dbrn = command_line.dst_branch
else:
dbrn = start_branch
if sbrn == dbrn:
log.error("Source and destination branches shold be different branches!")
exit(1)
path = get_root_git_dir()
log.debug("Repository root folder is " + path)
_update_location(path=path)
stage = 0
@@ -692,7 +735,7 @@ def copy():
log.error(str(ex))
else:
log.exception(ex)
if stage == 1 or stage == 2:
log.info("Checking out the initial branch (%s)" % start_branch)
cmd = ['git', 'reset', '--hard', start_branch]
@@ -856,27 +899,27 @@ def remove_project_from_repository():
def build():
log.debug('BUILD started')
if command_line.project and not (command_line.branch or command_line.tag or command_line.commit):
log.error("You've specified a project name without branch, tag or commit (-b, -t or -c)")
exit(1)
tag_def = bool(command_line.tag)
branch_def = bool(command_line.branch)
commit_def = bool(command_line.commit)
tmp = tag_def + branch_def + commit_def
if tmp > 1:
log.error("You should specify ONLY ONE of the following options: branch, tag or commit.")
exit(1)
IDs = {
'arches':[],
'version':None,
'target_platform':None,
'repositories':[],
}
# get project
proj = get_project(models, must_exist=True, name=command_line.project)
if not command_line.project and not command_line.skip_spec_check: # local git repository
@@ -884,8 +927,8 @@ def build():
if not proj.is_package:
log.error('The project %s is not a package and can not be built.' % proj)
exit(1)
# try to automatically resolve all the options, discarding all the other options except --branch
# If we couldn't - use only user-given options. If we could, but user specified other parameters -
# reject everything we've resolved and use only user-given options.
@@ -951,13 +994,13 @@ def build():
log.debug('Autoresolved options were rejected.')
log.debug('Git commit hash: %s' % commit_hash)
# get save-to repository
save_to_repository = None
build_for_platform = None
available_repos = proj.repositories
if command_line.save_to_repository:
items = command_line.save_to_repository.split('/')
else:
@@ -979,8 +1022,8 @@ def build():
else:
log.error("save-to-repository option format: [platform/]repository")
exit(1)
if (as_saveto and as_saveto in available_repos and as_saveto.platform.name == pl_name
and as_saveto.name == repo_name):
save_to_repository = as_saveto
else:
@@ -992,19 +1035,19 @@ def build():
if not build_for_platform:
log.error("Can not build for platform %s. Select one of the following:\n%s" % (pl_name, ', '.join(pls)))
exit(1)
for repo in build_for_platform.repositories:
if repo.name == repo_name:
save_to_repository = repo
break
if not save_to_repository:
log.error("Incorrect save-to repository %s/%s.\nSelect one of the following:\n%s" % (pl_name, repo_name,
log.error("Incorrect save-to repository %s/%s.\nSelect one of the following:\n%s" % (pl_name, repo_name,
', '.join([str(x) for x in build_for_platform.repositories])))
exit(1)
log.debug('Save-to repository: ' + str(save_to_repository))
# get the list of build repositories
build_platforms = Platform.get_build_platforms(models)
build_platform_names = [x.name for x in build_platforms]
@@ -1017,12 +1060,12 @@ def build():
pl_name = items[0]
elif len(items) == 1:
repo_name = items[0]
pl_name = default_build_platform
log.debug("Platform for selected repository %s is assumed to be %s" % (repo_name, pl_name))
else:
log.error("'repository' option format: [platform/]repository")
exit(1)
if pl_name not in build_platform_names:
log.error("Can not use build repositories from platform %s!\nSelect one of the following:\n%s" % (pl_name,
', '.join(build_platform_names)))
@@ -1043,7 +1086,7 @@ def build():
build_repositories.append(build_repo)
else:
build_platform = save_to_repository.platform
if build_platform.name not in build_platform_names or not build_platform.repositories:
log.error("Could not resolve repositories to build with. Please specify it (-r option)")
exit(1)
@@ -1056,7 +1099,7 @@ def build():
if not build_repositories:
log.error("You have to specify the repository(s) to build with (-r option)")
exit(1)
log.debug("Build repositories: " + str(build_repositories))
# get architectures
@@ -1106,7 +1149,7 @@ def build():
ids = ','.join([str(i) for i in build_ids])
projects_cfg['main']['last_build_ids'] = ids
projects_cfg[str(proj)]['last_build_ids'] = ids
def publish():
log.debug('PUBLISH started')
for task_id in command_line.task_ids:
@@ -1129,7 +1172,7 @@ def _print_build_status(models, ID):
if command_line.short:
print repr(bl)
else:
print '%-20s%s' %('Buildlist ID:', bl.id)
# print '%-20s%s' %('Owner:', bl.owner.uname)
print '%-20s%s' %('Project:', bl.project.fullname)
print '%-20s%s' %('Status:', bl.status_string)
@@ -1144,7 +1187,7 @@ def _print_build_status(models, ID):
if bl.chroot_tree:
print '%-20s%s' %('Chroot Tree:', bl.chroot_tree)
print ''
def status():
log.debug('STATUS started')
ids = []
@@ -1168,9 +1211,9 @@ def status():
log.error('"%s" is not a number' % i)
continue
_print_build_status(models, i)
def _update_location(path=None, silent=True):
try:
if not path:
@@ -1193,22 +1236,22 @@ def _update_location_recursive(path):
if '.git' in items: # it's a git directory!
_update_location(path, silent=False)
return
for item in items:
item_path = os.path.join(path, item)
if not os.path.isdir(item_path) or os.path.islink(item_path):
continue
_update_location_recursive(item_path)
def locate():
log.debug('LOCATE started')
if not command_line.action: # show location
if not command_line.project:
print "To show a project location, you have to specify a project name ('-p' option)"
return
tmp = command_line.project.split('/')
if len(tmp) > 2:
log.error('error: the project format is "[owner_name/]project_name"')
@@ -1217,7 +1260,7 @@ def locate():
proj = '%s/%s' % (default_group, tmp[0])
else: # len == 2
proj = command_line.project
if proj not in projects_cfg or 'location' not in projects_cfg[proj] or not projects_cfg[proj]['location']:
print 'error: project %s can not be located' % proj
exit(1)
@@ -1235,7 +1278,7 @@ def locate():
elif command_line.action == 'update-recursive':
path = command_line.directory or os.getcwd()
_update_location_recursive(path)
def show():
log.debug('SHOW started')
Log.set_silent()
@@ -1251,7 +1294,7 @@ def show():
for repo in plat.repositories:
repo_names.append(str(repo))
out = (t == 'build-platforms' and platform_names) or (t == 'build-repos' and repo_names)
if t in ['save-to-platforms', 'save-to-repos']:
proj = get_project(models, must_exist=True, name=command_line.project)
repos = proj.repositories
@@ -1263,14 +1306,14 @@ def show():
platform_names = list(set(platform_names))
out = (t == 'save-to-platforms' and platform_names) or (t == 'save-to-repos' and repo_names)
print ' '.join(out)
def clean():
log.debug("CLEAN started")
_update_location()
find_spec_problems(auto_remove=command_line.auto_remove)
if __name__ == '__main__':
apply_aliases()


@@ -36,7 +36,7 @@ def bytes2human(n, format='%(value).1f%(symbol)s', symbols='basic'):
prefix = {}
for i, s in enumerate(symbols[1:]):
prefix[s] = 1 << (i+1)*10
for symbol in reversed(symbols[1:]):
if n >= prefix[symbol]:
value = float(n) / prefix[symbol]
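For reference, a minimal re-implementation of the conversion above, assuming the 'basic' symbol set is ('B', 'K', 'M', 'G', 'T') and the default format string; the real function is configurable:

def bytes2human_sketch(n):
    symbols = ('B', 'K', 'M', 'G', 'T')
    prefix = {}
    for i, s in enumerate(symbols[1:]):
        prefix[s] = 1 << (i + 1) * 10      # K = 2**10, M = 2**20, ...
    for symbol in reversed(symbols[1:]):
        if n >= prefix[symbol]:
            return '%.1f%s' % (float(n) / prefix[symbol], symbol)
    return '%dB' % n

# bytes2human_sketch(10240) -> '10.0K'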
@@ -50,25 +50,25 @@ class AbfJson(object):
self.password = password
self.abf_url = re.compile('/+$').sub('', abf_url)
self.file_store_url = re.compile('/+$').sub('', file_store_url)
if not self.file_store_url.startswith('http://'):
log.error('File-store URL has to start with "http://"')
exit(1)
self.file_store_domain = self.file_store_url[7:]
#does not work sometimes
'''self.password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
self.password_manager.add_password(None, abf_url, login, password)
self.auth = urllib2.HTTPBasicAuthHandler(self.password_manager)
self.opener = urllib2.build_opener(self.auth)
urllib2.install_opener(self.opener)
'''
# but it works!
self.base64_auth_string = base64.standard_b64encode('%s:%s' % (login, password)).replace('\n', '')
self.log = log
errors = {
"Invalid email or password.": AuthError,
"403 Forbidden | Rate Limit Exceeded": RateLimitError,
@@ -81,10 +81,10 @@ class AbfJson(object):
"Access violation to this page!": ForbiddenError,
"Bad Request": BadRequestError,}
good_messages = ['Errors during build publishing!', 'Build is queued for publishing']
# in case of this error, get_url_contents will print the error message and exit
fatal_errors = [AuthError, RateLimitError, InternalServerError, ServerWorksError]
def process_response(self, response_string):
try:
res = json.loads(response_string)
@@ -110,12 +110,12 @@ class AbfJson(object):
log.error('Sorry, but something went wrong and the request sent to ABF is bad. Please '
'notify the developers and send them a set of command-line arguments and the request data:\n%s\n%s' % (URL, post_json or "No POST DATA") )
exit(1)
if exception in AbfJson.fatal_errors:
exit(2)
raise exception(m)
return res
def get_url_contents(self, path, GET=None, POST=None, file_store=False, PUT=None, DELETE=None):
url = ((file_store and self.file_store_url) or self.abf_url) + path
if GET:
@@ -211,7 +211,7 @@ class AbfJson(object):
fobj = open(value, 'rb')
datablock = 1
while datablock:
datablock = fobj.read(AbfJson.BLOCK_SIZE)
@@ -222,8 +222,8 @@ class AbfJson(object):
body.write('\r\n')
body.write('--%s--\r\n' % boundary)
def compute_sha1(self, file_name):
fd = open(file_name, 'rb')
datablock = 1
@@ -234,12 +234,12 @@ class AbfJson(object):
s.update(datablock)
hex_sha = s.hexdigest()
return hex_sha
def upload_file(self, file_name, silent=False):
self.log.debug('Looking for "%s" in file-store...' % file_name)
sha_hash = self.compute_sha1(file_name)
self.log.debug('File hash is %s' % sha_hash)
res = self.get_file_info_by_hash(sha_hash)
if res:
fn = res[0]['file_name']
@@ -251,11 +251,11 @@ class AbfJson(object):
if fn != new_fn and not silent:
self.log.warning('The name of the file in file-store is %s, but you are trying to upload file %s' % (fn, new_fn))
return sha_hash
tempfile.tempdir = '/tmp'
boundary = uuid.uuid4().hex
body = tempfile.SpooledTemporaryFile(max_size = AbfJson.MAX_SIZE)
self.__encode_multipart_formdata(body, boundary,[], [('file_store[file]', file_name)])
length = body.tell()
@@ -267,18 +267,18 @@ class AbfJson(object):
headers = {'Content-Type' : content_type, 'Content-Length' : length, "Authorization": "Basic %s" % self.base64_auth_string}
conn.request('POST', '/api/v1/upload', body, headers)
body.close()
resp = conn.getresponse()
output = resp.read()
conn.close()
if resp.status < 200 or resp.status > 299:
self.log.error("Could not upload file. HTTP error %s %s" % (resp.status, resp.reason))
exit(1)
output = json.loads(output)
return output['sha1_hash'] or None
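A hedged usage sketch (not part of the diff): upload a file and fetch it back by the returned hash; models is an initialized Models instance and the paths are placeholders:

sha1 = models.jsn.upload_file('/tmp/example-0.1.tar.gz', silent=True)
models.jsn.fetch_file(sha1, '/tmp/example-copy.tar.gz')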
def fetch_file(self, sha_hash, path):
URL = self.file_store_url + '/api/v1/file_stores/' + sha_hash
try:
@@ -291,7 +291,7 @@ class AbfJson(object):
fd = open(path, 'wb')
shutil.copyfileobj(response, fd)
fd.close()
def get_file_info_by_hash(self, sha_hash):
URL = "/api/v1/file_stores.json"
GET = {'hash': sha_hash}
@@ -300,48 +300,48 @@ class AbfJson(object):
def get_architectures(self):
URL = "/api/v1/arches.json"
return self.get_url_contents(URL)
def get_platforms(self, typ=None):
URL = "/api/v1/platforms.json"
GET = {}
if typ:
GET['type'] = typ
return self.get_url_contents(URL, GET)
def get_platform_by_id(self, pl_id):
pl_id = int(pl_id)
URL = "/api/v1/platforms/%d.json" % pl_id
return self.get_url_contents(URL)
def get_build_platforms(self):
URL = "/api/v1/platforms/platforms_for_build.json"
URL = "/api/v1/platforms/platforms_for_build.json"
return self.get_url_contents(URL)
def get_repository_by_id(self, rep_id):
rep_id = int(rep_id)
URL = "/api/v1/repositories/%d.json" % rep_id
return self.get_url_contents(URL)
def get_buildlist_by_id(self, bl_id):
bl_id = int(bl_id)
URL = "/api/v1/build_lists/%d.json" % bl_id
return self.get_url_contents(URL)
def get_project_by_id(self, p_id):
p_id = int(p_id)
URL = "/api/v1/projects/%d.json" % p_id
return self.get_url_contents(URL)
def get_project_id_by_name(self, key):
proj_owner, proj_name = key
URL = "/api/v1/projects/get_id.json"
GET = {'name': proj_name, 'owner':proj_owner}
return self.get_url_contents(URL, GET)
def new_build_task(self, data):
URL = "/api/v1/build_lists.json"
return self.get_url_contents(URL, GET=None, POST=data)
def publish(self, task_id):
task_id = int(task_id)
URL = "/api/v1/build_lists/%d/publish.json" % task_id
@@ -371,18 +371,29 @@ class AbfJson(object):
proj_id = int(proj_id)
URL = "/api/v1/projects/%d/refs_list.json" % proj_id
return self.get_url_contents(URL)
def get_user_by_id(self, user_id):
user_id = int(user_id)
URL = "/api/v1/users/%d.json" % user_id
return self.get_url_contents(URL)
def get_group_by_id(self, group_id):
group_id = int(group_id)
URL = "/api/v1/groups/%d.json" % group_id
return self.get_url_contents(URL)
def get_search_results(self, search_type, query):
URL = "/api/v1/search.json"
GET = {'type': search_type, 'query': query, 'per_page': 100}
return self.get_url_contents(URL, GET=GET)
def get_list(self, list_type, page):
URL = "/api/v1/" + list_type +".json"
GET = {'page': page, 'per_page': 100 }
return self.get_url_contents(URL, GET=GET)
def get_projects_single(self, repo_id, page):
URL = "/api/v1/repositories/%d/projects.json" % repo_id
GET = {'page': page, 'per_page': 100 }
return self.get_url_contents(URL, GET=GET)
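The two new endpoints are plain paginated lists. A minimal consumption sketch, mirroring how Platform.info below drives get_list when page is '*' (models is an initialized Models instance):

def fetch_all(models, list_type):
    # Request pages until the server returns an empty list; the response
    # key matches the list type ('platforms', etc.).
    items = []
    page = 1
    while True:
        res = models.jsn.get_list(list_type, page)
        if not res[list_type]:
            break
        items += res[list_type]
        page += 1
    return items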


@@ -28,7 +28,7 @@ def get_cached(cache, cache_key, func, *args, **kwargs):
return val
class Model(object):
def __init__(self, models, ID=None, init_data=None):
self.params_dict = {}
@@ -40,34 +40,34 @@ class Model(object):
if not ID and not init_data:
raise Exception("At least one of 'ID' or 'init_data' parameters have to be set! ")
if init_data and 'id' not in init_data:
raise Exception("Key 'id' have to present in initial data!")
if ID:
cache_key = '%s-%s' % (self.__class__.__name__, ID)
if st_cache and st_cache.has_key(cache_key):
#read cached value
log.debug( 'Loading %s %s from cache' % (self.__class__.__name__, ID))
self.stub = False
self.init_data = st_cache.get(cache_key)
self.load()
else:
log.debug('Loading %s %s using API' % (self.__class__.__name__, ID))
self.stub = False
self.get_init_data(ID)
self.load()
#write to cache
if self.cacher:
self.cacher.put(cache_key, self.init_data)
log.debug('Results were cached')
for field in self.__class__.required_fields:
if field not in self.params_dict:
raise Exception("One of the fields required for %s model was not specified: %s" %
raise Exception("One of the fields required for %s model was not specified: %s" %
(self.__class__.__name__, field))
else:
log.debug('Creating a stub for %s %s' % (self.__class__.__name__, self.init_data['id']))
@@ -89,25 +89,26 @@ class Model(object):
if key in self.params_dict:
return self.params_dict[key]
raise KeyError("Key '%s' can not be found!" % key)
def __eq__(self, other):
return self.id == other.id
def __ne__(self, other):
return self.id != other.id
class Platform(Model):
required_fields = ['id', 'name', 'description', 'parent_platform_id', 'created_at', 'updated_at', 'released',
'owner', 'visibility', 'platform_type', 'distrib_type', 'repositories']
filter_dict = { 'id': '*', 'name': '*', 'visibility': '*', 'owner': '*', 'platform_type': '*', 'repositories': '*', 'page': '1' }
def get_init_data(self, ID):
ID = str(ID)
log.debug('Reading platform ' + ID)
self.init_data = self.models.jsn.get_platform_by_id(ID)
self.init_data = self.init_data['platform']
def load(self):
self.params_dict = self.init_data.copy()
if 'repositories' in self.init_data:
repos = self.params_dict['repositories']
@@ -124,11 +125,11 @@ class Platform(Model):
if 'updated_at' in self.init_data:
self.params_dict['updated_at'] = datetime.fromtimestamp(float(self.init_data['updated_at']))
self.cacher = lt_cache
def __repr__(self):
return self.name
@staticmethod
def _get_platforms_filtered(models, typ):
cache_key = '%s__%s' % (Platform.__name__, typ)
platforms = get_cached(st_cache, cache_key, models.jsn.get_platforms, typ=typ)['platforms']
@@ -137,15 +138,15 @@ class Platform(Model):
p = Platform(models, init_data = pl)
output.append(p)
return output
@staticmethod
def get_user_platforms_main(models):
return Platform._get_platforms_filtered(models, 'main')
@staticmethod
def get_user_platforms_personal(models):
return Platform._get_platforms_filtered(models, 'personal')
@staticmethod
def get_build_platforms(models):
cache_key = '%s__buil' % (Platform.__name__)
@@ -155,7 +156,7 @@ class Platform(Model):
p = Platform(models, init_data=pl)
output.append(p)
return output
@staticmethod
def search(models, query):
res = models.jsn.get_search_results('platforms', query)
@@ -165,17 +166,40 @@ class Platform(Model):
p = Platform(models, init_data=platform)
platforms_out.append(p)
return platforms_out
@staticmethod
def info(models):
if Platform.filter_dict['page'] == '*':
platforms = []
num = 1
while 1:
res = models.jsn.get_list('platforms', num)
if not res['platforms']:
break
platforms += res['platforms']
num += 1
else:
res = models.jsn.get_list('platforms', Platform.filter_dict['page'])
platforms = res['platforms']
platforms_out = []
for platform in platforms:
p = Platform(models, init_data=platform)
platforms_out.append(p)
for value in Platform.filter_dict:
if Platform.filter_dict[value] != '*' and value != 'page':
platforms_out = [i for i in platforms_out if str(Platform.filter_dict[value]) in str(i.params_dict[value]) ]
return platforms_out
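A sketch of driving the class-level filter directly (assuming an initialized models object); note that the filter above is a substring match on the stringified attribute:

Platform.filter_dict['name'] = 'personal'
personal = Platform.info(models)   # keeps platforms whose name contains 'personal'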
class Repository(Model):
required_fields = ['id', 'name', 'created_at', 'updated_at', 'description', 'publish_without_qa', 'platform']
filter_dict = { 'id': '*', 'name': '*', 'page': '1' }
def get_init_data(self, ID):
ID = str(ID)
log.debug('Reading repository ' + str(ID))
self.init_data = self.models.jsn.get_repository_by_id(ID)
self.init_data = self.init_data['repository']
def load(self):
self.params_dict = self.init_data.copy()
if 'platform' in self.init_data:
@@ -188,7 +212,20 @@ class Repository(Model):
def __repr__(self):
return '%s/%s' % (self.platform.name, self.name)
@staticmethod
def info(models):
platform_info = Platform.info(models)
repo_info = []
for platform in platform_info:
repos = platform.params_dict['repositories']
for repo in repos:
repo_fin = Repository(models, repo.id)
repo_info.append(repo_fin)
for value in Repository.filter_dict:
if Repository.filter_dict[value] != '*' and value != 'page':
repo_info = [i for i in repo_info if str(Repository.filter_dict[value]) in str(i.params_dict[value]) ]
return repo_info
class Arch(Model):
@@ -204,11 +241,11 @@ class Arch(Model):
self.init_data = arch
if not self.init_data:
raise Exception("Architecture with id %s not found!" % ID)
def load(self):
self.params_dict = self.init_data
self.cacher = lt_cache
@staticmethod
def get_arches(models):
cache_key = '%s__all' % (Arch.__name__)
@@ -218,7 +255,7 @@ class Arch(Model):
a = Arch(models, init_data=arch)
output.append(a)
return output
@staticmethod
def get_arch_by_name(models, name):
cache_key = '%s__all' % (Arch.__name__)
@@ -228,10 +265,10 @@ class Arch(Model):
a = Arch(models, init_data=arch)
return a
return None
def __repr__(self):
return self.name
class User(Model):
required_fields = ['id', 'name', 'email', 'language', 'professional_experience', 'site', 'company', 'location',
'uname', 'own_projects_count', 'build_priority', 'created_at', 'updated_at', 'avatar_url', 'html_url']
@@ -241,17 +278,17 @@ class User(Model):
log.debug('Reading user ' + ID)
self.init_data = self.models.jsn.get_user_by_id(ID)
self.init_data = self.init_data['user']
def load(self):
self.params_dict = self.init_data.copy()
if 'created_at' in self.init_data:
self.params_dict['created_at'] = datetime.fromtimestamp(float(self.init_data['created_at']))
if 'updated_at' in self.init_data:
self.params_dict['updated_at'] = datetime.fromtimestamp(float(self.init_data['updated_at']))
self.cacher = lt_cache
@staticmethod
def search(models, query):
res = models.jsn.get_search_results('users', query)
@@ -261,10 +298,10 @@ class User(Model):
u = User(models, init_data=user)
users_out.append(u)
return users_out
def __repr__(self):
return self.uname
class Group(Model):
required_fields = ['id', 'uname', 'own_projects_count', 'created_at', 'updated_at', 'description', 'owner',
'avatar_url', 'html_url']
@@ -274,20 +311,20 @@ class Group(Model):
log.debug('Reading group ' + ID)
self.init_data = self.models.jsn.get_group_by_id(ID)
self.init_data = self.init_data['group']
def load(self):
self.params_dict = self.init_data.copy()
if 'created_at' in self.init_data:
self.params_dict['created_at'] = datetime.fromtimestamp(float(self.init_data['created_at']))
if 'updated_at' in self.init_data:
self.params_dict['updated_at'] = datetime.fromtimestamp(float(self.init_data['updated_at']))
if 'owner' in self.params_dict:
self.params_dict['owner'] = User(self.models, init_data=self.params_dict['owner'])
self.cacher = lt_cache
@staticmethod
def search(models, query):
res = models.jsn.get_search_results('groups', query)
@@ -297,20 +334,22 @@ class Group(Model):
g = Group(models, init_data=group)
groups_out.append(g)
return groups_out
def __repr__(self):
return self.uname
class Project(Model):
required_fields = ['id', 'name', 'created_at', 'updated_at', 'visibility', 'description', 'ancestry', 'has_issues',
'has_wiki', 'default_branch', 'is_package', 'owner', 'repositories', 'owner_type']
filter_dict = { 'id': '*', 'name': '*', 'page': '1' }
def get_init_data(self, proj_id):
log.debug("Reading project " + str(proj_id))
self.init_data = self.models.jsn.get_project_by_id(proj_id)
self.init_data = self.init_data['project']
@staticmethod
def get_by_name(models, key):
''' key is a pair (owner_name, project_name), or just owner_name/project_name'''
@@ -322,7 +361,7 @@ class Project(Model):
res = models.jsn.get_project_id_by_name(key)
proj_id = res['project']['id']
return Project(models, ID=proj_id)
def load(self):
self.params_dict = self.init_data.copy()
if 'repositories' in self.init_data:
@@ -337,14 +376,14 @@ class Project(Model):
self.params_dict['owner'] = User(self.models, init_data=self.init_data['owner'])
elif self.params_dict['owner_type'] == 'Group':
self.params_dict['owner'] = Group(self.models, init_data=self.init_data['owner'])
if 'created_at' in self.init_data:
self.params_dict['created_at'] = datetime.fromtimestamp(float(self.init_data['created_at']))
if 'updated_at' in self.init_data:
self.params_dict['updated_at'] = datetime.fromtimestamp(float(self.init_data['updated_at']))
self.cacher = st_cache
@staticmethod
def search(models, query):
res = models.jsn.get_search_results('projects', query)
@@ -354,24 +393,48 @@ class Project(Model):
pr = Project(models, init_data=proj)
projects_out.append(pr)
return projects_out
def __repr__(self):
if 'owner' in self.params_dict:
return '%s/%s' % (self.owner.uname, self.name)
else:
return self.fullname
def get_refs_list(self, models):
return self.models.jsn.get_git_refs_list(self.id)['refs_list']
@staticmethod
def info(models):
repo_info = Repository.info(models)
projects_info = []
projs = []
for repo in repo_info:
if Project.filter_dict['page'] == '*':
num = 1
while 1:
p = models.jsn.get_projects_single(repo.id, num)
if not p['repository']['projects']:
break
projs += p['repository']['projects']
num += 1
else:
p = models.jsn.get_projects_single(repo.id, Project.filter_dict['page'])
projs += p['repository']['projects']
for proj in projs:
pr = Project(models, init_data=proj)
projects_info.append(pr)
for value in Project.filter_dict:
if Project.filter_dict[value] != '*' and value != 'page':
projects_info = [i for i in projects_info if str(Project.filter_dict[value]) in str(i.params_dict[value]) ]
return projects_info
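For illustration, the chain that "abf info projects -f platforms.name=rosa2012lts" sets in motion: info_single writes the pair into Platform.filter_dict, and Project.info then walks Platform.info -> Repository.info -> per-repository projects:

Platform.filter_dict['name'] = 'rosa2012lts'
projects = Project.info(models)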
class BuildList(Model):
required_fields = ['id', 'container_path', 'status', 'status_string', 'package_version', 'project', 'created_at', 'updated_at',
# 'build_for_platform', 'save_to_repository', 'arch', 'update_type', 'auto_publish', 'extra_repositories',
'build_for_platform', 'save_to_repository', 'arch', 'update_type', 'extra_repositories',
'commit_hash', 'duration', 'include_repos', 'priority', 'build_log_url', 'advisory', 'mass_build', 'log_url', 'chroot_tree']
# 'commit_hash', 'duration', 'owner', 'owner_type', 'include_repos', 'priority', 'build_log_url', 'advisory', 'mass_build']
status_by_id = {
0: 'build complete',
1: 'platform not found',
@@ -395,15 +458,15 @@ class BuildList(Model):
}
status_by_name = dict([(status_by_id[x], x) for x in status_by_id])
final_statuses = [1, 2, 3, 4, 666, 5000, 6000, 8000, 9000, 12000, 14000]
def get_init_data(self, ID):
ID = str(ID)
log.debug('Reading buildlist ' + str(ID))
self.init_data = self.models.jsn.get_buildlist_by_id(ID)
self.init_data = self.init_data['build_list']
def load(self):
self.params_dict = self.init_data.copy()
self.params_dict['project'] = Project(self.models, init_data=self.params_dict['project'])
@@ -457,7 +520,7 @@ class BuildList(Model):
DATA = {
'project_id': project.id,
'commit_hash': commit_hash,
'update_type': update_type,
'save_to_repository_id': save_to_repository.id,
'build_for_platform_id': None,
'auto_publish_status': auto_publish_status,
@@ -478,7 +541,7 @@ class BuildList(Model):
build_platforms[repo.platform.name]['repositories'] = []
build_platforms[repo.platform.name]['id'] = repo.platform.id
build_platforms[repo.platform.name]['repositories'].append(repo.id)
build_ids = []
for bpl in build_platforms:
DATA['build_for_platform_id'] = build_platforms[bpl]['id']
DATA['include_repos'] = build_platforms[bpl]['repositories']
@@ -493,11 +556,11 @@ class BuildList(Model):
log.error('Sorry, but something went wrong and the request sent to ABF is bad. Please '
'notify the console-client developers. Send them a set of command-line arguments and the request data:\n%s' % DATA )
exit(1)
log.info("Task %s|%s|%s|%s has been sent. Build task id is %s" %
log.info("Task %s|%s|%s|%s has been sent. Build task id is %s" %
(project, bpl, save_to_repository, arch, result['build_list']['id']))
build_ids.append(result['build_list']['id'])
return build_ids
def publish(self):
try:
log.info("Publishing the project %s..." % self.id)
@@ -641,11 +704,11 @@ class Models(object):
cls._instance[tmp] = super(Models, cls).__new__(
cls, abf_url, file_store_url, login, password, *args, **kwargs)
return cls._instance[tmp]
#properties_by_class = {Platform: 'platforms', Repository: 'repositories', Arch: 'arches',
# BuildList: 'buildlists', Project: 'projects'}
#properties_by_name = dict([(properties_by_class[x], x) for x in properties_by_class])
def __init__(self, abf_url, file_store_url, login, password):
log.debug('Initializing models for ' + abf_url)
self.abf_url = abf_url
@@ -653,12 +716,12 @@ class Models(object):
self.login = login
self.password = password
self.jsn = AbfJson(abf_url, file_store_url, login, password, log)
@staticmethod
def clear_cache():
lt_cache.clear()
st_cache.clear()