Merge python3 branch from OpenMandriva

Mikhail Novosyolov 2019-04-15 18:28:34 +03:00
commit 21a490a2d8
10 changed files with 207 additions and 177 deletions

.gitignore (new file)

@@ -0,0 +1 @@
tmp

Makefile

@@ -5,17 +5,24 @@
#############################################################################
PYTHON=python
PYVER := $(shell $(PYTHON) -c 'import sys; print "%.3s" %(sys.version)')
PYSYSDIR := $(shell $(PYTHON) -c 'import sys; print sys.prefix')
PYTHON=python3
PYVER := $(shell $(PYTHON) -c 'import sys; print("%.3s" %(sys.version))')
PYSYSDIR := $(shell $(PYTHON) -c 'import sys; print(sys.prefix)')
PYLIBDIR = $(PYSYSDIR)/lib/python$(PYVER)
PKGDIR = $(PYLIBDIR)/site-packages
BINDIR=/usr/bin
PREFIX=/usr
BINDIR=$(PREFIX)/bin
ETCDIR=/etc
MANDIR=/usr/share/man
USRSHAREDIR=/usr/share
MANDIR=$(PREFIX)/share/man
DATADIR=$(PREFIX)/share
######### default config #############
MOCK = mock
default_url = https://abf.openmandriva.org
default_filestore_url = http://file-store.openmandriva.org
def_bp = cooker
######### /default config ############
FILES = abf/console/*.py abf/*.py abf/api/*.py
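
The only functional change in this hunk is converting the embedded Python one-liners to print() calls so they run under Python 3. A standalone sketch of what the PYVER line computes (not part of the Makefile itself); note that "%.3s" keeps only the first three characters of sys.version, which would turn a hypothetical 3.10 into "3.1":

    import sys
    print("%.3s" % (sys.version))          # first three characters, e.g. "3.8"
    print("%d.%d" % sys.version_info[:2])  # a truncation-proof alternative
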
@@ -28,19 +35,27 @@ clean:
install:
mkdir -p $(DESTDIR)$(PKGDIR) $(DESTDIR)$(BINDIR) $(DESTDIR)$(MANDIR)/man1
cp -p --parents $(FILES) $(DESTDIR)$(PKGDIR)
cp -p "abf.py" $(DESTDIR)$(BINDIR)/abf
install -m0755 abf.py $(DESTDIR)$(BINDIR)/abf
mkdir -p $(DESTDIR)$(USRSHAREDIR)/bash-completion
# set default config values
sed -i -e "s,https://abf.openmandriva.org,$(default_url),g" \
-e "s,http://file-store.openmandriva.org,$(default_filestore_url),g" \
-e "s,cooker,$(def_bp),g" \
$(DESTDIR)$(PKGDIR)/abf/console/config.py
# TODO: set mock as a variable at runtime
sed -i -e "s,mock_urpm,m0ck_urpm,g" $(DESTDIR)$(BINDIR)/abf
sed -i -e "s,mock,$(MOCK),g" $(DESTDIR)$(BINDIR)/abf
sed -i -e "s,m0ck_urpm,mock_urpm,g" $(DESTDIR)$(BINDIR)/abf
mkdir -p $(DESTDIR)$(DATADIR)/bash-completion
mkdir -p $(DESTDIR)$(ETCDIR)/bash_completion.d
mkdir -p $(DESTDIR)$(ETCDIR)/profile.d
cp "bash_autocomplete" $(DESTDIR)$(USRSHAREDIR)/bash-completion/abf
cp "bash_autocomplete" $(DESTDIR)$(DATADIR)/bash-completion/abf
cp "abfcd.sh" $(DESTDIR)$(ETCDIR)/profile.d/abfcd.sh
mkdir -p $(DESTDIR)$(ETCDIR)/abf/mock-urpm/configs/
cp configs/* $(DESTDIR)$(ETCDIR)/abf/mock-urpm/configs/
mkdir -p $(DESTDIR)/var/cache/abf/mock-urpm
mkdir -p $(DESTDIR)/var/lib/abf/mock-urpm/src
chmod 0777 $(DESTDIR)/var/lib/abf/mock-urpm -R
mkdir -p $(DESTDIR)$(ETCDIR)/abf/$(MOCK)/configs/
cp configs/* $(DESTDIR)$(ETCDIR)/abf/$(MOCK)/configs/
mkdir -p $(DESTDIR)/var/cache/abf/$(MOCK)
mkdir -p $(DESTDIR)/var/lib/abf/$(MOCK)/src
chmod 0777 $(DESTDIR)/var/lib/abf/$(MOCK) -R
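
The three sed passes above implement a shield-and-restore substitution: mock_urpm is temporarily renamed so the broad mock -> $(MOCK) replacement cannot corrupt it, then renamed back. The same trick sketched in Python, with a hypothetical $(MOCK) value:

    text = 'run mock after mock_urpm setup'
    MOCK = 'mock-custom'                            # hypothetical $(MOCK) value
    text = text.replace('mock_urpm', 'm0ck_urpm')   # shield the longer name
    text = text.replace('mock', MOCK)               # broad substitution is now safe
    text = text.replace('m0ck_urpm', 'mock_urpm')   # restore the shielded name
    assert text == 'run mock-custom after mock_urpm setup'
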

abf.py

@@ -1,12 +1,11 @@
#!/usr/bin/python -tt
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
import warnings
import importlib
warnings.filterwarnings('ignore','Module argparse was already imported')
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
import argparse
from argparse import RawDescriptionHelpFormatter
import os
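
The dropped reload(sys)/sys.setdefaultencoding('utf-8') pair was a Python 2 hack (setdefaultencoding is hidden once site.py has run); Python 3 strings are Unicode natively, so nothing replaces it. The new importlib import is what 2to3 substitutes for the removed reload() builtin, though no reload call survives here. If a stream encoding ever needs forcing under Python 3, the supported idiom is:

    import sys
    sys.stdout.reconfigure(encoding='utf-8')   # Python 3.7+
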
@@ -26,7 +25,7 @@ from abf.console.log import Log
from abf.model import *
configs_dir = '/etc/abf/mock-urpm/configs/'
configs_dir = '/etc/abf/mock/configs/'
def test():
log.debug(_('TEST started'))
@@ -97,7 +96,8 @@ def parse_command_line():
parser.add_argument('-c', '--clear-cache', action='store_true', help=_('clear cached information about repositories, platforms, projects, etc.'))
parser.add_argument('-q', '--quiet', action='store_true', help=_('Do not display info messages'))
parser.add_argument('-C', '--config', action='store', help=_('config file to be used'))
subparsers = parser.add_subparsers(title='command')
subparsers = parser.add_subparsers(title='command', dest='help')
subparsers.required = True
# help
subparser = subparsers.add_parser('help', help=_('show a help for command'))
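
In Python 3, argparse subcommands became optional by default, so `abf` with no command would fall through instead of erroring; setting a dest plus required restores the Python 2 behaviour. A minimal reproduction of the pattern (the diff uses dest='help'; 'command' below is just a clearer illustration):

    import argparse

    p = argparse.ArgumentParser()
    sub = p.add_subparsers(title='command', dest='command')
    sub.required = True        # without this, p.parse_args([]) would succeed
    sub.add_parser('help')
    p.parse_args(['help'])     # ok; parse_args([]) now errors out cleanly
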
@@ -218,7 +218,7 @@ def parse_command_line():
subparser.add_argument('--testing', action='store_true', help=_('Include "testing" subrepository.'))
subparser.add_argument('--no-extra-tests', action='store_true', help=_('Do not launch comprehensive tests.'))
subparser.add_argument('--auto-create-container', action='store_true', help=_('enable automatic creation of container'))
subparser.add_argument('--cached-chroot', action='store_true', help=_('use cached chroot for the build'))
subparser.add_argument('--no-cached-chroot', action='store_true', help=_('do NOT use cached chroot for the build'))
subparser.add_argument('--save-chroot', action='store_true', help=_('save build chroot in case of failure'))
subparser.add_argument('--update-type', action='store', choices=BuildList.update_types, help=_('Update type. Default is "%s".') %
(BuildList.update_types[0]) )
@@ -262,7 +262,7 @@ def parse_command_line():
subparser.add_argument('--testing', action='store_true', help=_('Include "testing" subrepository.'))
subparser.add_argument('--no-extra-tests', action='store_true', help=_('Do not launch comprehensive tests.'))
subparser.add_argument('--auto-create-container', action='store_true', help=_('enable automatic creation of container'))
subparser.add_argument('--cached-chroot', action='store_true', help=_('use cached chroot for the build'))
subparser.add_argument('--no-cached-chroot', action='store_true', help=_('do NOT use cached chroot for the build'))
subparser.add_argument('--save-chroot', action='store_true', help=_('save build chroot in case of failure'))
subparser.add_argument('--update-type', action='store', choices=BuildList.update_types, help=_('Update type. Default is "%s".') %
(BuildList.update_types[0]) )
@@ -271,8 +271,8 @@ def parse_command_line():
subparser.add_argument('--skip-proj-cfg-update', action='store_true', help=_('Do not update cache with information about project builds.'))
subparser.set_defaults(func=chain_build)
# mock-urpm
subparser = subparsers.add_parser('mock-urpm', help=_('Build a project locally using mock-urpm.'), epilog=_('No checkouts will be made,'
# mock
subparser = subparsers.add_parser('mock', help=_('Build a project locally using mock.'), epilog=_('No checkouts will be made,'
'the current git repository state will be used'))
subparser.add_argument('-c', '--config', action='store', help=_('A config template to use. Specify one of the config names '
'from %s. Directory path should be omitted. If no config specified, "default.cfg" will be used') % configs_dir)
@@ -406,10 +406,10 @@ def info_single():
else:
for param in command_line.filter:
try:
st, param = map(str, param.split('.'))
st, param = list(map(str, param.split('.')))
except:
pass
attr, value = map(str, param.split('='))
attr, value = list(map(str, param.split('=')))
cl[st].filter_dict[attr]=value
log.debug(_('Filter setup for instance %s ') % st)
st = command_line.type
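
map() returns a one-shot iterator in Python 3 rather than a list, which is why the port wraps these calls in list(). Tuple unpacking would consume the iterator just fine on its own, so the wrappers here are defensive rather than strictly required:

    attr, value = list(map(str, 'name=value'.split('=')))
    assert (attr, value) == ('name', 'value')
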
@@ -430,13 +430,13 @@ def info_single():
def fix_default_config():
if not os.path.exists('/etc/abf/mock-urpm/configs/default.cfg'):
if not os.path.exists('/etc/abf/mock/configs/default.cfg'):
if os.getuid() != 0:
print(_("To set up a default configuration file, symbolic link in /etc/abf/mock-urpm/configs have to be created. I need sudo rights to do it."))
print((_("To set up a default configuration file, symbolic link in /etc/abf/mock/configs have to be created. I need sudo rights to do it.")))
exit(1)
files = os.listdir('/etc/abf/mock-urpm/configs')
print(_('Available configurations: '))
files = os.listdir('/etc/abf/mock/configs')
print((_('Available configurations: ')))
out = []
for f in files:
if not f.endswith('.cfg'):
@@ -449,9 +449,9 @@ def fix_default_config():
res = None
while res not in out:
if res is not None:
print(_('"%s" is not a valid configuration.') % res)
res = raw_input(_('Select one (it will be remembered): '))
os.symlink('/etc/abf/mock-urpm/configs/%s.cfg' % res, '/etc/abf/mock-urpm/configs/default.cfg')
print((_('"%s" is not a valid configuration.') % res))
res = input(_('Select one (it will be remembered): '))
os.symlink('/etc/abf/mock/configs/%s.cfg' % res, '/etc/abf/mock/configs/default.cfg')
def run_mock_urpm(binary=True):
fix_default_config()
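
Two mechanical renames meet in this hunk: raw_input() became input() in Python 3 (Python 2's input(), which evaluated the typed text as an expression, is gone), and print statements became print() calls. The doubled parentheses seen throughout the commit, print((...)), look like a harmless 2to3 artifact from converting print statements that already carried parentheses. The input() change in isolation:

    res = input('Select one (it will be remembered): ')   # py2: raw_input(...)
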
@@ -463,17 +463,17 @@ def run_mock_urpm(binary=True):
if not os.path.exists(config_path):
log.error(_("Config file %s can not be found.") % config_path)
if os.path.basename(config_path) == 'default.cfg':
log.error(_("You should create this file or a symbolic link to another config in order to execute 'abf mock-urpm' without --config"))
log.error(_("You should create this file or a symbolic link to another config in order to execute 'abf mock' without --config"))
exit(1)
config_opts = {'plugins': [], 'scm_opts': {}}
config_opts['plugin_conf'] = {'ccache_opts': {}, 'root_cache_opts': {}, 'bind_mount_opts': {'dirs': []}, 'tmpfs_opts': {}, 'selinux_opts': {}}
try:
execfile(config_path)
exec(compile(open(config_path).read(), config_path, 'exec'))
except Exception as ex:
log.error(_("Could not read the contents of '%(path)s': %(exception)s") % {'path': config_path, 'exception': str(ex)})
exit(2)
basedir = ('basedir' in config_opts and config_opts['basedir']) or '/var/lib/abf/mock-urpm'
basedir = ('basedir' in config_opts and config_opts['basedir']) or '/var/lib/abf/mock'
root = config_opts['root']
resultsdir = ('resultdir' in config_opts and config_opts['resultdir']) or '%s/%s/result' % (basedir, root)
src_dir = basedir + '/src'
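
execfile() was removed in Python 3; the replacement reads the config and exec()s the compiled code so the mock config can mutate the pre-seeded config_opts dict. A self-contained sketch (the path is only an example taken from this diff):

    config_opts = {'plugins': [], 'scm_opts': {}}
    config_path = '/etc/abf/mock/configs/default.cfg'   # example path
    with open(config_path) as f:
        exec(compile(f.read(), config_path, 'exec'))    # py2: execfile(config_path)
    print(config_opts.get('root'))
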
@@ -493,14 +493,14 @@
log.error(_('Can not locate a spec file in %s') % src_dir)
exit(1)
spec_path = os.path.join(src_dir, spec_path)
cmd = ['mock-urpm', '-r', command_line.config, '--buildsrpm', '--spec', spec_path, '--sources', src_dir, '--configdir', configs_dir ]
cmd = ['mock', '-r', command_line.config, '--buildsrpm', '--spec', spec_path, '--sources', src_dir, '--configdir', configs_dir ]
if command_line.verbose:
cmd.append('-v')
log.info(_('Executing mock-urpm...'))
log.info(_('Executing mock...'))
try:
res = execute_command(cmd, print_to_stdout=True, exit_on_error=False, shell=False)
except OSError as ex:
log.error(_("Can not execute mock-urpm (%s). Maybe it is not installed?") % str(ex))
log.error(_("Can not execute mock (%s). Maybe it is not installed?") % str(ex))
exit(1)
finally:
shutil.rmtree(src_dir)
@@ -517,10 +517,10 @@
log.info(_('\nSRPM: %s\n') % srpm_path_new)
if binary:
cmd = ['mock-urpm', '-r', command_line.config, '--configdir', configs_dir, srpm_path_new]
cmd = ['mock', '-r', command_line.config, '--configdir', configs_dir, srpm_path_new]
if command_line.verbose:
cmd.append('-v')
log.info(_('Executing mock-urpm...'))
log.info(_('Executing mock...'))
res = execute_command(cmd, print_to_stdout=True, exit_on_error=False, shell=False)
os.remove(srpm_path)
rpms = glob(os.path.join(resultsdir, '*.rpm'))
@@ -530,7 +530,7 @@ def run_mock_urpm(binary=True):
if os.path.exists(new_path):
os.remove(new_path)
shutil.move(rpm, os.getcwd())
print(_('RPM: ' + os.path.join(os.getcwd(), os.path.basename(rpm))))
print((_('RPM: ' + os.path.join(os.getcwd(), os.path.basename(rpm)))))
def localbuild_mock_urpm():
# get project
@@ -769,12 +769,11 @@ def get():
exit(1)
elif len(tmp) == 1:
project_name = proj
if 'github.com' not in cfg['user']['git_uri']:
proj = '%s/%s' % (cfg['user']['default_group'], proj)
proj = '%s/%s' % (cfg['user']['default_group'], proj)
elif len(tmp) == 2:
project_name = tmp[1]
uri = "%s/%s.git" % (cfg['user']['git_uri'], proj)
uri = "%s/%s.git" % (cfg['user']['git_uri'], project_name)
cmd = ['git', 'clone', uri]
if command_line.branch:
cmd += ['-b', command_line.branch]
@@ -986,11 +985,11 @@ def fork_project():
owner_id = owner_group[0].id
elif owner_user:
# ABF doesn't seem to accept forks to platforms of other users
print(_("No group named '%s', will fork to your personal platform") % target_group)
print((_("No group named '%s', will fork to your personal platform") % target_group))
# owner_id = owner_user[0].id
owner_id = 0
else:
print(_("Incorrect target group"))
print((_("Incorrect target group")))
return 1
ProjectCreator.fork_project(models, source_proj.id, owner_id, target_name)
@@ -1015,11 +1014,11 @@ def alias_project():
owner_id = owner_group[0].id
elif owner_user:
# ABF doesn't seem to accept forks to platforms of other users
print(_("No group named '%s', will create alias in your personal platform") % target_group)
print((_("No group named '%s', will create alias in your personal platform") % target_group))
# owner_id = owner_user[0].id
owner_id = 0
else:
print(_("Incorrect target group"))
print((_("Incorrect target group")))
return 1
ProjectCreator.alias_project(models, source_proj.id, owner_id, target_name)
@@ -1041,7 +1040,7 @@ def create_empty():
owner_id = owner_user[0].id
owner_type = "User"
else:
print(_("Incorrect owner data"))
print((_("Incorrect owner data")))
return 1
description = ""
@@ -1067,7 +1066,7 @@ def create():
owner_id = owner_user[0].id
owner_type = "User"
else:
print(_("Incorrect owner data"))
print((_("Incorrect owner data")))
return 1
name = Popen('rpm -qp --qf="%{NAME}" ' + command_line.srpm, stdout=PIPE, shell=True).stdout.read()
@@ -1105,7 +1104,7 @@ def create():
os.chdir(curdir)
shutil.rmtree(tempdir)
else:
print(_("Failed to get information from SRPM"))
print((_("Failed to get information from SRPM")))
return 1
def add_project_to_repository():
@@ -1138,7 +1137,7 @@ def chain_build():
if command_line.infile:
if command_line.project:
print(_("You can't specify '-i' option and project names in command line at the same time."))
print((_("You can't specify '-i' option and project names in command line at the same time.")))
exit(1)
else:
command_line.project = []
@@ -1179,7 +1178,7 @@ def chain_build():
command_line.ID = [str(build_id)]
stat = status(return_status=True)
if stat[0][0] in ["build error", "publishing error", "publishing rejected", "build is canceling", "tests failed", "[testing] Publishing error", "unpermitted architecture"]:
print(_("One of the tasks failed, aborting chain build"))
print((_("One of the tasks failed, aborting chain build")))
exit(1)
elif stat[0][0] in ["build pending", "rerun tests", "rerunning tests", "build started", "build is being published", "[testing] Build is being published'"]:
task_running = True
@@ -1187,10 +1186,10 @@
if stat[0][1] == "container is being published":
task_running = True
elif stat[0][1] == "publishing error":
print(_("Container creation failed for build %d, aborting chain build") % build_id)
print((_("Container creation failed for build %d, aborting chain build") % build_id))
exit(1)
elif stat[0][1] == "waiting for request for publishing container":
print(_("WARNING: Build %d was not published and container was not created") % build_id)
print((_("WARNING: Build %d was not published and container was not created") % build_id))
else:
command_line.build_list.append(str(build_id))
success_builds.append(build_id)
@@ -1247,7 +1246,7 @@ def build(return_ids=False):
#log.debug('Auto resolved branch: ' + as_branch)
if not as_branch:
log.info(_('You\'ve specified a project without a branch.'))
return (None, None, None)
return (None, None)
for repo in proj.repositories:
if repo.platform.name == as_branch or (as_branch == 'master' and repo.platform.name == 'cooker') or (as_branch == 'rosa2014.1' and repo.platform.name == 'current'):
@@ -1273,6 +1272,7 @@ def build(return_ids=False):
commit_hash = command_line.commit
log.debug(_('Git commit hash: %s') % commit_hash)
# get save-to repository
save_to_repository = None
build_for_platform = None
@@ -1415,10 +1415,15 @@
if auto_create_container is None:
auto_create_container = True
if command_line.no_extra_tests is None:
use_extra_tests = True
if command_line.no_cached_chroot:
cached_chroot = False
else:
cached_chroot = True
if command_line.no_extra_tests:
use_extra_tests = False
else:
use_extra_tests = True
if not command_line.auto_publish and not command_line.auto_publish_status:
command_line.auto_publish_status = default_publish_status
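
The chroot flag flips polarity here: caching becomes the default and --no-cached-chroot opts out, mirroring the existing --no-extra-tests. The four added if/else branches reduce to two negations; a compact equivalent (a sketch, not what the commit committed):

    cached_chroot = not command_line.no_cached_chroot
    use_extra_tests = not command_line.no_extra_tests
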
@@ -1442,7 +1447,7 @@
command_line.auto_publish_status or BuildList.auto_publish_statuses[0],
arches,
command_line.skip_personal,
command_line.cached_chroot,
cached_chroot,
command_line.save_chroot,
auto_create_container,
command_line.testing,
@@ -1566,7 +1571,7 @@ def locate():
if not command_line.action: # show location
if not command_line.project:
print(_("To show a project location, you have to specify a project name ('-p' option)"))
print((_("To show a project location, you have to specify a project name ('-p' option)")))
return
tmp = command_line.project.rstrip('/').split('/')
@@ -1579,11 +1584,11 @@
proj = command_line.project
if proj not in projects_cfg or 'location' not in projects_cfg[proj] or not projects_cfg[proj]['location']:
print(_('error: project %s can not be located') % proj)
print((_('error: project %s can not be located') % proj))
exit(1)
path = projects_cfg[proj]['location']
if not os.path.isdir(path):
print(_('error: project is not located in "%s" anymore') % path)
print((_('error: project is not located in "%s" anymore') % path))
projects_cfg[proj]['location'] = ''
exit(1)
print(path)
@@ -1601,7 +1606,7 @@ def get_true_false(value, key):
return True
if value.lower() == "false":
return False
print(_("Please specify 'true' or 'false' for %s") % key)
print((_("Please specify 'true' or 'false' for %s") % key))
exit(1)
def update():
@@ -1666,7 +1671,7 @@ def show():
if t is None:
proj = get_project(models, must_exist=True, name=command_line.project)
for i in proj.required_fields:
print(_("%s: %s") % (i, getattr(proj, i)))
print((_("%s: %s") % (i, getattr(proj, i))))
elif t == 'buildlists':
proj = get_project(models, must_exist=True, name=command_line.project)
res = models.jsn.get_project_buildlists(proj.id, '' ,1)
@@ -1727,7 +1732,7 @@ if __name__ == '__main__':
default_publish_status = cfg['main']['default_publish_status']
else:
default_publish_status = BuildList.auto_publish_statuses[0]
print(_("Incorrect value of 'default_publish_status' in config file, ignoring. Possible valus are: ") + "'" + str.join("', '", BuildList.auto_publish_statuses) + "'")
print((_("Incorrect value of 'default_publish_status' in config file, ignoring. Possible valus are: ") + "'" + str.join("', '", BuildList.auto_publish_statuses) + "'"))
else:
default_publish_status = BuildList.auto_publish_statuses[0]
cfg['main']['default_publish_status'] = BuildList.auto_publish_statuses[0]
@@ -1748,7 +1753,7 @@ if __name__ == '__main__':
# These commands don't read or update projects file, so don't even read it
# if one of these commands is launched
commands_wo_cache = ['help','alias','put','store','fetch','show','mock-urpm','rpmbuild','publish','copy','pullrequest','fork','create','add','remove','search','info']
commands_wo_cache = ['help','alias','put','store','fetch','show','mock','rpmbuild','publish','copy','pullrequest','fork','create','add','remove','search','info']
if command_line.func.__name__ in commands_wo_cache:
command_line.skip_proj_cfg_update = True

abf/api/jsn.py

@@ -1,4 +1,4 @@
import urllib2, urllib
import urllib.request, urllib.error, urllib.parse, urllib.request, urllib.parse, urllib.error
import re
import json
import os
@@ -6,7 +6,7 @@ import base64
import pdb
import uuid
import tempfile
import httplib
import http.client
import mimetypes
import base64
import hashlib
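
Python 3 split Python 2's urllib/urllib2/httplib into purpose-named modules; 2to3 rewrites the imports mechanically, which is also why urllib.request and friends appear twice on the rewritten import line (it merged the urllib and urllib2 conversions without deduplicating). The mapping used throughout this file, with an illustrative request:

    import urllib.request, urllib.error, urllib.parse
    import http.client

    # urllib2.Request         -> urllib.request.Request
    # urllib2.urlopen         -> urllib.request.urlopen
    # urllib2.HTTPError       -> urllib.error.HTTPError
    # urllib.urlencode        -> urllib.parse.urlencode
    # httplib.HTTPConnection  -> http.client.HTTPConnection
    req = urllib.request.Request('https://abf.openmandriva.org/api/v1/user.json')  # illustrative URL
    try:
        with urllib.request.urlopen(req) as resp:
            body = resp.read()                  # bytes in Python 3
    except urllib.error.HTTPError as ex:
        print(ex.code)
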
@@ -66,7 +66,10 @@ class AbfJson(object):
'''
# but it works!
self.base64_auth_string = base64.standard_b64encode('%s:%s' % (login, password)).replace('\n', '')
lpw = '%s:%s' % (login, password)
encoded_lpw = base64.standard_b64encode(lpw.encode())
self.base64_auth_string = encoded_lpw.decode("utf-8")
# print(self.base64_auth_string)
self.log = log
errors = {
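
base64.standard_b64encode() operates on bytes and returns bytes in Python 3, hence the added encode()/decode() round-trip. The dropped .replace('\n', '') was already a no-op: b64encode output never contains newlines (that was a property of the old base64.encodestring()). Standalone:

    import base64

    login, password = 'user', 'secret'   # placeholder credentials
    token = base64.standard_b64encode(('%s:%s' % (login, password)).encode()).decode('utf-8')
    headers = {'Authorization': 'Basic %s' % token}
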
@@ -91,7 +94,7 @@ class AbfJson(object):
res = json.loads(response_string)
except ValueError as ex:
self.log.error(_("Internal server error: it has returned non-json data. "))
print(response_string)
# print(response_string)
exit(1)
m = None
if 'message' in res and res['message'] not in AbfJson.good_messages:
@@ -129,8 +132,10 @@ class AbfJson(object):
def get_url_contents(self, path, GET=None, POST=None, file_store=False, PUT=None, DELETE=None):
url = ((file_store and self.file_store_url) or self.abf_url) + path
# print(url)
if GET:
get_string = urllib.urlencode(GET)
get_string = urllib.parse.urlencode(GET)
# print(get_string)
if '?' in url:
url = url + '&' + get_string
else:
@@ -140,31 +145,31 @@
etag = None
if POST:
post_json = json.dumps(POST).encode('utf-8')
request = urllib2.Request(url, post_json, {'Content-Type': 'application/json'})
request = urllib.request.Request(url, post_json, {'Content-Type': 'application/json'})
elif PUT:
put_json = json.dumps(PUT).encode('utf-8')
request = urllib2.Request(url, put_json, {'Content-Type': 'application/json'})
request = urllib.request.Request(url, put_json, {'Content-Type': 'application/json'})
request.get_method = lambda: 'PUT'
elif DELETE:
data_json = json.dumps(DELETE).encode('utf-8')
request = urllib2.Request(url, data_json, {'Content-Type': 'application/json'})
request = urllib.request.Request(url, data_json, {'Content-Type': 'application/json'})
request.get_method = lambda: 'DELETE'
else:
request = urllib2.Request(url)
request = urllib.request.Request(url)
if cache_etags.has_key(url):
if url in cache_etags:
etag = cache_etags.get(url)
if cache_data.has_key(etag):
if etag in cache_data:
self.log.debug(_("It was cached! ETag: ") + etag)
request.add_header("If-None-Match", etag)
request.add_header("Authorization", "Basic %s" % self.base64_auth_string)
etag_new = None
try:
result = urllib2.urlopen(request)
result = urllib.request.urlopen(request)
res = result.read()
etag_new = result.headers.getheaders('ETag')[0]
except urllib2.HTTPError as ex:
etag_new = result.headers.get_all('ETag')[0]
except urllib.error.HTTPError as ex:
if ex.code == 304: # data was not modified
res = cache_data.get(etag)
self.log.debug(_('Getting cached result (cache was validated)'))
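
Two more Python 3 removals surface in this hunk: dict.has_key() is gone (membership is spelled `key in dict`), and HTTP response headers now use the email.message API, so getheaders() becomes get_all(). Both in miniature:

    cache_etags = {'https://example.org/api': 'W/"abc"'}   # illustrative cache
    url = 'https://example.org/api'
    if url in cache_etags:                 # py2: cache_etags.has_key(url)
        etag = cache_etags[url]
    # for a urllib.request response object r:
    #     r.headers.get_all('ETag')        # py2: r.headers.getheaders('ETag')
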
@@ -191,7 +196,7 @@ class AbfJson(object):
res = self.process_response(res)
# print('RAW OUTPUT', res)
# print 'RAW OUTPUT', res
return res
@@ -203,22 +208,22 @@
@staticmethod
def __encode_multipart_formdata(body, boundary, fields = [], files = []):
for key, value in fields:
body.write('--%s\r\n' % boundary)
body.write(b'--%s\r\n' % boundary.encode())
body.write('Content-Disposition: form-data; name="%s"\r\n' % key)
body.write('Content-Type: text/plain\r\n\r\n')
body.write(b'Content-Disposition: form-data; name="%s"\r\n' % key.encode())
body.write(b'Content-Type: text/plain\r\n\r\n')
body.write(value)
body.write('\r\n')
body.write(b'\r\n')
for key, value in files:
content_type = mimetypes.guess_type(value)[0] or 'application/octet-stream'
body.write('--%s\r\n' % boundary)
body.write(b'--%s\r\n' % boundary.encode())
body.write('Content-Disposition: form-data; name="%s"; filename="%s"\r\n' % (key, value))
body.write('Content-Type: %s\r\n\r\n' % content_type)
body.write(b'Content-Disposition: form-data; name="%s"; filename="%s"\r\n' % (key.encode(), value.encode()))
body.write(b'Content-Type: %s\r\n\r\n' % content_type.encode())
fobj = open(value, 'rb')
@@ -231,9 +236,9 @@ class AbfJson(object):
fobj.close()
body.write('\r\n')
body.write(b'\r\n')
body.write('--%s--\r\n' % boundary)
body.write(b'--%s--\r\n' % boundary.encode())
def compute_sha1(self, file_name):
fd = open(file_name, 'rb')
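
The multipart body is built in a binary buffer, so under Python 3 every write must be bytes; %-formatting on bytes (PEP 461, Python 3.5+) keeps the port mostly mechanical, with str values encoded at the boundary. A reduced sketch:

    import io

    body, boundary = io.BytesIO(), 'abf-boundary-1234'   # example boundary
    body.write(b'--%s\r\n' % boundary.encode())
    body.write(b'Content-Disposition: form-data; name="%s"\r\n' % b'field')  # illustrative field name
    body.write(b'Content-Type: text/plain\r\n\r\n')
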
@@ -273,7 +278,7 @@ class AbfJson(object):
body.seek(0)
if not silent:
self.log.info(_('Uploading %(file)s (%(size)s)') % {'file': file_name, 'size': bytes2human(os.stat(file_name).st_size)})
conn = httplib.HTTPConnection(self.file_store_domain, 80)
conn = http.client.HTTPConnection(self.file_store_domain, 80)
content_type = 'multipart/form-data; boundary=%s' % boundary
headers = {'Content-Type' : content_type, 'Content-Length' : length, "Authorization": "Basic %s" % self.base64_auth_string}
conn.request('POST', '/api/v1/upload', body, headers)
@@ -293,8 +298,8 @@
def fetch_file(self, sha_hash, path):
URL = self.file_store_url + '/api/v1/file_stores/' + sha_hash
try:
response = urllib2.urlopen(URL)
except urllib2.HTTPError as ex:
response = urllib.request.urlopen(URL)
except urllib.error.HTTPError as ex:
if ex.code == 404: # file not found on File-Store
raise PageNotFoundError(_('File with hash %s can not be downloaded from File-Store.') % sha_hash)
else:

abf/console/config.py

@@ -1,4 +1,4 @@
import ConfigParser
import configparser
import os
import sys
import getpass
@@ -11,9 +11,9 @@ import fcntl
#
# cfg = Config()
# cfg['aaa']['bbb'] = 'ccc'
# print(cfg['aaa']['bbb'])
# print(cfg['aaa'].pop('bbb'))
# print(cfg.pop('aaa'))
# print cfg['aaa']['bbb']
# print cfg['aaa'].pop('bbb')
# print cfg.pop('aaa')
#####################################################
VERSION = 4
@@ -58,7 +58,7 @@ class Section(dict):
self.save()
def save(self):
configfile = open(self.conf_path, 'wb')
configfile = open(self.conf_path, 'w')
fcntl.flock(configfile, fcntl.LOCK_EX)
self.config.write(configfile)
fcntl.flock(configfile, fcntl.LOCK_UN)
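
configparser in Python 3 writes str, not bytes, so the config file must now be opened in text mode ('w' rather than 'wb'); the fcntl locking pattern is unchanged. A runnable sketch:

    import configparser, fcntl

    cp = configparser.RawConfigParser()
    cp.add_section('main')
    cp.set('main', 'abf_url', 'https://abf.openmandriva.org')
    with open('/tmp/abfcfg.example', 'w') as configfile:   # example path
        fcntl.flock(configfile, fcntl.LOCK_EX)
        cp.write(configfile)                               # writes str, needs text mode
        fcntl.flock(configfile, fcntl.LOCK_UN)
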
@@ -76,12 +76,12 @@
return super(Section, self).__getitem__(key)
try:
res = self.config.get(self.section, key)
except ConfigParser.NoOptionError as ex:
except configparser.NoOptionError as ex:
if key in ['default_branch', 'default_publish_status']:
print(_('non-critical error in config "%(path)s": %(exception)s') % {'path': self.conf_path, 'exception': str(ex)})
print((_('non-critical error in config "%(path)s": %(exception)s') % {'path': self.conf_path, 'exception': str(ex)}))
return ''
else:
print(_('error in config "%(path)s": %(exception)s') % {'path': self.conf_path, 'exception': str(ex)})
print((_('error in config "%(path)s": %(exception)s') % {'path': self.conf_path, 'exception': str(ex)}))
exit(1)
def pop(self, key, init=None):
@@ -94,8 +94,8 @@
return res
class Config(dict):
default_url = 'https://abf.rosalinux.ru'
default_filestore_url = 'http://file-store.rosalinux.ru'
default_url = 'https://abf.openmandriva.org'
default_filestore_url = 'http://file-store.openmandriva.org'
default_log_path = '/var/log/abf.log'
def __init__(self, conf_path='~/.abfcfg', main_conf=True):
self.conf_path = os.path.expanduser(conf_path)
@@ -106,7 +106,7 @@
init = True
self.config = ConfigParser.RawConfigParser()
self.config = configparser.RawConfigParser()
self.config.read(self.conf_path)
sections = self.config.sections()
@@ -117,7 +117,7 @@
if main_conf and ('config_version' not in self['main'] or int(self['main']['config_version']) != VERSION):
print(_("Configuration schema have been changed or config file have been corrupted, rebuilding config..."))
print((_("Configuration schema have been changed or config file have been corrupted, rebuilding config...")))
init = True
if init and main_conf:
@@ -152,10 +152,10 @@
domain = domain[:-1] # remove trailing '/'
parts = domain.split('//')
if len(parts) == 1:
print(_('No protocol part specified (http://, https://, etc.)'))
print((_('No protocol part specified (http://, https://, etc.)')))
continue
if len(parts) > 2:
print(_('Double slash must present only once (in a protocol part)'))
print((_('Double slash must present only once (in a protocol part)')))
continue
done = True
return domain
@@ -175,9 +175,9 @@
self['user']['password'] = password
parts = self['main']['abf_url'].split('//')
git_uri = "%(protocol)s//%(user)s@%(domain)s" % \
dict(protocol=parts[0], user=self['user']['login'], domain=parts[1])
# git_uri = "ssh://git@github.com/OpenMandrivaAssociation"
#git_uri = "%(protocol)s//%(user)s@%(domain)s" % \
# dict(protocol=parts[0], user=self['user']['login'], domain=parts[1])
git_uri = "ssh://git@github.com/OpenMandrivaAssociation"
self['user']['git_uri'] = git_uri
@@ -191,7 +191,7 @@
self['user']['default_group'] = res or self['user']['login']
if 'default_build_platform' not in self['user']:
def_bp = 'rosa2014.1'
def_bp = 'cooker'
res = ask_user('Default platform [%s]: ' % def_bp, can_be_empty=True)
self['user']['default_build_platform'] = res or def_bp
@@ -247,8 +247,8 @@
self['alias']['sp'] = 'search projects'
self['main']['config_version'] = VERSION
print(_('Configuration has been completed'))
print(_('Now you can execute "abf locate update-recursive -d PATH", where PATH is your directory with cloned ABF projects. It will let you use the "abfcd <project>" command to simply cd to the project directory.\n\n'))
print((_('Configuration has been completed')))
print((_('Now you can execute "abf locate update-recursive -d PATH", where PATH is your directory with cloned ABF projects. It will let you use the "abfcd <project>" command to simply cd to the project directory.\n\n')))

abf/console/misc.py

@@ -11,8 +11,8 @@ import re
import yaml
import tempfile
import magic
import sha
import urllib2
import hashlib
import urllib.request, urllib.error, urllib.parse
from abf.console.log import Log
from abf.api.exceptions import *
@@ -51,7 +51,7 @@ def get_project_name(path=None):
owner_name, project_name = m.groups()
# Can't tell abf owner from github loc, so let's hardcode it
if "OpenMandriva" in owner_name:
return ("openmandriva", project_name)
return ('openmandriva', project_name)
return (owner_name, project_name)
return (None, None)
except ReturnCodeNotZero:
@@ -64,7 +64,7 @@ def parse_spec_silently(ts, spec_path):
try:
os.dup2(sys.stderr.fileno(), stderr)
os.dup2(sys.stdout.fileno(), stdout)
se = file('/dev/null', 'w')
se = open('/dev/null', 'w')
os.dup2(se.fileno(), sys.stderr.fileno())
os.dup2(se.fileno(), sys.stdout.fileno())
rpm_spec = ts.parseSpec(spec_path)
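
Two removals in one file: the ancient sha module gives way to hashlib, and the file() builtin to open(). The hashlib replacement in isolation:

    import hashlib

    with open('/etc/hostname', 'rb') as fd:           # any file, read as bytes
        digest = hashlib.sha1(fd.read()).hexdigest()  # py2: sha.new(...).hexdigest()
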
@@ -372,6 +372,7 @@ def execute_command(command, shell=False, cwd=None, timeout=0, raiseExc=True, pr
stdin=open("/dev/null", "r"),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
encoding='utf-8',
env=env,
cwd=cwd
)
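
Passing encoding='utf-8' (Python 3.6+) makes Popen's pipes text streams, so everything downstream keeps handling str instead of bytes, which is the least invasive way to port code that concatenates and prints child output. In isolation:

    import subprocess

    child = subprocess.Popen(['echo', 'hello'],
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             encoding='utf-8')
    out, err = child.communicate()
    assert out == 'hello\n' and isinstance(out, str)
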
@@ -381,7 +382,7 @@
except Exception as ex:
# kill children if they arent done
if type(ex) == IOError and ex.errno==4:
print(_('Process execution has been terminated'))
print((_('Process execution has been terminated')))
exit()
try:
if child is not None and child.returncode is None:
@@ -433,7 +434,7 @@ def logOutput(fds, start=0, timeout=0, print_to_stdout=False):
events = epoll.poll(1)
for fileno, event in events:
if event & select.EPOLLIN:
#print(fileno, event)
#print (fileno, event)
if fileno == fds[0].fileno():
r = fds[0].read()
output += r
@@ -441,6 +442,7 @@
sys.stdout.write(r)
else:
r = fds[1].read()
# print(r)
output += r
if print_to_stdout:
sys.stdout.write(r)
@@ -489,7 +491,7 @@ def fetch_files(models, yaml_path, file_names=None):
try:
models.jsn.fetch_file(to_fetch[file_name], path)
except AbfApiException as ex:
print(_('error: ') + str(ex))
print((_('error: ') + str(ex)))
def upload_files(models, min_size, path=None, remove_files=True, upload_all=False):
log.debug('Uploading files for directory ' + str(path))
@@ -587,7 +589,7 @@ def upload_files(models, min_size, path=None, remove_files=True, upload_all=Fals
yaml_data['removed_sources'] = {}
yaml_data['removed_sources'][item] = h
log.info(_('Removing %(item)s:%(hash)s from .abf.yml') % {'item': item, 'hash': h })
yaml_files[src] = sha_hash.encode()
yaml_files[src] = sha_hash
yaml_file_changed = True
else:
log.debug(_('Hash for file %s is already correct') % src)
@@ -602,7 +604,7 @@ def upload_files(models, min_size, path=None, remove_files=True, upload_all=Fals
log.debug(_('Writing the new .abf.yml file...'))
yaml_data['sources'] = yaml_files
with open(yaml_path, 'w') as fd:
yaml.dump(yaml_data, fd, default_flow_style=False)
yaml.dump(yaml_data, fd, default_flow_style=False, allow_unicode=True)
return errors_count
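
allow_unicode=True makes yaml.dump emit non-ASCII file names literally instead of as \uXXXX escapes, which pairs with dropping the sha_hash.encode() above: under Python 3 the YAML document is str end to end. For example:

    import yaml

    data = {'sources': {'пример.tar.gz': 'deadbeef'}}   # hypothetical entry
    print(yaml.dump(data, default_flow_style=False, allow_unicode=True))
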
@@ -625,7 +627,7 @@ def human2bytes(s):
num = float(num)
letter = s.strip().lower()
ss = None
for name, sset in SYMBOLS.items():
for name, sset in list(SYMBOLS.items()):
if letter in sset:
ss = sset
break

abf/model.py

@@ -2,8 +2,7 @@
from beaker.cache import Cache
from beaker.util import parse_cache_config_options
import logging
import urllib2, urllib
import string
import urllib.request, urllib.error, urllib.parse, urllib.request, urllib.parse, urllib.error
from datetime import datetime
from abf.api.exceptions import *
@@ -14,8 +13,43 @@ log = logging.getLogger('models')
lt_cache = Cache('abf', expire = 86400, type='file', data_dir='/tmp/abf_cache/data', lock_dir='/tmp/abf_cache/data')
st_cache = Cache('abf', expire = 3600, type='file', data_dir='/tmp/abf_cache/data', lock_dir='/tmp/abf_cache/data')
status_by_id = {
0: 'build complete',
1: 'platform not found',
2: 'platform pending',
3: 'project not found',
4: 'project version not found',
6: 'project source error',
555: 'dependencies error',
666: 'build error',
777: 'packages fail',
2000: 'build pending',
2500: 'rerun tests',
2550: 'rerunning tests',
3000: 'build started',
4000: 'waiting for response',
5000: 'build canceled',
6000: 'build has been published',
7000: 'build is being published',
8000: 'publishing error',
9000: 'publishing rejected',
10000: 'build is canceling',
11000: 'tests failed',
12000: '[testing] Build has been published',
13000: '[testing] Build is being published',
14000: '[testing] Publishing error',
15000: 'unpermitted architecture'
}
container_status_by_id = {
4000: 'waiting for request for publishing container',
6000: 'container has been published',
7000: 'container is being published',
8000: 'publishing error'
}
def get_cached(cache, cache_key, func, *args, **kwargs):
if cache and cache.has_key(cache_key):
if cache and cache_key in cache:
val = cache.get(cache_key)
else:
val = func(*args, **kwargs)
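
Hoisting the two status tables to module scope lets other code use them without touching BuildList; the class re-exports them below (status_by_id = status_by_id), so existing call sites keep working, and the reverse name-to-code lookup stays a one-liner:

    status_by_id = {0: 'build complete', 666: 'build error'}   # excerpt of the table above
    status_by_name = {name: code for code, name in status_by_id.items()}
    assert status_by_name['build error'] == 666
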
@@ -43,7 +77,7 @@ class Model(object):
if ID:
cache_key = '%s-%s-%s' % (self.models.abf_url, self.__class__.__name__, ID)
if st_cache and st_cache.has_key(cache_key):
if st_cache and cache_key in st_cache:
#read cached value
log.debug( _('Loading %(name)s %(id)s from cache') % {'name': self.__class__.__name__, 'id': ID})
self.stub = False
@@ -93,7 +127,7 @@ class Model(object):
class Platform(Model):
required_fields = ['id', 'name', 'description', 'parent_platform_id', 'created_at', 'updated_at', 'released',
required_fields = ['id', 'name', 'parent_platform_id', 'created_at', 'updated_at', 'released',
'owner', 'visibility', 'platform_type', 'distrib_type', 'repositories']
filter_dict = { 'id': '*', 'name': '*', 'visibility': '*', 'owner': '*', 'platform_type': '*', 'repositories': '*', 'page': '1' }
@@ -335,7 +369,7 @@ class Group(Model):
return self.uname
class Project(Model):
required_fields = ['id', 'name', 'fullname', 'git_url', 'created_at', 'updated_at', 'visibility', 'ancestry',
required_fields = ['id', 'name', 'fullname', 'git_url', 'created_at', 'updated_at', 'visibility', 'ancestry',
'default_branch', 'is_package', 'owner', 'repositories', 'owner_type', 'maintainer', 'project_statistics',]
filter_dict = { 'id': '*', 'name': '*', 'page': '1' }
@@ -348,7 +382,7 @@ class Project(Model):
@staticmethod
def get_by_name(models, key):
''' key is a pair (owner_name, project_name), or just owner_name/project_name'''
if type(key) is unicode or type(key) is str:
if type(key) is str or type(key) is str:
items = key.split('/')
if len(items) != 2:
raise Exception(_('Invalid key: ') + key)
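
Python 3 has no separate unicode type, so the old `type(key) is unicode or type(key) is str` collapses, and the mechanical port leaves a doubled `type(key) is str or type(key) is str`. The idiomatic py3 spelling would be a single isinstance check (a sketch, not what the commit applied):

    key = 'openmandriva/abf-console-client'   # illustrative key
    if isinstance(key, str):
        owner_name, project_name = key.split('/')
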
@@ -457,45 +491,14 @@
class BuildList(Model):
required_fields = ['id', 'container_path', 'status', 'status_string', 'package_version', 'project', 'created_at', 'updated_at',
'build_for_platform', 'save_to_repository', 'arch', 'update_type', 'extra_repositories',
'commit_hash', 'duration', 'include_repos', 'priority', 'build_log_url', 'advisory', 'mass_build', 'log_url', 'chroot_tree']
'build_for_platform', 'save_to_repository', 'arch', 'extra_repositories',
'commit_hash', 'duration', 'include_repos', 'priority', 'build_log_url', 'mass_build', 'log_url', 'chroot_tree']
status_by_id = status_by_id
container_status_by_id = container_status_by_id
status_by_id = {
0: 'build complete',
1: 'platform not found',
2: 'platform pending',
3: 'project not found',
4: 'project version not found',
6: 'project source error',
555: 'dependencies error',
666: 'build error',
777: 'packages fail',
2000: 'build pending',
2500: 'rerun tests',
2550: 'rerunning tests',
3000: 'build started',
4000: 'waiting for response',
5000: 'build canceled',
6000: 'build has been published',
7000: 'build is being published',
8000: 'publishing error',
9000: 'publishing rejected',
10000: 'build is canceling',
11000: 'tests failed',
12000: '[testing] Build has been published',
13000: '[testing] Build is being published',
14000: '[testing] Publishing error',
15000: 'unpermitted architecture'
}
status_by_name = dict([(status_by_id[x], x) for x in status_by_id])
final_statuses = [1, 2, 3, 4, 666, 5000, 6000, 8000, 9000, 12000, 14000]
container_status_by_id = {
4000: 'waiting for request for publishing container',
6000: 'container has been published',
7000: 'container is being published',
8000: 'publishing error'
}
container_status_by_name = dict([(container_status_by_id[x], x) for x in container_status_by_id])
def get_init_data(self, ID):
@@ -614,8 +617,7 @@
}
build_platforms = {}
if not skip_personal and string.find(save_to_repository.platform.name,"_personal") > 0:
if not skip_personal and save_to_repository.platform.name.find("_personal") > 0:
DATA['extra_repositories'].append(save_to_repository.id)
for repo in repositories:
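
The string module's function forms were removed in Python 3, so string.find(s, sub) becomes the str method. Note that find() returns -1 when the substring is absent and 0 for a match at index 0, so the `> 0` comparison preserves the exact old semantics:

    name = 'rosa2014.1_personal'        # illustrative platform name
    if name.find('_personal') > 0:      # py2: string.find(name, '_personal') > 0
        print('personal platform, adding its repository')
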
@@ -825,10 +827,10 @@ class ProjectCreator(Model):
class Models(object):
_instance = {}
def __new__(cls, abf_url, file_store_url, login, password, *args, **kwargs):
def __cmds__(cls, abf_url, file_store_url, login, password, *args, **kwargs):
tmp = '%s:%s:%s:%s' % (abf_url, file_store_url, login, password)
if tmp not in cls._instance:
cls._instance[tmp] = super(Models, cls).__new__(
cls._instance[tmp] = super(Models, cls).__cmds__(
cls, abf_url, file_store_url, login, password, *args, **kwargs)
return cls._instance[tmp]
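
As rendered here, this hunk renames __new__ to __cmds__, which Python never calls implicitly, so the per-credentials singleton cache would go dead and every Models(...) call would build a fresh instance. A minimal sketch of the py3-compatible singleton the original implemented (note that object.__new__ must not be passed the extra constructor arguments under Python 3):

    class Models(object):
        _instance = {}

        def __new__(cls, abf_url, file_store_url, login, password, *args, **kwargs):
            key = '%s:%s:%s:%s' % (abf_url, file_store_url, login, password)
            if key not in cls._instance:
                # py2 passed the args through; py3's object.__new__ rejects them
                cls._instance[key] = super(Models, cls).__new__(cls)
            return cls._instance[key]
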

bash_autocomplete

@@ -69,7 +69,7 @@ __abf_rpmbuild()
__abf_build()
{
__abf_opts "--branch --build-list --tag --commit --target-platform --arch --repository --save-to-repository --auto-publish-status --auto-publish --update-type --skip-spec-check --auto-create-container --no-extra-tests --cached-chroot --save-chroot --testing --external-nodes"
__abf_opts "--branch --build-list --tag --commit --target-platform --arch --repository --save-to-repository --auto-publish-status --auto-publish --update-type --skip-spec-check --auto-create-container --no-extra-tests --no-cached-chroot --save-chroot --testing --external-nodes"
update_types="security bugfix enhancement recommended newpackage"
external_nodes_vals="owned everything"
auto_publish_status="none default testing"
@@ -116,7 +116,7 @@ __abf_build()
__abf_chain_build()
{
__abf_opts "--branch --build-list --tag --infile --commit --target-platform --arch --repository --save-to-repository --auto-publish-status --auto-publish --skip-spec-check --auto-create-container --no-extra-tests --cached-chroot --testing"
__abf_opts "--branch --build-list --tag --infile --commit --target-platform --arch --repository --save-to-repository --auto-publish-status --auto-publish --skip-spec-check --auto-create-container --no-extra-tests --no-cached-chroot --testing"
auto_publish_status="none default testing"
if [ ${prev} == --auto-publish-status ] ; then

gettext template (.pot)

@@ -513,7 +513,7 @@ msgid "enable automatic creation of container"
msgstr ""
#: ../abf.py:245 ../abf.py:289
msgid "use cached chroot for the build"
msgid "do NOT use cached chroot for the build"
msgstr ""
#: ../abf.py:246 ../abf.py:290

Russian translation (.po)

@@ -604,8 +604,8 @@ msgid "enable automatic creation of container"
msgstr "включить автоматическое создание контейнера"
#: ../abf.py:245 ../abf.py:289
msgid "use cached chroot for the build"
msgstr "использовать для сборки кэшированное окружение"
msgid "do NOT use cached chroot for the build"
msgstr "не используйте кэшированную среду для сборки"
#: ../abf.py:246 ../abf.py:290
msgid "save build chroot in case of failure"