Make master the latest branch

This commit is contained in:
Denis Silakov 2013-05-17 12:11:20 +04:00
parent 37004b0d4e
commit f8dd2a66ba
11 changed files with 241 additions and 98 deletions

View file

@ -1,6 +1,6 @@
Name: abf-console-client
Version: 1.8
Release: 1
Version: 1.9
Release: 3
Summary: Console client for ABF (https://abf.rosalinux.ru)
Group: System/Configuration/Packaging
License: GPLv2
@ -56,7 +56,7 @@ ln -s %{_datadir}/bash-completion/abf %{buildroot}/%{_sysconfdir}/bash_completio
%{_sysconfdir}/abf/mock-urpm/configs/*
%dir /var/cache/abf/
%dir /var/cache/abf/mock-urpm/
%dir /var/cache/abf/mock-urpm/src
%dir /var/lib/abf/mock-urpm/src
%dir /var/lib/abf/
%dir /var/lib/abf/mock-urpm

159
abf.py
View file

@ -8,6 +8,7 @@ import os
import shutil
import platform
from glob import glob
import shlex
from abf.console.config import Config, mkdirs
from abf.console.log import Log
@ -74,6 +75,25 @@ def test():
log.info('Datamodel seems to work fine')
def apply_aliases():
    """Expand user-defined aliases found in ``sys.argv`` in place.

    Every occurrence of an alias name is replaced by the words it stands
    for (split with :func:`shlex.split`, so quoted arguments survive).
    Expansion is skipped when the invocation is the 'alias' management
    command itself, so that e.g. ``abf alias remove st`` does not expand
    ``st`` before the handler can remove it.
    """
    # Detect the 'alias' management command: everything before the word
    # 'alias' must be a generic option or help token for it to count.
    if 'alias' in sys.argv:
        leading = sys.argv[1:sys.argv.index('alias')]
        generic = ('-h', '-v', '--help', '--verbose', 'help')
        if all(tok in generic for tok in leading):
            return
    for name in cfg['alias']:
        if name not in sys.argv:
            continue
        pos = sys.argv.index(name)
        # Splice the expansion into argv where the alias name stood.
        sys.argv[pos:pos + 1] = shlex.split(cfg['alias'][name])
def parse_command_line():
global command_line
@ -88,6 +108,13 @@ def parse_command_line():
parser_help.add_argument('command', action='store', nargs='?', help='a command to show help for')
parser_help.set_defaults(func=help)
# alias
parser_alias = subparsers.add_parser('alias', help='Manage aliases')
alias_commands = ['list', 'add', 'remove']
parser_alias.add_argument('command', action='store', choices=alias_commands)
parser_alias.add_argument('options', action='store', nargs='*', help='name and alias (not quoted, e. g. "abf alias add sg search groups") for adding, only name for removing.')
parser_alias.set_defaults(func=alias)
# get
parser_get = subparsers.add_parser('get', help='clone a project from ABF')
parser_get.add_argument('project', action='store', help='project name. ([group/]project). If no group specified, '
@ -96,15 +123,19 @@ def parse_command_line():
parser_get.set_defaults(func=get)
# put
parser_put = subparsers.add_parser('put', help='Upload large binary files to File-Store, commit all the changes (git add --all), commit with a message specified and push')
parser_put.add_argument('-m', '--message', action='store', help='A message to commit with. It is ignored in case of "--do-not-upload"')
parser_put.add_argument('-u', '--upload-only', action='store_true', help='Upload large files to file-store and exit')
parser_put.add_argument('-d', '--do-not-upload', action='store', help='Do nothing with .abf.yml, just add, commit and push')
parser_put = subparsers.add_parser('put', help='Upload large binary files to File-Store and update (or create) .abf.yml file. Can also commit and push changes.')
parser_put.add_argument('-m', '--message', action='store', help='With this option specified, "git add --all", "git commit -m MSG" and "git push" will be executed.')
parser_put.add_argument('-s', '--minimal-file-size', default='0', action='store', help='The minimal file size to upload to File-Store. '
'Default is 0B.')
parser_put.add_argument('-r', '--do-not-remove-files', action='store_true', help='By default files are being removed on uploading. Override this behavior.')
parser_put.add_argument('-n', '--do-not-remove-files', action='store_true', help='By default files are being removed on uploading. Override this behavior.')
parser_put.add_argument('-u', '--upload-only', action='store_true', help='Deprecated! Affects nothing. Saved for compatibility reasons and will be removed later.')
parser_put.set_defaults(func=put)
# store
parser_store = subparsers.add_parser('store', help='Upload a given file to File-Store. Prints a sha1 hash or error message (with non-zero return code).')
parser_store.add_argument('path', action='store', help='Path to file')
parser_store.set_defaults(func=store)
# fetch
parser_fetch = subparsers.add_parser('fetch', help='Download all the files listed in .abf.yml from File-Store to local directory.')
parser_fetch.add_argument('-o', '--only', action='append', help='Limit the list of downloaded files to this file name(s). This option can be specified more than once.')
@ -275,10 +306,11 @@ def run_mock_urpm(binary=True):
if os.path.exists(src_dir):
shutil.rmtree(src_dir)
src = get_root_git_dir()
cmd = ['abf', 'fetch']
if command_line.verbose:
cmd.append('-v')
execute_command(cmd, print_to_stdout=True, exit_on_error=True)
if os.path.exists(os.path.join(src, '.abf.yml')):
cmd = ['abf', 'fetch']
if command_line.verbose:
cmd.append('-v')
execute_command(cmd, print_to_stdout=True, exit_on_error=True, cwd=src)
shutil.copytree(src, src_dir, symlinks=True)
spec_path = find_spec(src_dir)
@ -335,7 +367,46 @@ def localbuild_mock_urpm():
except OSError, ex:
log.error(str(ex))
exit(1)
def alias():
    """Handle the 'abf alias' management command.

    Dispatches on ``command_line.command``:
      * 'list'   -- print every configured alias with its expansion;
      * 'add'    -- store a new alias built from the remaining options
                    (alias name first, then the words it expands to);
      * 'remove' -- delete an alias by name.

    Reads and mutates the module-level ``cfg['alias']`` mapping; exits
    the process with status 1 on user error.
    """
    log.debug('ALIAS started')
    if command_line.command == 'list':
        if not cfg['alias']:
            log.info('No aliases found')
            return
        for al_name in cfg['alias']:
            # Python 2 print statement; right-aligns names in a 10-char column.
            print '%10s: %s' % (al_name, cfg['alias'][al_name])
    elif command_line.command == 'add':
        # Need at least an alias name plus one word of expansion.
        if len(command_line.options) < 2:
            log.error('Not enough options. Use it like "abf alias add <alias_name> opt1 [opt2 ...]"')
            exit(1)
        al_name = command_line.options[0]
        # Spaces/'=' in the name would break config-file storage and lookup.
        if ' ' in al_name or '=' in al_name:
            log.error('Do not use " " or "=" for alias name!')
            exit(1)
        alias = ''
        # Re-quote multi-word items so shlex.split reconstructs them later.
        for al in command_line.options[1:]:
            if ' ' in al:
                alias += '"%s" ' % al
            else:
                alias += al + ' '
        if al_name in cfg['alias']:
            log.warning('Alias "%s" already exists and will be overwritten.' % al_name)
        cfg['alias'][al_name] = alias
        log.info('Done')
    elif command_line.command == 'remove':
        if not command_line.options:
            log.error("Enter the alias name!")
            exit(1)
        al_name = command_line.options[0]
        if al_name not in cfg['alias']:
            log.error('Alias "%s" not found' % al_name)
            exit(1)
        cfg['alias'].pop(al_name)
        log.info('Done')
def localbuild_rpmbuild():
log.debug('RPMBUILD started')
src_dir = '/tmp/abf/rpmbuild'
@ -461,14 +532,6 @@ def get():
def put():
log.debug('PUT started')
if not command_line.upload_only and not command_line.message:
log.error("Specify a message first!")
exit(1)
if command_line.upload_only and command_line.do_not_upload:
log.error("Conflicting options: --upload-only and --do-not-upload" )
exit(1)
path = get_root_git_dir()
yaml_path = os.path.join(path, '.abf.yml')
@ -477,25 +540,25 @@ def put():
exit(1)
_update_location()
if not command_line.do_not_upload:
try:
min_size = human2bytes(command_line.minimal_file_size)
except ValueError, ex:
log.error('Incorrect "--minimal-file-size" value: %s' % command_line.minimal_file_size)
exit(1)
error_count = upload_files(models, min_size, remove_files=not command_line.do_not_remove_files, path=path)
if error_count:
log.info('There were errors while uploading, stopping.')
exit(1)
if command_line.upload_only:
try:
min_size = human2bytes(command_line.minimal_file_size)
except ValueError, ex:
log.error('Incorrect "--minimal-file-size" value: %s' % command_line.minimal_file_size)
exit(1)
error_count = upload_files(models, min_size, remove_files=not command_line.do_not_remove_files, path=path)
if error_count:
log.info('There were errors while uploading, stopping.')
exit(1)
if not command_line.message:
return
cmd = ['git', 'add', '--all']
execute_command(cmd, print_to_stdout=True, exit_on_error=True)
if os.path.isfile(yaml_path):
cmd = ['git', 'add', '-f', path]
cmd = ['git', 'add', '-f', yaml_path]
execute_command(cmd, print_to_stdout=True, exit_on_error=True)
cmd = ['git', 'commit', '-m', command_line.message]
@ -516,8 +579,26 @@ def fetch():
if not os.path.isfile(path):
log.error('File "%s" can not be found' % path)
exit(1)
fetch_files(models, path, command_line.only)
try:
fetch_files(models, path, command_line.only)
except yaml.scanner.ScannerError, ex:
log.error('Invalid yml file %s!\nProblem in line %d column %d: %s' % (path, ex.problem_mark.line, ex.problem_mark.column, ex.problem))
except yaml.composer.ComposerError, ex:
log.error('Invalid yml file %s!\n%s' % (path, ex))
def store():
    """Handle the 'abf store' command.

    Uploads the file named by ``command_line.path`` to the File-Store via
    ``models.jsn.upload_file`` (silent mode suppresses progress logging)
    and prints the resulting sha1 hash.  Exits with status 1 when the
    path is missing or is not a regular file.
    """
    log.debug('STORE started')
    # Expand '~' so users can pass home-relative paths.
    p = os.path.expanduser(command_line.path)
    if not os.path.exists(p):
        log.error('File "%s" does not exist!' % p)
        exit(1)
    if not os.path.isfile(p):
        log.error('"%s" is not a regular file!' % p)
        exit(1)
    res = models.jsn.upload_file(p, silent=True)
    # Python 2 print statement: emit the sha1 hash (or server reply) to stdout.
    print res
def copy():
log.debug('COPY started')
sbrn = command_line.src_branch
@ -612,9 +693,10 @@ def build():
arches.append(a)
else:
# arches = all_arches
for arch in ['i586','x86_64']:
for arch in ['i586','i686','x86_64']:
a = Arch.get_arch_by_name(models, arch)
arches.append(a)
if a:
arches.append(a)
log.info("Arches are assumed to be " + str(arches))
log.debug('Architectures: %s' % arches)
@ -636,7 +718,7 @@ def build():
if not as_branch:
log.info('You\'ve specified a project without a branch.')
return (None, None, None)
for ref in proj.get_refs_list(models):
if ref['ref'] == as_branch and ref['object']['type'] == 'commit':
as_commit = ref['object']['sha']
@ -644,20 +726,20 @@ def build():
if not as_commit:
log.error("Could not resolve hash for branch '%s'" % (as_branch))
return (None, None, None)
for repo in proj.repositories:
if repo.platform.name == as_branch or (as_branch == 'master' and repo.platform.name == 'cooker'):
as_saveto = repo
if not as_saveto:
log.info('Could not resolve a platform to save to from the branch name "%s".' % as_branch)
return (as_branch, as_commit, None)
return (as_branch, as_commit, as_saveto)
as_branch, as_commit, as_saveto = auto_resolve()
opts = 'Branch: %s, commit: %s, save-to-repo: %s' % (as_branch, as_commit, as_saveto)
log.debug('A list of options which could be resolved automatically: %s' % opts)
# get git commit hash
commit_hash = None
@ -961,6 +1043,7 @@ def clean():
if __name__ == '__main__':
apply_aliases()
parse_command_line()
if command_line.verbose:

View file

@ -9,7 +9,7 @@ import tempfile
import httplib
import mimetypes
import base64
import sha
import hashlib
import shutil
from abf.api.exceptions import *
@ -219,7 +219,7 @@ class AbfJson(object):
def compute_sha1(self, file_name):
fd = open(file_name, 'rb')
datablock = 1
s = sha.new()
s = hashlib.sha1()
while datablock:
datablock = fd.read(AbfJson.BLOCK_SIZE)
if datablock:
@ -227,7 +227,7 @@ class AbfJson(object):
hex_sha = s.hexdigest()
return hex_sha
def upload_file(self, file_name):
def upload_file(self, file_name, silent=False):
self.log.debug('Looking for "%s" in file-store...' % file_name)
sha_hash = self.compute_sha1(file_name)
self.log.debug('File hash is %s' % sha_hash)
@ -240,7 +240,7 @@ class AbfJson(object):
self.log.critical('File-Store returned file for sha1 %s instead of %s!' % (sha_hash_new, sha_hash))
exit(1)
new_fn = os.path.basename(file_name)
if fn != new_fn:
if fn != new_fn and not silent:
self.log.warning('The name of the file in file-store is %s, but you are trying to upload file %s' % (fn, new_fn))
return sha_hash
@ -252,7 +252,8 @@ class AbfJson(object):
self.__encode_multipart_formdata(body, boundary,[], [('file_store[file]', file_name)])
length = body.tell()
body.seek(0)
self.log.info('Uploading %s (%s)' % (file_name, bytes2human(os.stat(file_name).st_size)))
if not silent:
self.log.info('Uploading %s (%s)' % (file_name, bytes2human(os.stat(file_name).st_size)))
conn = httplib.HTTPConnection(self.file_store_domain, 80)
content_type = 'multipart/form-data; boundary=%s' % boundary
headers = {'Content-Type' : content_type, 'Content-Length' : length, "Authorization": "Basic %s" % self.base64_auth_string}

View file

@ -15,7 +15,7 @@ import getpass
# print cfg.pop('aaa')
#####################################################
VERSION = 3
VERSION = 4
def mkdirs(path):
''' the equivalent of mkdir -p path'''
@ -110,7 +110,7 @@ class Config(dict):
if main_conf and ('config_version' not in self['main'] or int(self['main']['config_version']) != VERSION):
print "Sorry, but configuration schema have been changed or config file have been corrupted, so you need to reinitialize the configuration."
print "Configuration schema have been changed or config file have been corrupted, rebuilding config..."
init = True
if init and main_conf:
@ -155,32 +155,36 @@ class Config(dict):
def first_start(self):
domain = self.ask_user_url('ABF URL [%s]: ' % Config.default_url, Config.default_url)
self['main']['abf_url'] = domain
if 'abf_url' not in self['main']:
domain = self.ask_user_url('ABF URL [%s]: ' % Config.default_url, Config.default_url)
self['main']['abf_url'] = domain
user_default = getpass.getuser()
user = ask_user('User [%s]: ' % user_default, can_be_empty=True)
self['user']['login'] = user or user_default
if 'login' not in self['user'] or 'password' not in self['user']:
user_default = getpass.getuser()
user = ask_user('User [%s]: ' % user_default, can_be_empty=True)
self['user']['login'] = user or user_default
password = getpass.getpass()
self['user']['password'] = password
#password = ask_user('Password: ', can_be_empty=False)
password = getpass.getpass()
self['user']['password'] = password
parts = domain.split('//')
parts = self['main']['abf_url'].split('//')
git_uri = "%(protocol)s//%(user)s@%(domain)s" % \
dict(protocol=parts[0], user=user, domain=parts[1])
dict(protocol=parts[0], user=self['user']['login'], domain=parts[1])
self['user']['git_uri'] = git_uri
res = ask_user('Default project owner [%s]: ' % user, can_be_empty=True)
self['user']['default_group'] = res or user
if 'default_group' not in self['user']:
res = ask_user('Default project owner [%s]: ' % self['user']['login'], can_be_empty=True)
self['user']['default_group'] = res or self['user']['login']
def_bp = 'rosa2012.1'
res = ask_user('Default platform [%s]: ' % def_bp, can_be_empty=True)
self['user']['default_build_platform'] = res or def_bp
if 'default_build_platform' not in self['user']:
def_bp = 'rosa2012.1'
res = ask_user('Default platform [%s]: ' % def_bp, can_be_empty=True)
self['user']['default_build_platform'] = res or def_bp
filestore_domain = self.ask_user_url('File-store URL [%s]: ' % Config.default_filestore_url, Config.default_filestore_url)
self['main']['file_store_url'] = filestore_domain
if 'file_store_url' not in self['main']:
filestore_domain = self.ask_user_url('File-store URL [%s]: ' % Config.default_filestore_url, Config.default_filestore_url)
self['main']['file_store_url'] = filestore_domain
#configure logging
self['formatters']['keys'] = 'verbose,simple'
@ -216,8 +220,16 @@ class Config(dict):
self['handler_main']['formatter'] = 'simple'
self['handler_main']['args'] = '()'
if not self['alias']:
self['alias']['st'] = 'status'
self['alias']['b'] = 'build'
self['alias']['su'] = 'search users'
self['alias']['sg'] = 'search groups'
self['alias']['spl'] = 'search platforms'
self['alias']['sp'] = 'search projects'
self['main']['config_version'] = VERSION
print('Initial configuration have been completed')
print('Configuration have been completed')
print 'Now you can execute "abf locate update-recursive -d PATH", where PATH is your directory with ' + \
'cloned ABF projects. It will let you use "abfcd <project>" command to simply cd to project directory.\n\n'

View file

@ -18,7 +18,17 @@ from abf.console.log import Log
from abf.api.exceptions import *
log = Log('models')
def mkdirs(path):
    ''' the equivalent of mkdir -p path

    Creates *path* and any missing parent directories; does nothing when
    the path already exists.

    Bug fix: the original loop rebuilt the path by prefixing every
    component with '/', which silently turned relative paths into
    absolute ones rooted at '/'.  os.makedirs handles both relative and
    absolute paths correctly and creates all intermediates in one call.
    '''
    if not os.path.exists(path):
        os.makedirs(path)
class CommandTimeoutExpired(Exception):
pass
@ -75,15 +85,22 @@ def get_project_data(spec_path):
rpm_spec = parse_spec_silently(ts, spec_path)
name = rpm.expandMacro("%{name}")
version = rpm.expandMacro("%{version}")
sources_all = rpm_spec.sources()
if type(rpm_spec.sources) is list: # rpm4
sources_all = rpm_spec.sources
src_flag = 1
patch_fkag = 2
else:
sources_all = rpm_spec.sources() # rpm5
src_flag = 65536
patch_fkag = 131072
sources = []
patches = []
for src in sources_all:
name, number, flag = src
if flag & 65536: # source file
if flag & src_flag: # source file
sources.append((name, number))
elif flag & 131072:
elif flag & patch_fkag:
patches.append((name, number))
return {'name': name, 'version': version, 'sources': sources, 'patches': patches}
@ -202,9 +219,16 @@ def find_spec_problems(exit_on_error=True, strict=False, auto_remove=False):
files_present.append(fl)
yaml_path = os.path.join(path, '.abf.yml')
yaml_data = {'sources': {}}
if os.path.isfile(yaml_path):
with open(yaml_path, 'r') as fd:
yaml_data = yaml.load(fd)
try:
yaml_data = yaml.load(fd)
except yaml.scanner.ScannerError, ex:
log.error('Invalid yml file %s!\nProblem in line %d column %d: %s' % (yaml_path, ex.problem_mark.line, ex.problem_mark.column, ex.problem))
except yaml.composer.ComposerError, ex:
log.error('Invalid yml file %s!\n%s' % (yaml_path, ex))
if not 'sources' in yaml_data:
log.error("Incorrect .abf.yml file: no 'sources' key")
exit(1)
@ -443,7 +467,7 @@ def fetch_files(models, yaml_path, file_names=None):
sha_hash_current = to_fetch[file_name]
sha_hash_new = models.jsn.compute_sha1(path)
if sha_hash_current == sha_hash_new:
log.debug('The file %s already presents and has correct hash' % file_name)
log.debug('The file %s already presents and has a correct hash' % file_name)
continue
else:
log.info('The file %s already presents but its hash is not the same as in .abf.yml, so it will be rewritten.' % file_name)
@ -466,9 +490,10 @@ def upload_files(models, min_size, path=None, remove_files=True):
with open(yaml_path, 'r') as fd:
try:
yaml_data = yaml.load(fd)
except yaml.composer.ComposerError:
except (yaml.composer.ComposerError, yaml.scanner.ScannerError) :
log.error('Could not parse .abf.yml file. It seems to be corrupted and will be rewritten.')
yaml_file_changed = True
yaml_data['sources'] = {}
if not 'sources' in yaml_data:
log.error("Incorrect .abf.yml file: no 'sources' key. The file will be rewritten.")
yaml_file_changed = True

View file

@ -67,7 +67,7 @@ class Model(object):
for field in self.__class__.required_fields:
if field not in self.params_dict:
raise Exception("One of the fields required for %s model was not specified: %s" %
(self.__class__.__name__field))
(self.__class__.__name__, field))
else:
log.debug('Creating a stub for %s %s' % (self.__class__.__name__, self.init_data['id']))
self.load()
@ -367,8 +367,8 @@ class Project(Model):
class BuildList(Model):
required_fields = ['id', 'name', 'container_path', 'status', 'status_string', 'package_version', 'project', 'created_at', 'updated_at',
'build_for_platform', 'save_to_repository', 'arch', 'is_circle', 'update_type', 'build_requires', 'auto_publish',
required_fields = ['id', 'container_path', 'status', 'status_string', 'package_version', 'project', 'created_at', 'updated_at',
'build_for_platform', 'save_to_repository', 'arch', 'update_type', 'auto_publish',
'commit_hash', 'duration', 'owner', 'owner_type', 'include_repos', 'priority', 'build_log_url', 'advisory', 'mass_build']
status_by_id = {
@ -444,7 +444,6 @@ class BuildList(Model):
'save_to_repository_id': save_to_repository.id,
'build_for_platform_id': None,
'auto_publish': auto_publish,
'build_requires': False,
'arch_id': None,
'include_repos': [],
}

View file

@ -9,21 +9,22 @@ __abf_opts()
COMPREPLY=( $(compgen -W "${@}" -- "${cur}") )
return 0
fi
return 1
}
__abf_get()
{
__abf_opts "--branch --verbode --help"
__abf_opts "--branch --verbose --help"
}
__abf_put()
{
__abf_opts "--verbode --help"
__abf_opts "--verbose --help --message --minimal-file-size --do-not-remove-files"
}
__abf_show()
{
__abf_opts "--project --verbode --help"
__abf_opts "--project --verbose --help"
shows="build-repos build-platforms save-to-repos save-to-platforms"
if [[ ${cur} != -* ]] ; then
if [[ ${prev} == -* ]] ; then
@ -36,7 +37,7 @@ __abf_show()
__abf_locate()
{
__abf_opts "--project --directory --verbode --help"
__abf_opts "--project --directory --verbose --help"
actions="update update-recursive"
if [[ ${cur} != -* ]] ; then
if [[ ${prev} == -* ]] ; then
@ -49,7 +50,7 @@ __abf_locate()
__abf_mock_urpm()
{
__abf_opts "--config --verbode --help"
__abf_opts "--config --verbose --help"
configs_dir='/etc/abf/mock-urpm/configs'
if [ ${prev} == -c ] || [ ${prev} == --config ] ; then
COMPREPLY=( $(compgen -W "` ls -1 ${configs_dir}/*.cfg | xargs -l basename | sed s/[.]cfg//g`" -- "${cur}") )
@ -58,7 +59,7 @@ __abf_mock_urpm()
__abf_rpmbuild()
{
__abf_opts "--verbode --help --build"
__abf_opts "--verbose --help --build"
}
__abf_build()
@ -101,12 +102,12 @@ __abf_build()
__abf_publish()
{
__abf_opts "--pack --verbode --help"
__abf_opts "--verbose --help"
}
__abf_copy()
{
__abf_opts "--pack --verbode --help"
__abf_opts "--pack --verbose --help"
if [[ ${cur} != -* ]] ; then
@ -118,17 +119,17 @@ __abf_copy()
__abf_status()
{
__abf_opts "--verbode --help"
__abf_opts "--verbose --help"
}
__abf_help()
{
__abf_opts "--verbode --help"
__abf_opts "--verbose --help"
}
__abf_search()
{
__abf_opts "--verbode --help"
__abf_opts "--verbose --help"
types="users groups platforms projects"
if [ "$COMP_CWORD" == "2" ] ; then
COMPREPLY=( $(compgen -W "${types}" -- "${cur}") )
@ -139,26 +140,48 @@ __abf_search()
__abf_test()
{
__abf_opts "--verbode --help"
__abf_opts "--verbose --help"
}
__abf_fetch()
{
__abf_opts "--only --verbode --help"
__abf_opts "--only --verbose --help"
}
__abf_store()
{
    # Try option completion first; when the current word is not an
    # option, fall back to file name completion because 'abf store'
    # takes a path argument.
    if __abf_opts "--verbose --help" ; then
        return 0
    fi
    _filedir
    return 0
}
__abf_clean()
{
__abf_opts "--auto-remove --verbode --help"
__abf_opts "--auto-remove --verbose --help"
}
__abf_alias()
{
    __abf_opts "--verbose --help"
    types="add remove list"
    # Suggest the alias sub-commands only for the word right after 'alias'.
    [ "$COMP_CWORD" == "2" ] || return 0
    COMPREPLY=( $(compgen -W "${types}" -- "${cur}") )
    return 0
}
__abf()
{
local opts modes
modes="help get put show build publish status locate search test fetch mock-urpm rpmbuild clean"
modes="help get put show build publish status locate search test fetch mock-urpm rpmbuild clean copy store alias"
COMPREPLY=()
mode="${COMP_WORDS[1]}"
prev="${COMP_WORDS[COMP_CWORD-1]}"

View file

@ -1,6 +1,6 @@
config_opts['root'] = 'rosa2012.1-i586-all'
config_opts['target_arch'] = 'i586'
config_opts['legal_host_arches'] = ('i586', 'x86_64')
config_opts['legal_host_arches'] = ('i586', 'x86_64', 'i686')
config_opts['chroot_setup'] = 'basesystem-minimal locales locales-en libmpc3 libmpfr4 libnatspec0 libpwl5 make patch unzip mandriva-release-common binutils curl gcc gcc-c++ gnupg rpm-build'
config_opts['urpmi_options'] = '--no-suggests --no-verify-rpm'
config_opts['use_system_media'] = False

View file

@ -1,6 +1,6 @@
config_opts['root'] = 'rosa2012.1-i586-main'
config_opts['target_arch'] = 'i586'
config_opts['legal_host_arches'] = ('i586', 'x86_64')
config_opts['legal_host_arches'] = ('i586', 'i686', 'x86_64')
config_opts['chroot_setup'] = 'basesystem-minimal locales locales-en libmpc3 libmpfr4 libnatspec0 libpwl5 make patch unzip mandriva-release-common binutils curl gcc gcc-c++ gnupg rpm-build'
config_opts['urpmi_options'] = '--no-suggests --no-verify-rpm'
config_opts['use_system_media'] = False

View file

@ -1,6 +1,6 @@
config_opts['root'] = 'rosa2012lts-i586-all'
config_opts['target_arch'] = 'i586'
config_opts['legal_host_arches'] = ('i586', 'x86_64')
config_opts['legal_host_arches'] = ('i586', 'i686', 'x86_64')
config_opts['chroot_setup'] = 'basesystem-minimal locales locales-en libmpc2 libmpfr4 libnatspec0 libpwl5 make patch unzip mandriva-release-common binutils curl gcc gcc-c++ gnupg rpm-build'
config_opts['urpmi_options'] = '--no-suggests --no-verify-rpm'
config_opts['use_system_media'] = False

View file

@ -1,6 +1,6 @@
config_opts['root'] = 'rosa2012lts-i586-main'
config_opts['target_arch'] = 'i586'
config_opts['legal_host_arches'] = ('i586', 'x86_64')
config_opts['legal_host_arches'] = ('i586', 'i686', 'x86_64')
config_opts['chroot_setup'] = 'basesystem-minimal locales locales-en libmpc2 libmpfr4 libnatspec0 libpwl5 make patch unzip mandriva-release-common binutils curl gcc gcc-c++ gnupg rpm-build'
config_opts['urpmi_options'] = '--no-suggests --no-verify-rpm'
config_opts['use_system_media'] = False