Added localization facilities

Denis Silakov 2014-09-30 12:19:49 +04:00
parent 7463fcca84
commit 7fdf179dfc
8 changed files with 3197 additions and 73 deletions
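
The hunks below wrap user-visible messages in gettext's _() and replace positional %s placeholders with named %(name)s placeholders, so that translations can reorder the substituted values. With the named form, the right-hand side of the % operator must be a mapping rather than a tuple (a tuple raises "TypeError: format requires a mapping"). A minimal Python 2 sketch of the pattern, with stand-in values that are not taken from this commit:

import gettext

# For this sketch, bind _() to the default gettext lookup.
_ = gettext.gettext

config_path = '/etc/abf/mock-urpm.cfg'       # stand-in path
ex = IOError('No such file or directory')    # stand-in exception

# Named placeholders are filled from a dict, so a translated template
# may use %(path)s and %(exception)s in any order.
print _("Could not read the contents of '%(path)s': %(exception)s") % \
      {'path': config_path, 'exception': str(ex)}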

abf.py (28 changed lines)

@@ -383,7 +383,7 @@ def run_mock_urpm(binary=True):
try:
execfile(config_path)
except Exception, ex:
log.error(_("Could not read the contents of '%s': %s") % (config_path, str(ex)))
log.error(_("Could not read the contents of '%(path)s': %(exception)s") % (config_path, str(ex)))
exit(2)
basedir = ('basedir' in config_opts and config_opts['basedir']) or '/var/lib/abf/mock-urpm'
@@ -587,10 +587,10 @@ def get_project(models, must_exist=True, name=None):
try:
proj = Project.get_by_name(models, '%s/%s' % (owner_name, project_name))
except PageNotFoundError:
log.error(_('The project %s/%s does not exist!') % (owner_name, project_name))
log.error(_('The project %(owner)s/%(project)s does not exist!') % (owner_name, project_name))
exit(1)
except ForbiddenError:
log.error(_('You do not have acces to the project %s/%s!') % (owner_name, project_name))
log.error(_('You do not have acces to the project %(owner)s/%(project)s!') % (owner_name, project_name))
exit(1)
log.debug(_('Project: %s') % proj)
@@ -715,9 +715,9 @@ def fetch():
try:
fetch_files(models, path, command_line.only)
except yaml.scanner.ScannerError, ex:
log.error(_('Invalid yml file %s!\nProblem in line %d column %d: %s') % (path, ex.problem_mark.line, ex.problem_mark.column, ex.problem))
log.error(_('Invalid yml file %(filename)s!\nProblem in line %(line)d column %(column)d: %(problem)s') % (path, ex.problem_mark.line, ex.problem_mark.column, ex.problem))
except yaml.composer.ComposerError, ex:
log.error(_('Invalid yml file %s!\n%s') % (path, ex))
log.error(_('Invalid yml file %(filename)s!\n%(exception)s') % (path, ex))
def store():
log.debug(_('STORE started'))
@@ -1029,7 +1029,7 @@ def build():
build_for_platform = repo.platform
pls.append(repo.platform.name)
if not build_for_platform:
log.error(_("Can not build for platform %s. Select one of the following:\n%s") % (pl_name, ', '.join(pls)))
log.error(_("Can not build for platform %(platform)s. Select one of the following:\n%(all_platforms)s") % (pl_name, ', '.join(pls)))
exit(1)
for repo in build_for_platform.repositories:
@@ -1038,7 +1038,7 @@ def build():
break
if not save_to_repository:
log.error(_("Incorrect save-to repository %s/%s.\nSelect one of the following:\n%s") % (pl_name, repo_name,
log.error(_("Incorrect save-to repository %(platform)s/%(repo)s.\nSelect one of the following:\n%(all_repos)s") % (pl_name, repo_name,
', '.join([str(x) for x in build_for_platform.repositories])))
exit(1)
@@ -1057,13 +1057,13 @@ def build():
elif len(items) == 1:
repo_name = items[0]
pl_name = default_build_platform
log.debug(_("Platform for selected repository %s is assumed to be %s") % (repo_name, pl_name))
log.debug(_("Platform for selected repository %(repo)s is assumed to be %(plat)s") % (repo_name, pl_name))
else:
log.error(_("'repository' option format: [platform/]repository"))
exit(1)
if pl_name not in build_platform_names:
log.error(_("Can not use build repositories from platform %s!\nSelect one of the following:\n%s") % (pl_name,
log.error(_("Can not use build repositories from platform %(platform)s!\nSelect one of the following:\n%(all_plats)s") % (pl_name,
', '.join(build_platform_names)))
exit(1)
for pl in build_platforms:
@@ -1076,7 +1076,7 @@ def build():
build_repo = repo
break
if not build_repo:
log.error(_("Platform %s does not have repository %s!\nSelect one of the following:\n%s") % (pl_name, repo_name,
log.error(_("Platform %(plat)s does not have repository %(repo)s!\nSelect one of the following:\n%(all_repos)s") % (pl_name, repo_name,
', '.join([x.name for x in build_platform.repositories])))
exit(1)
build_repositories.append(build_repo)
@@ -1153,18 +1153,18 @@ def publish():
try:
bl = BuildList(models, task_id)
if bl.status != 0:
log.error(_("The status of build task %s is \"%s\", can not published!") % (bl.id, bl.status_string))
log.error(_("The status of build task %(id)s is \"%(status)s\", can not published!") % (bl.id, bl.status_string))
continue
res = bl.publish()
except AbfApiException, ex:
log.error(_('Could not publish task %s: %s') %(task_id, str(ex)))
log.error(_('Could not publish task %(id)s: %(exception)s') %(task_id, str(ex)))
def _print_build_status(models, ID):
try:
bl = BuildList(models, ID)
except AbfApiException, ex:
log.error(_("Can not read buildlist %s: %s") % (ID, ex))
log.error(_("Can not read buildlist %(id)s: %(exception)s") % (ID, ex))
exit(3)
if command_line.short:
print repr(bl)
@@ -1220,7 +1220,7 @@ def _update_location(path=None, silent=True):
if group:
proj = '%s/%s' % (group, name)
projects_cfg[proj]['location'] = path
text = _("Project %s has been located in %s") % (proj, path)
text = _("Project %(proj)s has been located in %(path)s") % (proj, path)
if silent:
log.debug(text)
else:


@@ -108,7 +108,7 @@ class AbfJson(object):
exception = AbfApiException
if exception == BadRequestError:
log.error(_('Sorry, but something went wrong and request I\'ve sent to ABF is bad. Please, '
'notify developers, send them a set of command-line arguments and the request data:\n%s\n%s') % (URL, post_json or "No POST DATA") )
'notify developers, send them a set of command-line arguments and the request data:\n%(url)s\n%(json)s') % (URL, post_json or "No POST DATA") )
exit(1)
if exception in AbfJson.fatal_errors:
@@ -174,7 +174,7 @@ class AbfJson(object):
res = ex.fp.read()
if etag_new:
self.log.debug(_("Caching the new value for %s. ETag is %s") % (url, etag_new))
self.log.debug(_("Caching the new value for %(url)s. ETag is %(etag)s") % (url, etag_new))
cache_etags.put(url, etag_new)
cache_data.put(etag_new, res)
@@ -245,11 +245,11 @@ class AbfJson(object):
fn = res[0]['file_name']
sha_hash_new = res[0]['sha1_hash']
if sha_hash_new != sha_hash:
self.log.critical(_('File-Store returned file for sha1 %s instead of %s!') % (sha_hash_new, sha_hash))
self.log.critical(_('File-Store returned file for sha1 %(new)s instead of %(old)s!') % (sha_hash_new, sha_hash))
exit(1)
new_fn = os.path.basename(file_name)
if fn != new_fn and not silent:
self.log.warning(_('The name of the file in file-store is %s, but you are trying to upload file %s') % (fn, new_fn))
self.log.warning(_('The name of the file in file-store is %(old)s, but you are trying to upload file %(new)s') % (fn, new_fn))
return sha_hash
tempfile.tempdir = '/tmp'
@@ -261,7 +261,7 @@ class AbfJson(object):
length = body.tell()
body.seek(0)
if not silent:
self.log.info(_('Uploading %s (%s)') % (file_name, bytes2human(os.stat(file_name).st_size)))
self.log.info(_('Uploading %(file)s (%(size)s)') % (file_name, bytes2human(os.stat(file_name).st_size)))
conn = httplib.HTTPConnection(self.file_store_domain, 80)
content_type = 'multipart/form-data; boundary=%s' % boundary
headers = {'Content-Type' : content_type, 'Content-Length' : length, "Authorization": "Basic %s" % self.base64_auth_string}
@@ -272,7 +272,7 @@ class AbfJson(object):
output = resp.read()
conn.close()
if resp.status < 200 or resp.status > 299:
self.log.error(_("Could not upload file. HTTP error %s %s") % (resp.status, resp.reason))
self.log.error(_("Could not upload file. HTTP error %(status)s %(reason)s") % (resp.status, resp.reason))
exit(1)
output = json.loads(output)
@@ -287,7 +287,7 @@ class AbfJson(object):
if ex.code == 404: # data was not modified
raise PageNotFoundError(_('File with hash %s can not be downloaded from File-Store.') % sha_hash)
else:
raise AbfApiException(_('Error while downloading file by hash %s: %s') % (sha_hash, str(ex)))
raise AbfApiException(_('Error while downloading file by hash %(hash)s: %(exception)s') % (sha_hash, str(ex)))
fd = open(path, 'wb')
shutil.copyfileobj(response, fd)
fd.close()


@@ -8,7 +8,7 @@ import fcntl
# USAGE:
#
# from abf.console.config import Config
#
#
# cfg = Config()
# cfg['aaa']['bbb'] = 'ccc'
# print cfg['aaa']['bbb']
@@ -78,21 +78,21 @@ class Section(dict):
res = self.config.get(self.section, key)
except ConfigParser.NoOptionError, ex:
if key == 'default_branch':
print(_('non-critical error in config "%s": %s') % (self.conf_path, str(ex)))
print(_('non-critical error in config "%(path)s": %(exception)s') % (self.conf_path, str(ex)))
return ''
else:
print(_('error in config "%s": %s') % (self.conf_path, str(ex)))
print(_('error in config "%(path)s": %(exception)s') % (self.conf_path, str(ex)))
exit(1)
def pop(self, key, init=None):
if init is not None and key not in self:
return init
res = super(Section, self).pop(key, init)
self.config.remove_option(self.section, key)
self.save()
return res
class Config(dict):
default_url = 'https://abf.rosalinux.ru'
default_filestore_url = 'http://file-store.rosalinux.ru'
@@ -104,8 +104,8 @@ class Config(dict):
if not os.path.isfile(self.conf_path):
mkdirs(os.path.dirname(self.conf_path))
init = True
self.config = ConfigParser.RawConfigParser()
self.config.read(self.conf_path)
@@ -114,35 +114,35 @@ class Config(dict):
opts = self.config.options(section)
for opt in opts:
super(Section, self[section]).__setitem__(opt, self.config.get(section, opt))
if main_conf and ('config_version' not in self['main'] or int(self['main']['config_version']) != VERSION):
print(_("Configuration schema have been changed or config file have been corrupted, rebuilding config..."))
init = True
if init and main_conf:
self.first_start()
def __setitem__(self, key, value):
'''NOTE: value is ignored'''
if super(Config, self).__contains__(key):
return
super(Config, self).__setitem__(key, Section(self.config, self.conf_path, key))
def __getitem__(self, key):
if not super(Config, self).__contains__(key):
self[key] = []
return super(Config, self).__getitem__(key)
def pop(self, section, init=None):
if init is not None and section not in self:
return init
res = super(Config, self).pop(section, init)
self.config.remove_section(section)
self.config.remove_section(section)
res.save()
return res
def ask_user_url(self, prompt, default):
done = False
while not done:
@@ -159,27 +159,27 @@ class Config(dict):
continue
done = True
return domain
def first_start(self):
if 'abf_url' not in self['main']:
domain = self.ask_user_url('ABF URL [%s]: ' % Config.default_url, Config.default_url)
self['main']['abf_url'] = domain
if 'login' not in self['user'] or 'password' not in self['user']:
user_default = getpass.getuser()
user = ask_user('User [%s]: ' % user_default, can_be_empty=True)
self['user']['login'] = user or user_default
password = getpass.getpass()
self['user']['password'] = password
parts = self['main']['abf_url'].split('//')
git_uri = "%(protocol)s//%(user)s@%(domain)s" % \
dict(protocol=parts[0], user=self['user']['login'], domain=parts[1])
self['user']['git_uri'] = git_uri
if 'default_branch' not in self['user']:
def_br = 'master'
res = ask_user('Default project branch [%s]: ' % def_br, can_be_empty=True)
@@ -188,21 +188,21 @@ class Config(dict):
if 'default_group' not in self['user']:
res = ask_user('Default project owner [%s]: ' % self['user']['login'], can_be_empty=True)
self['user']['default_group'] = res or self['user']['login']
if 'default_build_platform' not in self['user']:
def_bp = 'rosa2012.1'
res = ask_user('Default platform [%s]: ' % def_bp, can_be_empty=True)
self['user']['default_build_platform'] = res or def_bp
if 'file_store_url' not in self['main']:
filestore_domain = self.ask_user_url('File-store URL [%s]: ' % Config.default_filestore_url, Config.default_filestore_url)
self['main']['file_store_url'] = filestore_domain
#configure logging
#configure logging
self['formatters']['keys'] = 'verbose,simple'
self['formatter_verbose']['format'] = '%(asctime)s %(levelname)-7s in %(filename)s:%(funcName)s:%(lineno)d: %(message)s'
self['formatter_simple']['format'] = '%(message)s'
self['loggers']['keys'] = 'root,abf,beaker,models'
self['logger_root']['handlers'] = 'verbose'
self['logger_root']['propagate'] = '1'
@@ -216,12 +216,12 @@ class Config(dict):
self['logger_models']['propagate'] = '0'
self['logger_models']['level'] = 'DEBUG'
self['logger_models']['qualname'] = 'models'
self['logger_beaker']['handlers'] = 'verbose'
self['logger_beaker']['propagate'] = '1'
self['logger_beaker']['level'] = 'ERROR'
self['logger_beaker']['qualname'] = 'beaker'
self['handlers']['keys'] = 'verbose,main'
self['handler_verbose']['level'] = 'DEBUG'
self['handler_verbose']['class'] = 'StreamHandler'
@@ -231,22 +231,21 @@ class Config(dict):
self['handler_main']['class'] = 'StreamHandler'
self['handler_main']['formatter'] = 'simple'
self['handler_main']['args'] = '()'
if not self['alias']:
self['alias']['st'] = 'status'
self['alias']['b'] = 'build'
self['alias']['su'] = 'search users'
self['alias']['sg'] = 'search groups'
self['alias']['spl'] = 'search platforms'
self['alias']['sp'] = 'search projects'
self['alias']['sp'] = 'search projects'
self['main']['config_version'] = VERSION
print(_('Configuration have been completed'))
print(_('Now you can execute "abf locate update-recursive -d PATH", where PATH is your directory with ' + \
'cloned ABF projects. It will let you use "abfcd <project>" command to simply cd to project directory.\n\n'))


@@ -226,9 +226,9 @@ def find_spec_problems(exit_on_error=True, strict=False, auto_remove=False):
try:
yaml_data = yaml.load(fd)
except yaml.scanner.ScannerError, ex:
log.error(_('Invalid yml file %s!\nProblem in line %d column %d: %s') % (yaml_path, ex.problem_mark.line, ex.problem_mark.column, ex.problem))
log.error(_('Invalid yml file %(file)s!\nProblem in line %(line)d column %(col)d: %(exception)s') % (yaml_path, ex.problem_mark.line, ex.problem_mark.column, ex.problem))
except yaml.composer.ComposerError, ex:
log.error(_('Invalid yml file %s!\n%s') % (yaml_path, ex))
log.error(_('Invalid yml file %(file)s!\n%(exception)s') % (yaml_path, ex))
if not 'sources' in yaml_data:
log.error(_("Incorrect .abf.yml file: no 'sources' key"))
@@ -315,7 +315,7 @@ def pack_project(root_path):
tardir = '%s-%s' % (name, version)
tarball = tardir + ".tar.gz"
log.debug(_("Writing %s/%s ...") % (root_path, tarball))
log.debug(_("Writing %(path)s/%(tarball)s ...") % (root_path, tarball))
full_tarball_path = '%s/%s' % (root_path, tarball)
if os.path.exists(full_tarball_path):
@@ -386,14 +386,14 @@ def execute_command(command, shell=False, cwd=None, timeout=0, raiseExc=True, pr
niceExit=0
os.killpg(child.pid, 9)
if not niceExit and raiseExc:
raise CommandTimeoutExpired(_("Timeout(%s) expired for command:\n # %s\n%s") % (timeout, command, output))
raise CommandTimeoutExpired(_("Timeout(%(timeout)s) expired for command:\n # %(cmd)s\n%(output)s") % (timeout, command, output))
log.debug(_("Child returncode was: %s") % str(child.returncode))
if child.returncode:
if exit_on_error:
exit(1)
if raiseExc:
raise ReturnCodeNotZero(_("Command failed.\nReturn code: %s\nOutput: %s") % (child.returncode, output), child.returncode)
raise ReturnCodeNotZero(_("Command failed.\nReturn code: %(ret_code)s\nOutput: %(output)s") % (child.returncode, output), child.returncode)
return (output, child.returncode)
def logOutput(fds, start=0, timeout=0, print_to_stdout=False):
@@ -443,7 +443,7 @@ def is_text_file(path):
m = magic.open(magic.MAGIC_MIME)
m.load()
r = m.file(path)
log.debug(_("Magic type of file %s is %s") % (path, r))
log.debug(_("Magic type of file %(path)s is %(type)s") % (path, r))
if r.startswith('text'):
return True
return False
@@ -519,7 +519,7 @@ def upload_files(models, min_size, path=None, remove_files=True):
log.info(_('File %s not found, URL will be used instead. Skipping.') % src)
continue
if src not in yaml_files:
log.error(_("error: Source%d file %s does not exist, skipping!") % (num, source))
log.error(_("error: Source%(num)d file %(source)s does not exist, skipping!") % (num, source))
errors_count += 1;
else:
log.info(_('File %s not found, but it\'s listed in .abf.yml. Skipping.') % src)
@@ -561,7 +561,7 @@ def upload_files(models, min_size, path=None, remove_files=True):
if 'removed_sources' not in yaml_data:
yaml_data['removed_sources'] = {}
yaml_data['removed_sources'][item] = h
log.info(_('Removing %s:%s from .abf.yml') % (item, h ))
log.info(_('Removing %(item)s:%(hash)s from .abf.yml') % (item, h ))
yaml_files[src] = sha_hash.encode()
yaml_file_changed = True
else:


@@ -49,13 +49,13 @@ class Model(object):
if st_cache and st_cache.has_key(cache_key):
#read cached value
log.debug( _('Loading %s %s from cache') % (self.__class__.__name__, ID))
log.debug( _('Loading %(name)s %(id)s from cache') % (self.__class__.__name__, ID))
self.stub = False
self.init_data = st_cache.get(cache_key)
self.load()
else:
log.debug(_('Loading %s %s using API') % (self.__class__.__name__, ID))
log.debug(_('Loading %(name)s %(id)s using API') % (self.__class__.__name__, ID))
self.stub = False
self.get_init_data(ID)
self.load()
@@ -67,10 +67,10 @@ class Model(object):
for field in self.__class__.required_fields:
if field not in self.params_dict:
raise Exception(_("One of the fields required for %s model was not specified: %s") %
raise Exception(_("One of the fields required for %(name)s model was not specified: %(field)s") %
(self.__class__.__name__, field))
else:
log.debug(_('Creating a stub for %s %s') % (self.__class__.__name__, self.init_data['id']))
log.debug(_('Creating a stub for %(name)s %(id)s') % (self.__class__.__name__, self.init_data['id']))
self.load()
self.stub = True
@@ -573,7 +573,7 @@ class BuildList(Model):
log.error(_('Sorry, but something went wrong and request I\'ve sent to ABF is bad. Please, '
'notify the console-client developers. Send them a set of command-line arguments and the request data:\n%s') % DATA )
exit(1)
log.info(_("Task %s|%s|%s|%s has been sent. Build task id is %s") %
log.info(_("Task %(proj)s|%(plat)s|%(save_repo)s|%(arch)s has been sent. Build task id is %(id)s") %
(project, bpl, save_to_repository, arch, result['build_list']['id']))
build_ids.append(result['build_list']['id'])
return build_ids
@@ -623,7 +623,7 @@ class PullRequest(Model):
log.error(_('Sorry, but something went wrong and request I\'ve sent to ABF is bad. Please, '
'notify the console-client developers. Send them a set of command-line arguments and the request data:\n%s') % DATA )
exit(1)
log.info(_("Pull request for %s from %s to %s has been sent.") % (project, from_ref, to_ref))
log.info(_("Pull request for %(proj)s from %(from)s to %(to)s has been sent.") % (project, from_ref, to_ref))
class ProjectCreator(Model):
required_fields = ['name', 'description', 'owner']
@@ -656,7 +656,7 @@ class ProjectCreator(Model):
log.error(_('Sorry, but something went wrong and request I\'ve sent to ABF is bad. Please, '
'notify the console-client developers. Send them a set of command-line arguments and the request data:\n%s') % DATA )
exit(1)
log.info(_("The project %s for owner %d has been created.") % (name, owner_id))
log.info(_("The project %(name)s for owner %(owner)d has been created.") % (name, owner_id))
@staticmethod
def add_project_to_repo(models, repo_id, project_id):
@@ -671,7 +671,7 @@ class ProjectCreator(Model):
log.error(_('Sorry, but something went wrong and request I\'ve sent to ABF is bad. Please, '
'notify the console-client developers. Send them a set of command-line arguments and the request data:\n%s') % DATA )
exit(1)
log.info(_("The project %d has been added to repository %d.") % (project_id, repo_id) )
log.info(_("The project %(project)d has been added to repository %(repo)d.") % (project_id, repo_id) )
# Would be nice to invalidate only record corresponding to our project...
models.clear_cache()

po/Makefile (new file, 41 lines)

@@ -0,0 +1,41 @@
# the domain name for gettext
PGOAL = abf-console-client

# python files to search translatable strings in
PY_FILES = ../abf/*py ../abf.py ../abf/console/*py ../abf/api/*py

POFILES = $(wildcard *.po)
MOFILES = $(POFILES:%.po=%.mo)
LANGS = $(POFILES:%.po=%)

top_srcdir=..
PREFIX = $(RPM_BUILD_ROOT)/usr
DATADIR = $(PREFIX)/share
LOCALEDIR=$(DATADIR)/locale

all: $(MOFILES)

%.mo: %.po
	msgfmt -c -o $@ $<

update_n_merge: $(PGOAL).pot merge

merge:
	@for n in $(POFILES); do \
		echo "Merging $$n"; \
		msgmerge "$$n" $(PGOAL).pot > "$$n"t; \
		mv -f "$$n"t "$$n"; \
	done

$(PGOAL).pot: $(PY_FILES)
	xgettext -L Python $(PY_FILES) -o $@

install:
	for l in $(LANGS); do \
		install -d $(LOCALEDIR)/$$l/LC_MESSAGES; \
		install -m 644 $$l.mo $(LOCALEDIR)/$$l/LC_MESSAGES/$(PGOAL).mo; \
	done

clean:
	@rm -rf *.mo *.pof *.pog $(POFILES:%=%t) $(PL_CFILES) desktopstuff.pot $(PGOAL)_tmp.pot
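
po/Makefile drives the standard gettext workflow: xgettext pulls translatable strings out of the Python sources into abf-console-client.pot, msgmerge folds new strings into the existing .po files, msgfmt compiles each .po into a .mo catalog, and "make install" places the catalogs under $(LOCALEDIR)/<lang>/LC_MESSAGES/. At runtime the client would bind that text domain roughly as in the sketch below; the actual initialization code is not part of the hunks shown above, so this is only an assumed usage:

# Sketch: load the installed abf-console-client catalog (Python 2,
# matching the rest of the code base). Domain and directory mirror
# PGOAL and LOCALEDIR from po/Makefile.
import gettext

gettext.install('abf-console-client', '/usr/share/locale', unicode=True)

# gettext.install() makes _() a builtin, so translated lookups work
# anywhere afterwards, e.g. for a message from abf.py:
print _("STORE started")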

po/abf-console-client.pot (new file, 1544 lines)

File diff suppressed because it is too large.
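
The suppressed template contains one entry per translatable string that xgettext finds in the sources. For example, the message changed in the first abf.py hunk would show up roughly as the entry below; the source-location comment is illustrative, and msgstr stays empty in a template:

#: ../abf.py:386
msgid "Could not read the contents of '%(path)s': %(exception)s"
msgstr ""

po/ru.po below carries the same msgids with Russian translations supplied as msgstr values.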

po/ru.po (new file, 1540 lines)

File diff suppressed because it is too large.