# abf-console-client-src/abf/console/misc.py

import os
import sys
import time
import select
import subprocess
import fcntl
from glob import glob
import shutil
import re
import yaml
import tempfile
import magic
import sha
import urllib2

from abf.console.log import Log
from abf.api.exceptions import *

log = Log('models')

def mkdirs(path):
    ''' the equivalent of mkdir -p path'''
    if os.path.exists(path):
        return
    path = os.path.normpath(path)
    items = path.split('/')
    p = ''
    for item in items:
        p += '/' + item
        if not os.path.isdir(p):
            os.mkdir(p)

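# Usage sketch (hypothetical path), behaving like `mkdir -p`:
#   mkdirs('/tmp/abf_example/a/b/c')   # creates every missing path component
#   mkdirs('/tmp/abf_example/a/b/c')   # calling it again is a no-op
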
class CommandTimeoutExpired(Exception):
    pass

class ReturnCodeNotZero(Exception):
    def __init__(self, message, code):
        super(ReturnCodeNotZero, self).__init__(message)
        self.code = code

def get_project_name(path=None):
    try:
        # TODO: Force C locale?
        output, ret_code = execute_command(['git', 'remote', 'show', 'origin', '-n'], cwd=path)
        for line in output.split('\n'):
            if line.startswith(' Fetch URL:') and 'abf' in line:
                project_name = line.split('/')[-1][:-4]
                owner_name = line.split('/')[-2]
                return (owner_name, project_name)
        return (None, None)
    except ReturnCodeNotZero:
        return (None, None)

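# Example of the remote line parsed above (illustrative owner/project, HTTPS form):
#   Fetch URL: https://abf.io/some_owner/some_project.git
# For such a line the function returns ('some_owner', 'some_project').
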
def parse_spec_silently(ts, spec_path):
    # 'ts.parseSpec' writes error: cannot create %_sourcedir /root/rpmbuild/SOURCES
    stderr = 1001
    stdout = 1000
    try:
        os.dup2(sys.stderr.fileno(), stderr)
        os.dup2(sys.stdout.fileno(), stdout)
        se = file('/dev/null', 'w')
        os.dup2(se.fileno(), sys.stderr.fileno())
        os.dup2(se.fileno(), sys.stdout.fileno())
        rpm_spec = ts.parseSpec(spec_path)
    finally:
        os.dup2(stderr, sys.stderr.fileno())
        os.dup2(stdout, sys.stdout.fileno())
        #se.close()
    return rpm_spec

def get_project_name_version(spec_path):
    try:
        rpm = __import__('rpm') # its initialization is too long to place it at the top of the file
        ts = rpm.TransactionSet()
        rpm_spec = parse_spec_silently(ts, spec_path)
        name = rpm.expandMacro("%{name}")
        version = rpm.expandMacro("%{version}")
        return (name, version)
    except:
        return None

def get_project_data(spec_path):
    rpm = __import__('rpm') # its initialization is too long to place it at the top of the file
    ts = rpm.TransactionSet()
    rpm_spec = parse_spec_silently(ts, spec_path)
    name = rpm.expandMacro("%{name}")
    version = rpm.expandMacro("%{version}")

    if type(rpm_spec.sources) is list: # rpm4
        sources_all = rpm_spec.sources
        src_flag = 1
        patch_flag = 2
    else:
        sources_all = rpm_spec.sources() # rpm5
        src_flag = 65536
        patch_flag = 131072

    sources = []
    patches = []
    for src in sources_all:
        # do not reuse 'name' here: it already holds the package name
        src_name, number, flag = src
        if flag & src_flag: # source file
            sources.append((src_name, number))
        elif flag & patch_flag: # patch file
            patches.append((src_name, number))
    return {'name': name, 'version': version, 'sources': sources, 'patches': patches}

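# Sketch of the dictionary returned by get_project_data() (values are illustrative):
#   {'name': 'foo',
#    'version': '1.0',
#    'sources': [('foo-1.0.tar.gz', 0), ('foo.sysconfig', 1)],
#    'patches': [('foo-1.0-fix-build.patch', 0)]}
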
def get_branch_name(path=None):
    try:
        output, ret_code = execute_command(['git', 'branch'], cwd=path)
        for line in output.split('\n'):
            if not line.startswith('*'):
                continue
            if line == '* (no branch)':
                return '(no branch)'
            return line.split()[1]
    except ReturnCodeNotZero:
        return None

def get_current_commit_hash(path=None):
    try:
        output, ret_code = execute_command(['git', 'rev-parse', 'HEAD'], cwd=path)
        return output.strip()
    except ReturnCodeNotZero:
        return None

def get_remote_branch_hash(branch, cwd=None):
    ''' Get the hash of the top commit of the remote branch.
    If not in a git repository directory, an exception will be raised. If the hash can not be found, return None.'''
    re_ref = re.compile('^([0-9a-f]+) refs/remotes/\w+/%s$' % branch)
    output, ret_code = execute_command(['git', 'show-ref'], cwd=cwd)
    for line in output.split('\n'):
        res = re_ref.match(line)
        if res:
            h = res.group(1)
            return h
    return None

def get_tag_hash(tag, cwd=None):
    ''' Get the hash of the tag.
    If not in a git repository directory, an exception will be raised. If the hash can not be found, return None.'''
    re_ref = re.compile('^([0-9a-f]+) refs/tags/%s$' % tag)
    output, ret_code = execute_command(['git', 'show-ref', '--tags'], cwd=cwd)
    for line in output.split('\n'):
        res = re_ref.match(line)
        if res:
            h = res.group(1)
            return h
    return None

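# Both helpers above scan `git show-ref` output, which consists of lines like
# the following (hashes abbreviated for illustration):
#   1a2b3c4d... refs/remotes/origin/master
#   5e6f7a8b... refs/tags/v1.0
# so get_tag_hash('v1.0') would return the hash from the second line.
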
def clone_git_repo_tmp(uri, depth=None):
    log.info(_('Cloning git repository (temporary workaround)'))
    tmp_dir = tempfile.mkdtemp(prefix='tmp_abf_')
    log.info(_("Temporary directory is ") + tmp_dir)
    cmd = ['git', 'clone', uri, tmp_dir]
    execute_command(cmd, print_to_stdout=True, exit_on_error=True)
    return tmp_dir

def get_root_git_dir(path=None):
    ''' Get the root directory of the git project '''
    if path:
        p = path
    else:
        p = os.getcwd()
    while '.git' not in os.listdir(p) and p != '/':
        p = os.path.dirname(p)
    if p == '/':
        return None
    else:
        return p

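# Usage sketch (illustrative paths): called from any subdirectory of a checkout,
#   get_root_git_dir('/home/user/project/sub/dir')  ->  '/home/user/project'
# provided '/home/user/project/.git' exists; returns None if no .git directory is
# found before reaching '/'.
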
def get_spec_file(root_path):
    specs = glob(os.path.join(root_path, '*.spec'))
    log.debug(_("Spec files found: ") + str(specs))
    if len(specs) == 1:
        spec = specs[0]
        return spec
    else:
        raise Exception(_("Could not find a single spec file"))

def find_spec(path=None):
    path = path or get_root_git_dir()
    if not path:
        log.error(_('No path specified and you are not in a git repository'))
        exit(1)
    files = os.listdir(path)
    specs_present = []
    for fl in files:
        if fl.endswith('.spec'):
            specs_present.append(fl)

    if len(specs_present) == 0:
        raise Exception(_("No spec files found!"))
    elif len(specs_present) > 1:
        raise Exception(_("More than one spec file found!"))

    return specs_present[0]

def find_spec_problems(exit_on_error=True, strict=False, auto_remove=False):
    path = get_root_git_dir()
    files = os.listdir(path)

    files_present = []
    dirs_present = []
    yaml_files = []
    for fl in files:
        if fl.startswith('.'):
            continue
        if os.path.isdir(fl):
            dirs_present.append(fl)
            continue
        if fl.endswith('.spec'):
            continue
        files_present.append(fl)

    yaml_path = os.path.join(path, '.abf.yml')
    yaml_data = {'sources': {}}
    if os.path.isfile(yaml_path):
        with open(yaml_path, 'r') as fd:
            try:
                yaml_data = yaml.load(fd)
            except yaml.scanner.ScannerError, ex:
                log.error(_('Invalid yml file %(file)s!\nProblem in line %(line)d column %(col)d: %(exception)s') %
                          {'file': yaml_path, 'line': ex.problem_mark.line, 'col': ex.problem_mark.column, 'exception': ex.problem})
            except yaml.composer.ComposerError, ex:
                log.error(_('Invalid yml file %(file)s!\n%(exception)s') % {'file': yaml_path, 'exception': ex})

    if not 'sources' in yaml_data:
        log.error(_("Incorrect .abf.yml file: no 'sources' key"))
        exit(1)
    for fl in yaml_data['sources']:
        yaml_files.append(fl)

    spec_path = find_spec(path)

    for d in dirs_present:
        log.info(_("warning: directory '%s' was found") % d)
        if auto_remove:
            shutil.rmtree(os.path.join(path, d))

    res = get_project_data(spec_path)

    errors = False
    warnings = False
    files_required = []
    for fl in res['sources'] + res['patches']:
        fname, n = fl
        fname_base = os.path.basename(fname)

        files_required.append(fname_base)
        is_url = fname.startswith('http://') or fname.startswith('https://') or fname.startswith('ftp://')
        abf_url = fname.startswith('http://abf.') or fname.startswith('https://abf.') or fname.startswith('ftp://abf.')
        presents = fname_base in files_present
        in_yaml = fname_base in yaml_files

        # if is_url and in_yaml:
        #     warnings = True
        #     log.info('warning: file "%s" presents in spec (url) and in .abf.yml' % fname_base)

        if is_url and not abf_url and not presents and not in_yaml:
            warnings = True
            log.info(_('warning: file "%s" is listed in the spec as a URL, but is present neither in the current directory nor in the .abf.yml file') % fname_base)

        if presents and in_yaml:
            warnings = True
            log.info(_('warning: file "%s" is present both in the git directory and in .abf.yml') % fname_base)

        if not presents and not in_yaml and not is_url:
            errors = True
            log.info(_("error: missing file %s") % fname)

    remove_from_yaml = []
    for fl in set(files_present + yaml_files):
        if fl in files_required:
            continue # file has already been processed
        presents = fl in files_present
        in_yaml = fl in yaml_files
        if presents:
            warnings = True
            log.info(_('warning: unnecessary file "%s"') % fl)
            if auto_remove:
                os.remove(os.path.join(path, fl))
        if in_yaml:
            warnings = True
            log.info(_('warning: unnecessary file "%s" in .abf.yml') % fl)
            remove_from_yaml.append(fl)

    if auto_remove:
        for fl in remove_from_yaml:
            yaml_data['sources'].pop(fl)
        with open(yaml_path, 'w') as fd:
            yaml.dump(yaml_data, fd, default_flow_style=False)
        log.info(_('.abf.yml file was rewritten'))

    if exit_on_error and (errors or (strict and warnings)):
        exit(1)

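# The .abf.yml file consulted above maps stored source file names to their sha1
# hashes, roughly like this (file names and hashes are illustrative):
#   sources:
#     foo-1.0.tar.gz: 3f786850e387550fdab836ed7e6dc881de23001b
#     foo-logo.png: 89e6c98d92887913cadf06b2adb97f26cde4849b
#   removed_sources:
#     foo-0.9.tar.gz: 2b66fd261ee5c6cfc8de7fa466bab600bcfe4f69
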
def pack_project(root_path):
    # look for a spec file
    spec = get_spec_file(root_path)

    if spec:
        name, version = get_project_name_version(spec)
    else:
        log.error(_("Could not resolve project name and version from the spec file"))
        return
    log.debug(_("Project name is ") + str(name))
    log.debug(_("Project version is ") + str(version))

    tardir = '%s-%s' % (name, version)
    tarball = tardir + ".tar.gz"
    log.debug(_("Writing %(path)s/%(tarball)s ...") % {'path': root_path, 'tarball': tarball})
    full_tarball_path = '%s/%s' % (root_path, tarball)
    if os.path.exists(full_tarball_path):
        os.unlink(full_tarball_path)
    #open(full_tarball_path, 'w').close()
    cmd = ['tar', 'czf', full_tarball_path, '--exclude-vcs', os.path.basename(root_path)]
    try:
        execute_command(cmd, cwd=os.path.dirname(root_path), exit_on_error=False)
    except ReturnCodeNotZero, ex:
        if ex.code != 1:
            raise

    # remove other files
    files = os.listdir(root_path)
    do_not_remove = ['.git', tarball, os.path.basename(spec)]
    log.debug(_("Removing files except ") + str(do_not_remove))
    for f in files:
        if f in do_not_remove:
            continue
        f = os.path.join(root_path, f)
        log.debug('Removing ' + f)
        if os.path.isfile(f):
            os.remove(f)
        else:
            shutil.rmtree(f)

def execute_command(command, shell=False, cwd=None, timeout=0, raiseExc=True, print_to_stdout=False, exit_on_error=False):
    output = ""
    start = time.time()
    try:
        child = None
        log.debug(_("Executing command: %s") % command)
        child = subprocess.Popen(
            command,
            shell=shell,
            bufsize=0, close_fds=True,
            stdin=open("/dev/null", "r"),
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            cwd=cwd
        )
        # use select() to poll for output so we don't block
        output = logOutput([child.stdout, child.stderr],
                           start, timeout, print_to_stdout=print_to_stdout)
    except Exception, ex:
        # kill children if they aren't done
        if type(ex) == IOError and ex.errno == 4:
            print(_('Process execution has been terminated'))
            exit()
        try:
            if child is not None and child.returncode is None:
                os.killpg(child.pid, 9)
            if child is not None:
                os.waitpid(child.pid, 0)
        except:
            pass
        raise ex

    # wait until the child is done, kill it if it passes the timeout
    niceExit = 1
    while child.poll() is None:
        if (time.time() - start) > timeout and timeout != 0:
            niceExit = 0
            os.killpg(child.pid, 15)
        if (time.time() - start) > (timeout + 1) and timeout != 0:
            niceExit = 0
            os.killpg(child.pid, 9)

    if not niceExit and raiseExc:
        raise CommandTimeoutExpired(_("Timeout(%(timeout)s) expired for command:\n # %(cmd)s\n%(output)s") %
                                    {'timeout': timeout, 'cmd': command, 'output': output})
    log.debug(_("Child returncode was: %s") % str(child.returncode))
    if child.returncode:
        if exit_on_error:
            exit(1)
        if raiseExc:
            raise ReturnCodeNotZero(_("Command failed.\nReturn code: %(ret_code)s\nOutput: %(output)s") %
                                    {'ret_code': child.returncode, 'output': output}, child.returncode)

    return (output, child.returncode)

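# Usage sketch (illustrative command): run a command in a given directory and
# capture its combined stdout/stderr without raising on failure:
#   output, ret_code = execute_command(['git', 'status'], cwd='/path/to/repo', raiseExc=False)
# With raiseExc=True (the default) a non-zero exit status raises ReturnCodeNotZero,
# and a non-zero timeout kills the child and raises CommandTimeoutExpired.
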
def logOutput(fds, start=0, timeout=0, print_to_stdout=False):
    done = 0
    output = ''
    #print 'NEW CALL epoll', fds[0].fileno(), fds[1].fileno()

    # set all fds to nonblocking
    for fd in fds:
        flags = fcntl.fcntl(fd, fcntl.F_GETFL)
        if not fd.closed:
            fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)

    epoll = select.epoll()
    epoll.register(fds[0].fileno(), select.EPOLLIN)
    epoll.register(fds[1].fileno(), select.EPOLLIN)
    reg_num = 2
    try:
        done = False
        while not done:
            events = epoll.poll(1)
            for fileno, event in events:
                if event & select.EPOLLIN:
                    #print (fileno, event)
                    if fileno == fds[0].fileno():
                        r = fds[0].read()
                        #print r
                        output += r
                        if print_to_stdout:
                            sys.stdout.write(r)
                    else:
                        r = fds[1].read()
                        #print r
                        output += r
                        if print_to_stdout:
                            sys.stdout.write(r)
                elif event & select.EPOLLHUP:
                    epoll.unregister(fileno)
                    reg_num -= 1
                    if not reg_num:
                        done = True
    finally:
        epoll.close()

    return output

def is_text_file(path):
    m = magic.open(magic.MAGIC_MIME)
    m.load()
    r = m.file(path)
    log.debug(_("Magic type of file %(path)s is %(type)s") % {'path': path, 'type': r})
    if r.startswith('text'):
        return True
    return False

def fetch_files(models, yaml_path, file_names=None):
    with open(yaml_path, 'r') as fd:
        yaml_data = yaml.load(fd)
    if not 'sources' in yaml_data:
        log.error(_("Incorrect .abf.yml file: no 'sources' key."))
        exit(1)
    yaml_files = yaml_data['sources']
    if file_names:
        to_fetch = dict([(x, yaml_files[x]) for x in file_names])
    else:
        to_fetch = yaml_files
    dest_dir = os.path.dirname(yaml_path)

    for file_name in to_fetch:
        log.info(_('Fetching file %s') % file_name)
        path = os.path.join(dest_dir, file_name)
        if os.path.isfile(path):
            sha_hash_current = to_fetch[file_name]
            sha_hash_new = models.jsn.compute_sha1(path)
            if sha_hash_current == sha_hash_new:
                log.debug(_('The file %s is already present and has the correct hash') % file_name)
                continue
            else:
                log.info(_('The file %s is already present but its hash does not match the one in .abf.yml, so it will be rewritten.') % file_name)
        try:
            models.jsn.fetch_file(to_fetch[file_name], path)
        except AbfApiException, ex:
            print(_('error: ') + str(ex))

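# Usage sketch (hypothetical paths and file names): download everything listed in
# a project's .abf.yml, or only selected entries:
#   fetch_files(models, '/path/to/project/.abf.yml')
#   fetch_files(models, '/path/to/project/.abf.yml', file_names=['foo-1.0.tar.gz'])
# Files already present with a matching sha1 hash are left untouched.
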
def upload_files(models, min_size, path=None, remove_files=True):
    log.debug('Uploading files for directory ' + str(path))
    spec_path = find_spec(path)
    dir_path = os.path.dirname(spec_path)
    errors_count = 0

    yaml_path = os.path.join(dir_path, '.abf.yml')
    yaml_file_changed = False
    yaml_files = {}
    yaml_data = {'sources': {}}
    if os.path.isfile(yaml_path):
        with open(yaml_path, 'r') as fd:
            try:
                yaml_data = yaml.load(fd)
            except (yaml.composer.ComposerError, yaml.scanner.ScannerError):
                log.error(_('Could not parse the .abf.yml file. It seems to be corrupted and will be rewritten.'))
                yaml_file_changed = True
                yaml_data['sources'] = {}
    if not 'sources' in yaml_data:
        log.error(_("Incorrect .abf.yml file: no 'sources' key. The file will be rewritten."))
        yaml_file_changed = True
        yaml_data['sources'] = {}
    yaml_files = yaml_data['sources']

    try:
        sources = get_project_data(spec_path)['sources']
    except Exception, ex:
        log.error(ex)
        return 1

    for src, num in sources:
        is_url = False
        if '://' in src:
            src = os.path.basename(src)
            is_url = True

        do_not_upload = False
        source = os.path.join(dir_path, src)

        if not os.path.exists(source):
            if is_url:
                log.info(_('File %s not found, the URL will be used instead. Skipping.') % src)
                continue
            if src not in yaml_files:
                log.error(_("error: Source%(num)d file %(source)s does not exist, skipping!") % {'num': num, 'source': source})
                errors_count += 1
            else:
                log.info(_('File %s not found, but it\'s listed in .abf.yml. Skipping.') % src)
            continue
        filesize = os.stat(source).st_size
        if filesize == 0:
            log.debug(_('Size of %s is 0, skipping') % src)
            do_not_upload = True
        if filesize < min_size:
            log.debug(_('Size of %s is less than the minimal size, skipping') % src)
            do_not_upload = True
        if is_text_file(source):
            log.debug(_('File %s is textual, skipping') % src)
            do_not_upload = True
        if do_not_upload:
            # remove the file from .abf.yml
            if src in yaml_files:
                yaml_files.pop(src)
                yaml_file_changed = True
            continue
        sha_hash = models.jsn.upload_file(source)
        if src not in yaml_files or sha_hash != yaml_files[src]:
            log.debug(_('Hash for file %s has been updated') % src)
            # try to remove previous versions
            re_src = re.compile('^([\w\d\-\.]+)-([\d\.]+)\.(tar\.gz|tar\.xz|tgz|zip|tar\.bz2)$')
            res = re_src.match(src)
            if res:
                src_gr = res.groups()
                to_remove = []
                for item in yaml_files:
                    res = re_src.match(item)
                    if res:
                        gr = res.groups()
                        if gr[0] == src_gr[0]:
                            to_remove.append(item)
                for item in to_remove:
                    h = yaml_files.pop(item)
                    if 'removed_sources' not in yaml_data:
                        yaml_data['removed_sources'] = {}
                    yaml_data['removed_sources'][item] = h
                    log.info(_('Removing %(item)s:%(hash)s from .abf.yml') % {'item': item, 'hash': h})
            yaml_files[src] = sha_hash.encode()
            yaml_file_changed = True
        else:
            log.debug(_('Hash for file %s is already correct') % src)
        log.info(_('File %s has been processed') % src)

        if remove_files:
            log.debug(_('Removing file %s') % source)
            os.remove(source)

    if yaml_file_changed:
        log.debug(_('Writing the new .abf.yml file...'))
        yaml_data['sources'] = yaml_files
        with open(yaml_path, 'w') as fd:
            yaml.dump(yaml_data, fd, default_flow_style=False)

    return errors_count

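# Note on the "previous versions" cleanup above: re_src matches conventional source
# archive names such as (illustrative) 'foo-1.0.tar.gz' or 'foo-2.3.1.zip', grouping
# them as (name, version, extension). When a new 'foo-2.0.tar.gz' is uploaded, older
# entries with the same name group are moved from 'sources' to 'removed_sources'
# in .abf.yml.
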
SYMBOLS = {
    'basic'      : ('b', 'k', 'm', 'g', 't'),
    'basic_long' : ('byte', 'kilo', 'mega', 'giga', 'tera'),
    'iec'        : ('bi', 'ki', 'mi', 'gi', 'ti'),
    'iec_long'   : ('byte', 'kibi', 'mebi', 'gibi', 'tebi'),
}

def human2bytes(s):
    if s.strip() == '0':
        return 0
    init = s
    num = ""
    while s and s[0:1].isdigit() or s[0:1] == '.':
        num += s[0]
        s = s[1:]
    num = float(num)
    letter = s.strip().lower()
    ss = None
    for name, sset in SYMBOLS.items():
        if letter in sset:
            ss = sset
            break

    if not ss:
        raise ValueError(_("can't interpret %r") % init)

    prefix = {ss[0]: 1}
    for i, s in enumerate(sset[1:]):
        prefix[s] = 1 << (i + 1) * 10
    return int(num * prefix[letter])
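
# Examples of what human2bytes() accepts (values computed from the table above):
#   human2bytes('0')     == 0
#   human2bytes('512b')  == 512
#   human2bytes('10k')   == 10240
#   human2bytes('1.5m')  == 1572864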