patman: Convert camel case in tools.py

Convert this file to snake case and update all files which use it.

Signed-off-by: Simon Glass <sjg@chromium.org>
Simon Glass 2022-01-29 14:14:04 -07:00
parent 82ee8bfe51
commit c1aa66e75d
50 changed files with 473 additions and 473 deletions
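The change is a mechanical rename of the helpers in tools.py from CamelCase to snake_case, with every caller updated to match. As a quick orientation (a sketch using calls that appear in the hunks below), a caller that previously wrote:

    data = tools.ReadFile(fname)
    tools.WriteFile(out_fname, data)
    stdout = tools.Run('objdump', '-t', fname)

now writes:

    data = tools.read_file(fname)
    tools.write_file(out_fname, data)
    stdout = tools.run('objdump', '-t', fname)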


@@ -327,9 +327,9 @@ class Bintool:
"""
tmpdir = tempfile.mkdtemp(prefix='binmanf.')
print(f"- clone git repo '{git_repo}' to '{tmpdir}'")
-tools.Run('git', 'clone', '--depth', '1', git_repo, tmpdir)
+tools.run('git', 'clone', '--depth', '1', git_repo, tmpdir)
print(f"- build target '{make_target}'")
-tools.Run('make', '-C', tmpdir, '-j', f'{multiprocessing.cpu_count()}',
+tools.run('make', '-C', tmpdir, '-j', f'{multiprocessing.cpu_count()}',
make_target)
fname = os.path.join(tmpdir, bintool_path)
if not os.path.exists(fname):
@@ -349,8 +349,8 @@ class Bintool:
str: Filename of fetched file to copy to a suitable directory
str: Name of temp directory to remove, or None
"""
-fname, tmpdir = tools.Download(url)
+fname, tmpdir = tools.download(url)
-tools.Run('chmod', 'a+x', fname)
+tools.run('chmod', 'a+x', fname)
return fname, tmpdir
@classmethod
@@ -384,7 +384,7 @@ class Bintool:
"""
args = ['sudo', 'apt', 'install', '-y', package]
print('- %s' % ' '.join(args))
-tools.Run(*args)
+tools.run(*args)
return True
@staticmethod


@@ -80,7 +80,7 @@ class TestBintool(unittest.TestCase):
Args:
fake_download (function): Function to call instead of
-tools.Download()
+tools.download()
method (bintool.FETCH_...: Fetch method to use
Returns:
@@ -88,7 +88,7 @@ class TestBintool(unittest.TestCase):
"""
btest = Bintool.create('_testing')
col = terminal.Color()
-with unittest.mock.patch.object(tools, 'Download',
+with unittest.mock.patch.object(tools, 'download',
side_effect=fake_download):
with test_util.capture_sys_output() as (stdout, _):
btest.fetch_tool(method, col, False)
@@ -97,7 +97,7 @@ class TestBintool(unittest.TestCase):
def test_fetch_url_err(self):
"""Test an error while fetching a tool from a URL"""
def fail_download(url):
-"""Take the tools.Download() function by raising an exception"""
+"""Take the tools.download() function by raising an exception"""
raise urllib.error.URLError('my error')
stdout = self.check_fetch_url(fail_download, bintool.FETCH_ANY)
@@ -114,7 +114,7 @@ class TestBintool(unittest.TestCase):
def test_fetch_method(self):
"""Test fetching using a particular method"""
def fail_download(url):
-"""Take the tools.Download() function by raising an exception"""
+"""Take the tools.download() function by raising an exception"""
raise urllib.error.URLError('my error')
stdout = self.check_fetch_url(fail_download, bintool.FETCH_BIN)
@@ -123,11 +123,11 @@ class TestBintool(unittest.TestCase):
def test_fetch_pass_fail(self):
"""Test fetching multiple tools with some passing and some failing"""
def handle_download(_):
-"""Take the tools.Download() function by writing a file"""
+"""Take the tools.download() function by writing a file"""
if self.seq:
raise urllib.error.URLError('not found')
self.seq += 1
-tools.WriteFile(fname, expected)
+tools.write_file(fname, expected)
return fname, dirname
expected = b'this is a test'
@@ -140,12 +140,12 @@ class TestBintool(unittest.TestCase):
self.seq = 0
with unittest.mock.patch.object(bintool, 'DOWNLOAD_DESTDIR', destdir):
-with unittest.mock.patch.object(tools, 'Download',
+with unittest.mock.patch.object(tools, 'download',
side_effect=handle_download):
with test_util.capture_sys_output() as (stdout, _):
Bintool.fetch_tools(bintool.FETCH_ANY, ['_testing'] * 2)
self.assertTrue(os.path.exists(dest_fname))
-data = tools.ReadFile(dest_fname)
+data = tools.read_file(dest_fname)
self.assertEqual(expected, data)
lines = stdout.getvalue().splitlines()
@@ -245,14 +245,14 @@ class TestBintool(unittest.TestCase):
tmpdir = cmd[2]
self.fname = os.path.join(tmpdir, 'pathname')
if write_file:
-tools.WriteFile(self.fname, b'hello')
+tools.write_file(self.fname, b'hello')
btest = Bintool.create('_testing')
col = terminal.Color()
self.fname = None
with unittest.mock.patch.object(bintool, 'DOWNLOAD_DESTDIR',
self._indir):
-with unittest.mock.patch.object(tools, 'Run', side_effect=fake_run):
+with unittest.mock.patch.object(tools, 'run', side_effect=fake_run):
with test_util.capture_sys_output() as (stdout, _):
btest.fetch_tool(bintool.FETCH_BUILD, col, False)
fname = os.path.join(self._indir, '_testing')
@@ -275,7 +275,7 @@ class TestBintool(unittest.TestCase):
btest = Bintool.create('_testing')
btest.install = True
col = terminal.Color()
-with unittest.mock.patch.object(tools, 'Run', return_value=None):
+with unittest.mock.patch.object(tools, 'run', return_value=None):
with test_util.capture_sys_output() as _:
result = btest.fetch_tool(bintool.FETCH_BIN, col, False)
self.assertEqual(bintool.FETCHED, result)
@@ -292,8 +292,8 @@ class TestBintool(unittest.TestCase):
def test_all_bintools(self):
"""Test that all bintools can handle all available fetch types"""
def handle_download(_):
-"""Take the tools.Download() function by writing a file"""
+"""Take the tools.download() function by writing a file"""
-tools.WriteFile(fname, expected)
+tools.write_file(fname, expected)
return fname, dirname
def fake_run(*cmd):
@@ -301,15 +301,15 @@ class TestBintool(unittest.TestCase):
# See Bintool.build_from_git()
tmpdir = cmd[2]
self.fname = os.path.join(tmpdir, 'pathname')
-tools.WriteFile(self.fname, b'hello')
+tools.write_file(self.fname, b'hello')
expected = b'this is a test'
dirname = os.path.join(self._indir, 'download_dir')
os.mkdir(dirname)
fname = os.path.join(dirname, 'downloaded')
-with unittest.mock.patch.object(tools, 'Run', side_effect=fake_run):
+with unittest.mock.patch.object(tools, 'run', side_effect=fake_run):
-with unittest.mock.patch.object(tools, 'Download',
+with unittest.mock.patch.object(tools, 'download',
side_effect=handle_download):
with test_util.capture_sys_output() as _:
for name in Bintool.get_tool_list():
@@ -320,7 +320,7 @@ class TestBintool(unittest.TestCase):
if result is not True and result is not None:
result_fname, _ = result
self.assertTrue(os.path.exists(result_fname))
-data = tools.ReadFile(result_fname)
+data = tools.read_file(result_fname)
self.assertEqual(expected, data)
os.remove(result_fname)


@@ -88,8 +88,8 @@ class Bintoollz4(bintool.Bintool):
bytes: Compressed data
"""
with tempfile.NamedTemporaryFile(prefix='comp.tmp',
-dir=tools.GetOutputDir()) as tmp:
+dir=tools.get_output_dir()) as tmp:
-tools.WriteFile(tmp.name, indata)
+tools.write_file(tmp.name, indata)
args = ['--no-frame-crc', '-B4', '-5', '-c', tmp.name]
return self.run_cmd(*args, binary=True)
@@ -103,8 +103,8 @@ class Bintoollz4(bintool.Bintool):
bytes: Decompressed data
"""
with tempfile.NamedTemporaryFile(prefix='decomp.tmp',
-dir=tools.GetOutputDir()) as inf:
+dir=tools.get_output_dir()) as inf:
-tools.WriteFile(inf.name, indata)
+tools.write_file(inf.name, indata)
args = ['-cd', inf.name]
return self.run_cmd(*args, binary=True)


@@ -65,13 +65,13 @@ class Bintoollzma_alone(bintool.Bintool):
bytes: Compressed data
"""
with tempfile.NamedTemporaryFile(prefix='comp.tmp',
-dir=tools.GetOutputDir()) as inf:
+dir=tools.get_output_dir()) as inf:
-tools.WriteFile(inf.name, indata)
+tools.write_file(inf.name, indata)
with tempfile.NamedTemporaryFile(prefix='compo.otmp',
-dir=tools.GetOutputDir()) as outf:
+dir=tools.get_output_dir()) as outf:
args = ['e', inf.name, outf.name, '-lc1', '-lp0', '-pb0', '-d8']
self.run_cmd(*args, binary=True)
-return tools.ReadFile(outf.name)
+return tools.read_file(outf.name)
def decompress(self, indata):
"""Decompress data with lzma_alone
@@ -83,13 +83,13 @@ class Bintoollzma_alone(bintool.Bintool):
bytes: Decompressed data
"""
with tempfile.NamedTemporaryFile(prefix='decomp.tmp',
-dir=tools.GetOutputDir()) as inf:
+dir=tools.get_output_dir()) as inf:
-tools.WriteFile(inf.name, indata)
+tools.write_file(inf.name, indata)
with tempfile.NamedTemporaryFile(prefix='compo.otmp',
-dir=tools.GetOutputDir()) as outf:
+dir=tools.get_output_dir()) as outf:
args = ['d', inf.name, outf.name]
self.run_cmd(*args, binary=True)
-return tools.ReadFile(outf.name, binary=True)
+return tools.read_file(outf.name, binary=True)
def fetch(self, method):
"""Fetch handler for lzma_alone


@@ -189,9 +189,9 @@ def _pack_string(instr):
Returns:
String with required padding (at least one 0x00 byte) at the end
"""
-val = tools.ToBytes(instr)
+val = tools.to_bytes(instr)
pad_len = align_int(len(val) + 1, FILENAME_ALIGN)
-return val + tools.GetBytes(0, pad_len - len(val))
+return val + tools.get_bytes(0, pad_len - len(val))
class CbfsFile(object):
@@ -371,7 +371,7 @@ class CbfsFile(object):
FILE_ATTR_TAG_COMPRESSION, ATTR_COMPRESSION_LEN,
self.compress, self.memlen)
elif self.ftype == TYPE_EMPTY:
-data = tools.GetBytes(self.erase_byte, self.size)
+data = tools.get_bytes(self.erase_byte, self.size)
else:
raise ValueError('Unknown type %#x when writing\n' % self.ftype)
if attr:
@@ -388,7 +388,7 @@ class CbfsFile(object):
# possible.
raise ValueError("Internal error: CBFS file '%s': Requested offset %#x but current output position is %#x" %
(self.name, self.cbfs_offset, offset))
-pad = tools.GetBytes(pad_byte, pad_len)
+pad = tools.get_bytes(pad_byte, pad_len)
hdr_len += pad_len
# This is the offset of the start of the file's data,
@@ -414,7 +414,7 @@ class CbfsWriter(object):
Usage is something like:
cbw = CbfsWriter(size)
-cbw.add_file_raw('u-boot', tools.ReadFile('u-boot.bin'))
+cbw.add_file_raw('u-boot', tools.read_file('u-boot.bin'))
...
data, cbfs_offset = cbw.get_data_and_offset()
@@ -482,7 +482,7 @@ class CbfsWriter(object):
if fd.tell() > offset:
raise ValueError('No space for data before offset %#x (current offset %#x)' %
(offset, fd.tell()))
-fd.write(tools.GetBytes(self._erase_byte, offset - fd.tell()))
+fd.write(tools.get_bytes(self._erase_byte, offset - fd.tell()))
def _pad_to(self, fd, offset):
"""Write out pad bytes and/or an empty file until a given offset


@@ -36,7 +36,7 @@ class TestCbfs(unittest.TestCase):
def setUpClass(cls):
# Create a temporary directory for test files
cls._indir = tempfile.mkdtemp(prefix='cbfs_util.')
-tools.SetInputDirs([cls._indir])
+tools.set_input_dirs([cls._indir])
# Set up some useful data files
TestCbfs._make_input_file('u-boot.bin', U_BOOT_DATA)
@@ -45,7 +45,7 @@ class TestCbfs(unittest.TestCase):
# Set up a temporary output directory, used by the tools library when
# compressing files
-tools.PrepareOutputDir(None)
+tools.prepare_output_dir(None)
cls.cbfstool = bintool.Bintool.create('cbfstool')
cls.have_cbfstool = cls.cbfstool.is_present()
@@ -58,7 +58,7 @@ class TestCbfs(unittest.TestCase):
if cls._indir:
shutil.rmtree(cls._indir)
cls._indir = None
-tools.FinaliseOutputDir()
+tools.finalise_output_dir()
@classmethod
def _make_input_file(cls, fname, contents):
@@ -71,7 +71,7 @@ class TestCbfs(unittest.TestCase):
Full pathname of file created
"""
pathname = os.path.join(cls._indir, fname)
-tools.WriteFile(pathname, contents)
+tools.write_file(pathname, contents)
return pathname
def _check_hdr(self, data, size, offset=0, arch=cbfs_util.ARCHITECTURE_X86):
@@ -176,12 +176,12 @@ class TestCbfs(unittest.TestCase):
base = [(1 << 32) - size + b for b in base]
self.cbfstool.add_raw(
cbfs_fname, 'u-boot',
-tools.GetInputFilename(compress and 'compress' or 'u-boot.bin'),
+tools.get_input_filename(compress and 'compress' or 'u-boot.bin'),
compress[0] if compress else None,
base[0] if base else None)
self.cbfstool.add_raw(
cbfs_fname, 'u-boot-dtb',
-tools.GetInputFilename(compress and 'compress' or 'u-boot.dtb'),
+tools.get_input_filename(compress and 'compress' or 'u-boot.dtb'),
compress[1] if compress else None,
base[1] if base else None)
return cbfs_fname
@@ -198,10 +198,10 @@ class TestCbfs(unittest.TestCase):
"""
if not self.have_cbfstool or not self.have_lz4:
return
-expect = tools.ReadFile(cbfstool_fname)
+expect = tools.read_file(cbfstool_fname)
if expect != data:
-tools.WriteFile('/tmp/expect', expect)
+tools.write_file('/tmp/expect', expect)
-tools.WriteFile('/tmp/actual', data)
+tools.write_file('/tmp/actual', data)
print('diff -y <(xxd -g1 /tmp/expect) <(xxd -g1 /tmp/actual) | colordiff')
self.fail('cbfstool produced a different result')
@@ -482,7 +482,7 @@ class TestCbfs(unittest.TestCase):
size = 0xb0
cbw = CbfsWriter(size)
-cbw.add_file_stage('u-boot', tools.ReadFile(elf_fname))
+cbw.add_file_stage('u-boot', tools.read_file(elf_fname))
data = cbw.get_data()
cbfs = self._check_hdr(data, size)


@@ -258,7 +258,7 @@ def ExtractEntries(image_fname, output_fname, outdir, entry_paths,
raise ValueError('Must specify exactly one entry path to write with -f')
entry = image.FindEntryPath(entry_paths[0])
data = entry.ReadData(decomp, alt_format)
-tools.WriteFile(output_fname, data)
+tools.write_file(output_fname, data)
tout.Notice("Wrote %#x bytes to file '%s'" % (len(data), output_fname))
return
@@ -281,7 +281,7 @@ def ExtractEntries(image_fname, output_fname, outdir, entry_paths,
fname = os.path.join(fname, 'root')
tout.Notice("Write entry '%s' size %x to '%s'" %
(entry.GetPath(), len(data), fname))
-tools.WriteFile(fname, data)
+tools.write_file(fname, data)
return einfos
@@ -398,7 +398,7 @@ def ReplaceEntries(image_fname, input_fname, indir, entry_paths,
if len(entry_paths) != 1:
raise ValueError('Must specify exactly one entry path to write with -f')
entry = image.FindEntryPath(entry_paths[0])
-data = tools.ReadFile(input_fname)
+data = tools.read_file(input_fname)
tout.Notice("Read %#x bytes from file '%s'" % (len(data), input_fname))
WriteEntryToImage(image, entry, data, do_compress=do_compress,
allow_resize=allow_resize, write_map=write_map)
@@ -425,7 +425,7 @@ def ReplaceEntries(image_fname, input_fname, indir, entry_paths,
if os.path.exists(fname):
tout.Notice("Write entry '%s' from file '%s'" %
(entry.GetPath(), fname))
-data = tools.ReadFile(fname)
+data = tools.read_file(fname)
ReplaceOneEntry(image, entry, data, do_compress, allow_resize)
else:
tout.Warning("Skipping entry '%s' from missing file '%s'" %
@@ -468,8 +468,8 @@ def PrepareImagesAndDtbs(dtb_fname, select_images, update_fdt, use_expanded):
# output into a file in our output directly. Then scan it for use
# in binman.
dtb_fname = fdt_util.EnsureCompiled(dtb_fname)
-fname = tools.GetOutputFilename('u-boot.dtb.out')
+fname = tools.get_output_filename('u-boot.dtb.out')
-tools.WriteFile(fname, tools.ReadFile(dtb_fname))
+tools.write_file(fname, tools.read_file(dtb_fname))
dtb = fdt.FdtScan(fname)
node = _FindBinmanNode(dtb)
@@ -618,7 +618,7 @@ def Binman(args):
global state
if args.full_help:
-tools.PrintFullHelp(
+tools.print_full_help(
os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), 'README.rst')
)
return 0
@@ -630,7 +630,7 @@ def Binman(args):
if args.cmd in ['ls', 'extract', 'replace', 'tool']:
try:
tout.Init(args.verbosity)
-tools.PrepareOutputDir(None)
+tools.prepare_output_dir(None)
if args.cmd == 'ls':
ListEntries(args.image, args.paths)
@@ -644,7 +644,7 @@ def Binman(args):
allow_resize=not args.fix_size, write_map=args.map)
if args.cmd == 'tool':
-tools.SetToolPaths(args.toolpath)
+tools.set_tool_paths(args.toolpath)
if args.list:
bintool.Bintool.list_all()
elif args.fetch:
@@ -658,7 +658,7 @@ def Binman(args):
except:
raise
finally:
-tools.FinaliseOutputDir()
+tools.finalise_output_dir()
return 0
elf_params = None
@@ -694,9 +694,9 @@ def Binman(args):
# runtime.
use_expanded = not args.no_expanded
try:
-tools.SetInputDirs(args.indir)
+tools.set_input_dirs(args.indir)
-tools.PrepareOutputDir(args.outdir, args.preserve)
+tools.prepare_output_dir(args.outdir, args.preserve)
-tools.SetToolPaths(args.toolpath)
+tools.set_tool_paths(args.toolpath)
state.SetEntryArgs(args.entry_arg)
state.SetThreads(args.threads)
@@ -717,7 +717,7 @@ def Binman(args):
# Write the updated FDTs to our output files
for dtb_item in state.GetAllFdts():
-tools.WriteFile(dtb_item._fname, dtb_item.GetContents())
+tools.write_file(dtb_item._fname, dtb_item.GetContents())
if elf_params:
data = state.GetFdtForEtype('u-boot-dtb').GetContents()
@@ -729,7 +729,7 @@ def Binman(args):
# Use this to debug the time take to pack the image
#state.TimingShow()
finally:
-tools.FinaliseOutputDir()
+tools.finalise_output_dir()
finally:
tout.Uninit()


@@ -54,7 +54,7 @@ def GetSymbols(fname, patterns):
key: Name of symbol
value: Hex value of symbol
"""
-stdout = tools.Run('objdump', '-t', fname)
+stdout = tools.run('objdump', '-t', fname)
lines = stdout.splitlines()
if patterns:
re_syms = re.compile('|'.join(patterns))
@@ -154,7 +154,7 @@ def LookupAndWriteSymbols(elf_fname, entry, section):
entry: Entry to process
section: Section which can be used to lookup symbol values
"""
-fname = tools.GetInputFilename(elf_fname)
+fname = tools.get_input_filename(elf_fname)
syms = GetSymbols(fname, ['image', 'binman'])
if not syms:
return
@@ -282,7 +282,7 @@ SECTIONS
# text section at the start
# -m32: Build for 32-bit x86
# -T...: Specifies the link script, which sets the start address
-cc, args = tools.GetTargetCompileTool('cc')
+cc, args = tools.get_target_compile_tool('cc')
args += ['-static', '-nostdlib', '-Wl,--build-id=none', '-m32', '-T',
lds_file, '-o', elf_fname, s_file]
stdout = command.Output(cc, *args)
@@ -363,9 +363,9 @@ def UpdateFile(infile, outfile, start_sym, end_sym, insert):
raise ValueError("Not enough space in '%s' for data length %#x (%d); size is %#x (%d)" %
(infile, len(insert), len(insert), size, size))
-data = tools.ReadFile(infile)
+data = tools.read_file(infile)
newdata = data[:syms[start_sym].offset]
-newdata += insert + tools.GetBytes(0, size - len(insert))
+newdata += insert + tools.get_bytes(0, size - len(insert))
newdata += data[syms[end_sym].offset:]
-tools.WriteFile(outfile, newdata)
+tools.write_file(outfile, newdata)
tout.Info('Written to offset %#x' % syms[start_sym].offset)


@@ -27,7 +27,7 @@ class FakeEntry:
"""
def __init__(self, contents_size):
self.contents_size = contents_size
-self.data = tools.GetBytes(ord('a'), contents_size)
+self.data = tools.get_bytes(ord('a'), contents_size)
def GetPath(self):
return 'entry_path'
@@ -72,7 +72,7 @@ def BuildElfTestFiles(target_dir):
if 'MAKEFLAGS' in os.environ:
del os.environ['MAKEFLAGS']
try:
-tools.Run('make', '-C', target_dir, '-f',
+tools.run('make', '-C', target_dir, '-f',
os.path.join(testdir, 'Makefile'), 'SRC=%s/' % testdir)
except ValueError as e:
# The test system seems to suppress this in a strange way
@@ -83,7 +83,7 @@ class TestElf(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls._indir = tempfile.mkdtemp(prefix='elf.')
-tools.SetInputDirs(['.'])
+tools.set_input_dirs(['.'])
BuildElfTestFiles(cls._indir)
@classmethod
@@ -166,7 +166,7 @@ class TestElf(unittest.TestCase):
section = FakeSection(sym_value=None)
elf_fname = self.ElfTestFile('u_boot_binman_syms')
syms = elf.LookupAndWriteSymbols(elf_fname, entry, section)
-self.assertEqual(tools.GetBytes(255, 20) + tools.GetBytes(ord('a'), 4),
+self.assertEqual(tools.get_bytes(255, 20) + tools.get_bytes(ord('a'), 4),
entry.data)
def testDebug(self):
@@ -193,7 +193,7 @@ class TestElf(unittest.TestCase):
# Make an Elf file and then convert it to a fkat binary file. This
# should produce the original data.
elf.MakeElf(elf_fname, expected_text, expected_data)
-objcopy, args = tools.GetTargetCompileTool('objcopy')
+objcopy, args = tools.get_target_compile_tool('objcopy')
args += ['-O', 'binary', elf_fname, bin_fname]
stdout = command.Output(objcopy, *args)
with open(bin_fname, 'rb') as fd:
@@ -210,7 +210,7 @@ class TestElf(unittest.TestCase):
expected_data = b'wxyz'
elf_fname = os.path.join(outdir, 'elf')
elf.MakeElf(elf_fname, expected_text, expected_data)
-data = tools.ReadFile(elf_fname)
+data = tools.read_file(elf_fname)
load = 0xfef20000
entry = load + 2
@@ -231,7 +231,7 @@ class TestElf(unittest.TestCase):
offset = elf.GetSymbolFileOffset(fname, ['embed_start', 'embed_end'])
start = offset['embed_start'].offset
end = offset['embed_end'].offset
-data = tools.ReadFile(fname)
+data = tools.read_file(fname)
embed_data = data[start:end]
expect = struct.pack('<III', 0x1234, 0x5678, 0)
self.assertEqual(expect, embed_data)


@@ -14,7 +14,7 @@ from binman import bintool
from binman import comp_util
from dtoc import fdt_util
from patman import tools
-from patman.tools import ToHex, ToHexSize
+from patman.tools import to_hex, to_hex_size
from patman import tout
modules = {}
@@ -244,7 +244,7 @@ class Entry(object):
self.uncomp_size = fdt_util.GetInt(self._node, 'uncomp-size')
self.align = fdt_util.GetInt(self._node, 'align')
-if tools.NotPowerOfTwo(self.align):
+if tools.not_power_of_two(self.align):
raise ValueError("Node '%s': Alignment %s must be a power of two" %
(self._node.path, self.align))
if self.section and self.align is None:
@@ -252,7 +252,7 @@ class Entry(object):
self.pad_before = fdt_util.GetInt(self._node, 'pad-before', 0)
self.pad_after = fdt_util.GetInt(self._node, 'pad-after', 0)
self.align_size = fdt_util.GetInt(self._node, 'align-size')
-if tools.NotPowerOfTwo(self.align_size):
+if tools.not_power_of_two(self.align_size):
self.Raise("Alignment size %s must be a power of two" %
self.align_size)
self.align_end = fdt_util.GetInt(self._node, 'align-end')
@@ -397,12 +397,12 @@ class Entry(object):
# Don't let the data shrink. Pad it if necessary
if size_ok and new_size < self.contents_size:
-data += tools.GetBytes(0, self.contents_size - new_size)
+data += tools.get_bytes(0, self.contents_size - new_size)
if not size_ok:
tout.Debug("Entry '%s' size change from %s to %s" % (
-self._node.path, ToHex(self.contents_size),
+self._node.path, to_hex(self.contents_size),
-ToHex(new_size)))
+to_hex(new_size)))
self.SetContents(data)
return size_ok
@@ -419,8 +419,8 @@ class Entry(object):
def ResetForPack(self):
"""Reset offset/size fields so that packing can be done again"""
self.Detail('ResetForPack: offset %s->%s, size %s->%s' %
-(ToHex(self.offset), ToHex(self.orig_offset),
+(to_hex(self.offset), to_hex(self.orig_offset),
-ToHex(self.size), ToHex(self.orig_size)))
+to_hex(self.size), to_hex(self.orig_size)))
self.pre_reset_size = self.size
self.offset = self.orig_offset
self.size = self.orig_size
@@ -444,20 +444,20 @@ class Entry(object):
New section offset pointer (after this entry)
"""
self.Detail('Packing: offset=%s, size=%s, content_size=%x' %
-(ToHex(self.offset), ToHex(self.size),
+(to_hex(self.offset), to_hex(self.size),
self.contents_size))
if self.offset is None:
if self.offset_unset:
self.Raise('No offset set with offset-unset: should another '
'entry provide this correct offset?')
-self.offset = tools.Align(offset, self.align)
+self.offset = tools.align(offset, self.align)
needed = self.pad_before + self.contents_size + self.pad_after
-needed = tools.Align(needed, self.align_size)
+needed = tools.align(needed, self.align_size)
size = self.size
if not size:
size = needed
new_offset = self.offset + size
-aligned_offset = tools.Align(new_offset, self.align_end)
+aligned_offset = tools.align(new_offset, self.align_end)
if aligned_offset != new_offset:
size = aligned_offset - self.offset
new_offset = aligned_offset
@@ -471,10 +471,10 @@ class Entry(object):
# Check that the alignment is correct. It could be wrong if the
# and offset or size values were provided (i.e. not calculated), but
# conflict with the provided alignment values
-if self.size != tools.Align(self.size, self.align_size):
+if self.size != tools.align(self.size, self.align_size):
self.Raise("Size %#x (%d) does not match align-size %#x (%d)" %
(self.size, self.size, self.align_size, self.align_size))
-if self.offset != tools.Align(self.offset, self.align):
+if self.offset != tools.align(self.offset, self.align):
self.Raise("Offset %#x (%d) does not match align %#x (%d)" %
(self.offset, self.offset, self.align, self.align))
self.Detail(' - packed: offset=%#x, size=%#x, content_size=%#x, next_offset=%x' %
@@ -541,7 +541,7 @@ class Entry(object):
bytes content of the entry, excluding any padding. If the entry is
compressed, the compressed data is returned
"""
-self.Detail('GetData: size %s' % ToHexSize(self.data))
+self.Detail('GetData: size %s' % to_hex_size(self.data))
return self.data
def GetPaddedData(self, data=None):
@@ -991,7 +991,7 @@ features to produce new behaviours.
fname (str): Filename of faked file
"""
if self.allow_fake and not pathlib.Path(fname).is_file():
-outfname = tools.GetOutputFilename(os.path.basename(fname))
+outfname = tools.get_output_filename(os.path.basename(fname))
with open(outfname, "wb") as out:
out.truncate(1024)
self.faked = True


@@ -17,10 +17,10 @@ from patman import tools
class TestEntry(unittest.TestCase):
def setUp(self):
-tools.PrepareOutputDir(None)
+tools.prepare_output_dir(None)
def tearDown(self):
-tools.FinaliseOutputDir()
+tools.finalise_output_dir()
def GetNode(self):
binman_dir = os.path.dirname(os.path.realpath(sys.argv[0]))


@@ -181,7 +181,7 @@ class Entry_atf_fip(Entry_section):
self._pad_byte = fdt_util.GetInt(self._node, 'pad-byte', 0)
self._fip_flags = fdt_util.GetInt64(self._node, 'fip-hdr-flags', 0)
self._fip_align = fdt_util.GetInt(self._node, 'fip-align', 1)
-if tools.NotPowerOfTwo(self._fip_align):
+if tools.not_power_of_two(self._fip_align):
raise ValueError("Node '%s': FIP alignment %s must be a power of two" %
(self._node.path, self._fip_align))
self.ReadEntries()


@@ -37,7 +37,7 @@ class Entry_blob(Entry):
def ObtainContents(self):
self._filename = self.GetDefaultFilename()
-self._pathname = tools.GetInputFilename(self._filename,
+self._pathname = tools.get_input_filename(self._filename,
self.external and self.section.GetAllowMissing())
# Allow the file to be missing
if not self._pathname:
@@ -68,7 +68,7 @@ class Entry_blob(Entry):
bytes: Data read
"""
state.TimingStart('read')
-indata = tools.ReadFile(pathname)
+indata = tools.read_file(pathname)
state.TimingAccum('read')
state.TimingStart('compress')
data = self.CompressData(indata)


@@ -38,7 +38,7 @@ class Entry_blob_ext_list(Entry_blob):
pathnames = []
for fname in self._filenames:
fname = self.check_fake_fname(fname)
-pathname = tools.GetInputFilename(
+pathname = tools.get_input_filename(
fname, self.external and self.section.GetAllowMissing())
# Allow the file to be missing
if not pathname:


@@ -140,7 +140,7 @@ class Entry_fdtmap(Entry):
fdt.pack()
outfdt = Fdt.FromData(fdt.as_bytearray())
data = outfdt.GetContents()
-data = FDTMAP_MAGIC + tools.GetBytes(0, 8) + data
+data = FDTMAP_MAGIC + tools.get_bytes(0, 8) + data
return data
def ObtainContents(self):


@@ -47,7 +47,7 @@ class Entry_files(Entry_section):
'require-matches')
def ExpandEntries(self):
-files = tools.GetInputFilenameGlob(self._pattern)
+files = tools.get_input_filename_glob(self._pattern)
if self._require_matches and not files:
self.Raise("Pattern '%s' matched no files" % self._pattern)
for fname in files:


@@ -31,5 +31,5 @@ class Entry_fill(Entry):
self.fill_value = fdt_util.GetByte(self._node, 'fill-byte', 0)
def ObtainContents(self):
-self.SetContents(tools.GetBytes(self.fill_value, self.size))
+self.SetContents(tools.get_bytes(self.fill_value, self.size))
return True


@@ -200,19 +200,19 @@ class Entry_fit(Entry):
for seq, fdt_fname in enumerate(self._fdts):
node_name = subnode.name[1:].replace('SEQ',
str(seq + 1))
-fname = tools.GetInputFilename(fdt_fname + '.dtb')
+fname = tools.get_input_filename(fdt_fname + '.dtb')
with fsw.add_node(node_name):
for pname, prop in subnode.props.items():
val = prop.bytes.replace(
-b'NAME', tools.ToBytes(fdt_fname))
+b'NAME', tools.to_bytes(fdt_fname))
val = val.replace(
-b'SEQ', tools.ToBytes(str(seq + 1)))
+b'SEQ', tools.to_bytes(str(seq + 1)))
fsw.property(pname, val)
# Add data for 'fdt' nodes (but not 'config')
if depth == 1 and in_images:
fsw.property('data',
-tools.ReadFile(fname))
+tools.read_file(fname))
else:
if self._fdts is None:
if self._fit_list_prop:
@@ -246,10 +246,10 @@ class Entry_fit(Entry):
# self._BuildInput() either returns bytes or raises an exception.
data = self._BuildInput(self._fdt)
uniq = self.GetUniqueName()
-input_fname = tools.GetOutputFilename('%s.itb' % uniq)
+input_fname = tools.get_output_filename('%s.itb' % uniq)
-output_fname = tools.GetOutputFilename('%s.fit' % uniq)
+output_fname = tools.get_output_filename('%s.fit' % uniq)
-tools.WriteFile(input_fname, data)
+tools.write_file(input_fname, data)
-tools.WriteFile(output_fname, data)
+tools.write_file(output_fname, data)
args = {}
ext_offset = self._fit_props.get('fit,external-offset')
@@ -260,11 +260,11 @@ class Entry_fit(Entry):
}
if self.mkimage.run(reset_timestamp=True, output_fname=output_fname,
**args) is not None:
-self.SetContents(tools.ReadFile(output_fname))
+self.SetContents(tools.read_file(output_fname))
else:
# Bintool is missing; just use empty data as the output
self.record_missing_bintool(self.mkimage)
-self.SetContents(tools.GetBytes(0, 1024))
+self.SetContents(tools.get_bytes(0, 1024))
return True


@@ -8,7 +8,7 @@
from binman.entry import Entry
from binman import fmap_util
from patman import tools
-from patman.tools import ToHexSize
+from patman.tools import to_hex_size
from patman import tout
@@ -47,7 +47,7 @@ class Entry_fmap(Entry):
def _AddEntries(areas, entry):
entries = entry.GetEntries()
tout.Debug("fmap: Add entry '%s' type '%s' (%s subentries)" %
-(entry.GetPath(), entry.etype, ToHexSize(entries)))
+(entry.GetPath(), entry.etype, to_hex_size(entries)))
if entries and entry.etype != 'cbfs':
# Create an area for the section, which encompasses all entries
# within it


@@ -70,14 +70,14 @@ class Entry_gbb(Entry):
def ObtainContents(self):
gbb = 'gbb.bin'
-fname = tools.GetOutputFilename(gbb)
+fname = tools.get_output_filename(gbb)
if not self.size:
self.Raise('GBB must have a fixed size')
gbb_size = self.size
bmpfv_size = gbb_size - 0x2180
if bmpfv_size < 0:
self.Raise('GBB is too small (minimum 0x2180 bytes)')
-keydir = tools.GetInputFilename(self.keydir)
+keydir = tools.get_input_filename(self.keydir)
stdout = self.futility.gbb_create(
fname, [0x100, 0x1000, bmpfv_size, 0x1000])
@@ -88,14 +88,14 @@ class Entry_gbb(Entry):
rootkey='%s/root_key.vbpubk' % keydir,
recoverykey='%s/recovery_key.vbpubk' % keydir,
flags=self.gbb_flags,
-bmpfv=tools.GetInputFilename(self.bmpblk))
+bmpfv=tools.get_input_filename(self.bmpblk))
if stdout is not None:
-self.SetContents(tools.ReadFile(fname))
+self.SetContents(tools.read_file(fname))
else:
# Bintool is missing; just use the required amount of zero data
self.record_missing_bintool(self.futility)
-self.SetContents(tools.GetBytes(0, gbb_size))
+self.SetContents(tools.get_bytes(0, gbb_size))
return True


@@ -58,11 +58,11 @@ class Entry_intel_ifwi(Entry_blob_ext):
# Create the IFWI file if needed
if self._convert_fit:
inname = self._pathname
-outname = tools.GetOutputFilename('ifwi.bin')
+outname = tools.get_output_filename('ifwi.bin')
if self.ifwitool.create_ifwi(inname, outname) is None:
# Bintool is missing; just create a zeroed ifwi.bin
self.record_missing_bintool(self.ifwitool)
-self.SetContents(tools.GetBytes(0, 1024))
+self.SetContents(tools.get_bytes(0, 1024))
self._filename = 'ifwi.bin'
self._pathname = outname
@@ -74,15 +74,15 @@ class Entry_intel_ifwi(Entry_blob_ext):
if self.ifwitool.delete_subpart(outname, 'OBBP') is None:
# Bintool is missing; just use zero data
self.record_missing_bintool(self.ifwitool)
-self.SetContents(tools.GetBytes(0, 1024))
+self.SetContents(tools.get_bytes(0, 1024))
return True
for entry in self._ifwi_entries.values():
# First get the input data and put it in a file
data = entry.GetPaddedData()
uniq = self.GetUniqueName()
-input_fname = tools.GetOutputFilename('input.%s' % uniq)
+input_fname = tools.get_output_filename('input.%s' % uniq)
-tools.WriteFile(input_fname, data)
+tools.write_file(input_fname, data)
# At this point we know that ifwitool is present, so we don't need
# to check for None here
@@ -107,7 +107,7 @@ class Entry_intel_ifwi(Entry_blob_ext):
After that we delete the OBBP sub-partition and add each of the files
that we want in the IFWI file, one for each sub-entry of the IWFI node.
"""
-self._pathname = tools.GetInputFilename(self._filename,
+self._pathname = tools.get_input_filename(self._filename,
self.section.GetAllowMissing())
# Allow the file to be missing
if not self._pathname:


@@ -48,12 +48,12 @@ class Entry_mkimage(Entry):
return False
data += entry.GetData()
uniq = self.GetUniqueName()
-input_fname = tools.GetOutputFilename('mkimage.%s' % uniq)
+input_fname = tools.get_output_filename('mkimage.%s' % uniq)
-tools.WriteFile(input_fname, data)
+tools.write_file(input_fname, data)
-output_fname = tools.GetOutputFilename('mkimage-out.%s' % uniq)
+output_fname = tools.get_output_filename('mkimage-out.%s' % uniq)
if self.mkimage.run_cmd('-d', input_fname, *self._args,
output_fname) is not None:
-self.SetContents(tools.ReadFile(output_fname))
+self.SetContents(tools.read_file(output_fname))
else:
# Bintool is missing; just use the input data as the output
self.record_missing_bintool(self.mkimage)


@@ -19,7 +19,7 @@ from binman import state
from dtoc import fdt_util
from patman import tools
from patman import tout
-from patman.tools import ToHexSize
+from patman.tools import to_hex_size
class Entry_section(Entry):
@@ -269,19 +269,19 @@ class Entry_section(Entry):
data = bytearray()
# Handle padding before the entry
if entry.pad_before:
-data += tools.GetBytes(self._pad_byte, entry.pad_before)
+data += tools.get_bytes(self._pad_byte, entry.pad_before)
# Add in the actual entry data
data += entry_data
# Handle padding after the entry
if entry.pad_after:
-data += tools.GetBytes(self._pad_byte, entry.pad_after)
+data += tools.get_bytes(self._pad_byte, entry.pad_after)
if entry.size:
-data += tools.GetBytes(pad_byte, entry.size - len(data))
+data += tools.get_bytes(pad_byte, entry.size - len(data))
-self.Detail('GetPaddedDataForEntry: size %s' % ToHexSize(self.data))
+self.Detail('GetPaddedDataForEntry: size %s' % to_hex_size(self.data))
return data
@@ -316,7 +316,7 @@ class Entry_section(Entry):
# Handle empty space before the entry
pad = (entry.offset or 0) - self._skip_at_start - len(section_data)
if pad > 0:
-section_data += tools.GetBytes(self._pad_byte, pad)
+section_data += tools.get_bytes(self._pad_byte, pad)
# Add in the actual entry data
section_data += data
@@ -709,14 +709,14 @@ class Entry_section(Entry):
if not size:
data = self.GetPaddedData(self.data)
size = len(data)
-size = tools.Align(size, self.align_size)
+size = tools.align(size, self.align_size)
if self.size and contents_size > self.size:
self._Raise("contents size %#x (%d) exceeds section size %#x (%d)" %
(contents_size, contents_size, self.size, self.size))
if not self.size:
self.size = size
-if self.size != tools.Align(self.size, self.align_size):
+if self.size != tools.align(self.size, self.align_size):
self._Raise("Size %#x (%d) does not match align-size %#x (%d)" %
(self.size, self.size, self.align_size,
self.align_size))


@@ -60,14 +60,14 @@ class Entry_text(Entry):
super().__init__(section, etype, node)
value = fdt_util.GetString(self._node, 'text')
if value:
-value = tools.ToBytes(value)
+value = tools.to_bytes(value)
else:
label, = self.GetEntryArgsOrProps([EntryArg('text-label', str)])
self.text_label = label
if self.text_label:
value, = self.GetEntryArgsOrProps([EntryArg(self.text_label,
str)])
-value = tools.ToBytes(value) if value is not None else value
+value = tools.to_bytes(value) if value is not None else value
self.value = value
def ObtainContents(self):


@@ -27,9 +27,9 @@ class Entry_u_boot_elf(Entry_blob):
def ReadBlobContents(self):
if self._strip:
uniq = self.GetUniqueName()
-out_fname = tools.GetOutputFilename('%s.stripped' % uniq)
+out_fname = tools.get_output_filename('%s.stripped' % uniq)
-tools.WriteFile(out_fname, tools.ReadFile(self._pathname))
+tools.write_file(out_fname, tools.read_file(self._pathname))
-tools.Run('strip', out_fname)
+tools.run('strip', out_fname)
self._pathname = out_fname
super().ReadBlobContents()
return True


@@ -27,7 +27,7 @@ class Entry_u_boot_env(Entry_blob):
self.fill_value = fdt_util.GetByte(self._node, 'fill-byte', 0)
def ReadBlobContents(self):
-indata = tools.ReadFile(self._pathname)
+indata = tools.read_file(self._pathname)
data = b''
for line in indata.splitlines():
data += line + b'\0'
@@ -35,7 +35,7 @@ class Entry_u_boot_env(Entry_blob):
pad = self.size - len(data) - 5
if pad < 0:
self.Raise("'u-boot-env' entry too small to hold data (need %#x more bytes)" % -pad)
-data += tools.GetBytes(self.fill_value, pad)
+data += tools.get_bytes(self.fill_value, pad)
crc = zlib.crc32(data)
buf = struct.pack('<I', crc) + b'\x01' + data
self.SetContents(buf)


@@ -36,9 +36,9 @@ class Entry_u_boot_spl_bss_pad(Entry_blob):
super().__init__(section, etype, node)
def ObtainContents(self):
-fname = tools.GetInputFilename('spl/u-boot-spl')
+fname = tools.get_input_filename('spl/u-boot-spl')
bss_size = elf.GetSymbolAddress(fname, '__bss_size')
if not bss_size:
self.Raise('Expected __bss_size symbol in spl/u-boot-spl')
-self.SetContents(tools.GetBytes(0, bss_size))
+self.SetContents(tools.get_bytes(0, bss_size))
return True


@@ -36,9 +36,9 @@ class Entry_u_boot_tpl_bss_pad(Entry_blob):
super().__init__(section, etype, node)
def ObtainContents(self):
-fname = tools.GetInputFilename('tpl/u-boot-tpl')
+fname = tools.get_input_filename('tpl/u-boot-tpl')
bss_size = elf.GetSymbolAddress(fname, '__bss_size')
if not bss_size:
self.Raise('Expected __bss_size symbol in tpl/u-boot-tpl')
-self.SetContents(tools.GetBytes(0, bss_size))
+self.SetContents(tools.get_bytes(0, bss_size))
return True


@@ -92,8 +92,8 @@ class Entry_u_boot_ucode(Entry_blob):
return True
# Write it out to a file
-self._pathname = tools.GetOutputFilename('u-boot-ucode.bin')
+self._pathname = tools.get_output_filename('u-boot-ucode.bin')
-tools.WriteFile(self._pathname, fdt_entry.ucode_data)
+tools.write_file(self._pathname, fdt_entry.ucode_data)
self.ReadBlobContents()


@@ -38,7 +38,7 @@ class Entry_u_boot_with_ucode_ptr(Entry_blob):
def ProcessFdt(self, fdt):
# Figure out where to put the microcode pointer
-fname = tools.GetInputFilename(self.elf_fname)
+fname = tools.get_input_filename(self.elf_fname)
sym = elf.GetSymbolAddress(fname, '_dt_ucode_base_size')
if sym:
self.target_offset = sym
@ -65,9 +65,9 @@ class Entry_vblock(Entry_collection):
return None return None
uniq = self.GetUniqueName() uniq = self.GetUniqueName()
output_fname = tools.GetOutputFilename('vblock.%s' % uniq) output_fname = tools.get_output_filename('vblock.%s' % uniq)
input_fname = tools.GetOutputFilename('input.%s' % uniq) input_fname = tools.get_output_filename('input.%s' % uniq)
tools.WriteFile(input_fname, input_data) tools.write_file(input_fname, input_data)
prefix = self.keydir + '/' prefix = self.keydir + '/'
stdout = self.futility.sign_firmware( stdout = self.futility.sign_firmware(
vblock=output_fname, vblock=output_fname,
@ -78,11 +78,11 @@ class Entry_vblock(Entry_collection):
kernelkey=prefix + self.kernelkey, kernelkey=prefix + self.kernelkey,
flags=f'{self.preamble_flags}') flags=f'{self.preamble_flags}')
if stdout is not None: if stdout is not None:
data = tools.ReadFile(output_fname) data = tools.read_file(output_fname)
else: else:
# Bintool is missing; just use 4KB of zero data # Bintool is missing; just use 4KB of zero data
self.record_missing_bintool(self.futility) self.record_missing_bintool(self.futility)
data = tools.GetBytes(0, 4096) data = tools.get_bytes(0, 4096)
return data return data
def ObtainContents(self): def ObtainContents(self):
@ -19,11 +19,11 @@ class TestFdt(unittest.TestCase):
def setUpClass(self): def setUpClass(self):
self._binman_dir = os.path.dirname(os.path.realpath(sys.argv[0])) self._binman_dir = os.path.dirname(os.path.realpath(sys.argv[0]))
self._indir = tempfile.mkdtemp(prefix='binmant.') self._indir = tempfile.mkdtemp(prefix='binmant.')
tools.PrepareOutputDir(self._indir, True) tools.prepare_output_dir(self._indir, True)
@classmethod @classmethod
def tearDownClass(self): def tearDownClass(self):
tools._FinaliseForTest() tools._finalise_for_test()
def TestFile(self, fname): def TestFile(self, fname):
return os.path.join(self._binman_dir, 'test', fname) return os.path.join(self._binman_dir, 'test', fname)
@ -248,7 +248,7 @@ class FipEntry:
self.flags = flags self.flags = flags
self.fip_type = None self.fip_type = None
self.data = None self.data = None
self.valid = uuid != tools.GetBytes(0, UUID_LEN) self.valid = uuid != tools.get_bytes(0, UUID_LEN)
if self.valid: if self.valid:
# Look up the friendly name # Look up the friendly name
matches = {val for (key, val) in FIP_TYPES.items() matches = {val for (key, val) in FIP_TYPES.items()
@ -309,7 +309,7 @@ class FipWriter:
Usage is something like: Usage is something like:
fip = FipWriter(size) fip = FipWriter(size)
fip.add_entry('scp-fwu-cfg', tools.ReadFile('something.bin')) fip.add_entry('scp-fwu-cfg', tools.read_file('something.bin'))
... ...
data = fip.get_data() data = fip.get_data()
@ -354,7 +354,7 @@ class FipWriter:
offset += ENTRY_SIZE # terminating entry offset += ENTRY_SIZE # terminating entry
for fent in self._fip_entries: for fent in self._fip_entries:
offset = tools.Align(offset, self._align) offset = tools.align(offset, self._align)
fent.offset = offset fent.offset = offset
offset += fent.size offset += fent.size
@ -443,7 +443,7 @@ def parse_macros(srcdir):
re_uuid = re.compile('0x[0-9a-fA-F]{2}') re_uuid = re.compile('0x[0-9a-fA-F]{2}')
re_comment = re.compile(r'^/\* (.*) \*/$') re_comment = re.compile(r'^/\* (.*) \*/$')
fname = os.path.join(srcdir, 'include/tools_share/firmware_image_package.h') fname = os.path.join(srcdir, 'include/tools_share/firmware_image_package.h')
data = tools.ReadFile(fname, binary=False) data = tools.read_file(fname, binary=False)
macros = collections.OrderedDict() macros = collections.OrderedDict()
comment = None comment = None
for linenum, line in enumerate(data.splitlines()): for linenum, line in enumerate(data.splitlines()):
@ -489,7 +489,7 @@ def parse_names(srcdir):
re_data = re.compile(r'\.name = "([^"]*)",\s*\.uuid = (UUID_\w*),\s*\.cmdline_name = "([^"]+)"', re_data = re.compile(r'\.name = "([^"]*)",\s*\.uuid = (UUID_\w*),\s*\.cmdline_name = "([^"]+)"',
re.S) re.S)
fname = os.path.join(srcdir, 'tools/fiptool/tbbr_config.c') fname = os.path.join(srcdir, 'tools/fiptool/tbbr_config.c')
data = tools.ReadFile(fname, binary=False) data = tools.read_file(fname, binary=False)
# Example entry: # Example entry:
# { # {
@ -574,21 +574,21 @@ def parse_atf_source(srcdir, dstfile, oldfile):
raise ValueError( raise ValueError(
f"Expected file '{readme_fname}' - try using -s to specify the " f"Expected file '{readme_fname}' - try using -s to specify the "
'arm-trusted-firmware directory') 'arm-trusted-firmware directory')
readme = tools.ReadFile(readme_fname, binary=False) readme = tools.read_file(readme_fname, binary=False)
first_line = 'Trusted Firmware-A' first_line = 'Trusted Firmware-A'
if readme.splitlines()[0] != first_line: if readme.splitlines()[0] != first_line:
raise ValueError(f"'{readme_fname}' does not start with '{first_line}'") raise ValueError(f"'{readme_fname}' does not start with '{first_line}'")
macros = parse_macros(srcdir) macros = parse_macros(srcdir)
names = parse_names(srcdir) names = parse_names(srcdir)
output = create_code_output(macros, names) output = create_code_output(macros, names)
orig = tools.ReadFile(oldfile, binary=False) orig = tools.read_file(oldfile, binary=False)
re_fip_list = re.compile(r'(.*FIP_TYPE_LIST = \[).*?( ] # end.*)', re.S) re_fip_list = re.compile(r'(.*FIP_TYPE_LIST = \[).*?( ] # end.*)', re.S)
mat = re_fip_list.match(orig) mat = re_fip_list.match(orig)
new_code = mat.group(1) + '\n' + output + mat.group(2) if mat else output new_code = mat.group(1) + '\n' + output + mat.group(2) if mat else output
if new_code == orig: if new_code == orig:
print(f"Existing code in '{oldfile}' is up-to-date") print(f"Existing code in '{oldfile}' is up-to-date")
else: else:
tools.WriteFile(dstfile, new_code, binary=False) tools.write_file(dstfile, new_code, binary=False)
print(f'Needs update, try:\n\tmeld {dstfile} {oldfile}') print(f'Needs update, try:\n\tmeld {dstfile} {oldfile}')
@ -35,14 +35,14 @@ class TestFip(unittest.TestCase):
def setUp(self): def setUp(self):
# Create a temporary directory for test files # Create a temporary directory for test files
self._indir = tempfile.mkdtemp(prefix='fip_util.') self._indir = tempfile.mkdtemp(prefix='fip_util.')
tools.SetInputDirs([self._indir]) tools.set_input_dirs([self._indir])
# Set up a temporary output directory, used by the tools library when # Set up a temporary output directory, used by the tools library when
# compressing files # compressing files
tools.PrepareOutputDir(None) tools.prepare_output_dir(None)
self.src_file = os.path.join(self._indir, 'orig.py') self.src_file = os.path.join(self._indir, 'orig.py')
self.outname = tools.GetOutputFilename('out.py') self.outname = tools.get_output_filename('out.py')
self.args = ['-D', '-s', self._indir, '-o', self.outname] self.args = ['-D', '-s', self._indir, '-o', self.outname]
self.readme = os.path.join(self._indir, 'readme.rst') self.readme = os.path.join(self._indir, 'readme.rst')
self.macro_dir = os.path.join(self._indir, 'include/tools_share') self.macro_dir = os.path.join(self._indir, 'include/tools_share')
@ -78,25 +78,25 @@ toc_entry_t toc_entries[] = {
def setup_readme(self): def setup_readme(self):
"""Set up the readme.txt file""" """Set up the readme.txt file"""
tools.WriteFile(self.readme, 'Trusted Firmware-A\n==================', tools.write_file(self.readme, 'Trusted Firmware-A\n==================',
binary=False) binary=False)
def setup_macro(self, data=macro_contents): def setup_macro(self, data=macro_contents):
"""Set up the tbbr_config.c file""" """Set up the tbbr_config.c file"""
os.makedirs(self.macro_dir) os.makedirs(self.macro_dir)
tools.WriteFile(self.macro_fname, data, binary=False) tools.write_file(self.macro_fname, data, binary=False)
def setup_name(self, data=name_contents): def setup_name(self, data=name_contents):
"""Set up the firmware_image_package.h file""" """Set up the firmware_image_package.h file"""
os.makedirs(self.name_dir) os.makedirs(self.name_dir)
tools.WriteFile(self.name_fname, data, binary=False) tools.write_file(self.name_fname, data, binary=False)
def tearDown(self): def tearDown(self):
"""Remove the temporary input directory and its contents""" """Remove the temporary input directory and its contents"""
if self._indir: if self._indir:
shutil.rmtree(self._indir) shutil.rmtree(self._indir)
self._indir = None self._indir = None
tools.FinaliseOutputDir() tools.finalise_output_dir()
def test_no_readme(self): def test_no_readme(self):
"""Test handling of a missing readme.rst""" """Test handling of a missing readme.rst"""
@ -106,7 +106,7 @@ toc_entry_t toc_entries[] = {
def test_invalid_readme(self): def test_invalid_readme(self):
"""Test that an invalid readme.rst is detected""" """Test that an invalid readme.rst is detected"""
tools.WriteFile(self.readme, 'blah', binary=False) tools.write_file(self.readme, 'blah', binary=False)
with self.assertRaises(Exception) as err: with self.assertRaises(Exception) as err:
fip_util.main(self.args, self.src_file) fip_util.main(self.args, self.src_file)
self.assertIn('does not start with', str(err.exception)) self.assertIn('does not start with', str(err.exception))
@ -228,7 +228,7 @@ toc_entry_t toc_entries[] = {
self.setup_name() self.setup_name()
# Check generating the file when changes are needed # Check generating the file when changes are needed
tools.WriteFile(self.src_file, ''' tools.write_file(self.src_file, '''
# This is taken from tbbr_config.c in ARM Trusted Firmware # This is taken from tbbr_config.c in ARM Trusted Firmware
FIP_TYPE_LIST = [ FIP_TYPE_LIST = [
@ -244,7 +244,7 @@ blah de blah
self.assertIn('Needs update', stdout.getvalue()) self.assertIn('Needs update', stdout.getvalue())
# Check generating the file when no changes are needed # Check generating the file when no changes are needed
tools.WriteFile(self.src_file, ''' tools.write_file(self.src_file, '''
# This is taken from tbbr_config.c in ARM Trusted Firmware # This is taken from tbbr_config.c in ARM Trusted Firmware
FIP_TYPE_LIST = [ FIP_TYPE_LIST = [
# ToC Entry UUIDs # ToC Entry UUIDs
@ -268,7 +268,7 @@ blah blah''', binary=False)
args = self.args.copy() args = self.args.copy()
args.remove('-D') args.remove('-D')
tools.WriteFile(self.src_file, '', binary=False) tools.write_file(self.src_file, '', binary=False)
with test_util.capture_sys_output(): with test_util.capture_sys_output():
fip_util.main(args, self.src_file) fip_util.main(args, self.src_file)
@ -282,8 +282,8 @@ blah blah''', binary=False)
fip.add_entry('tb-fw', tb_fw, 0) fip.add_entry('tb-fw', tb_fw, 0)
fip.add_entry(bytes(range(16)), tb_fw, 0) fip.add_entry(bytes(range(16)), tb_fw, 0)
data = fip.get_data() data = fip.get_data()
fname = tools.GetOutputFilename('data.fip') fname = tools.get_output_filename('data.fip')
tools.WriteFile(fname, data) tools.write_file(fname, data)
result = FIPTOOL.info(fname) result = FIPTOOL.info(fname)
self.assertEqual( self.assertEqual(
'''Firmware Updater NS_BL2U: offset=0xB0, size=0x7, cmdline="--fwu" '''Firmware Updater NS_BL2U: offset=0xB0, size=0x7, cmdline="--fwu"
@ -303,19 +303,19 @@ Trusted Boot Firmware BL2: offset=0xC0, size=0xE, cmdline="--tb-fw"
FipReader: reader for the image FipReader: reader for the image
""" """
fwu = os.path.join(self._indir, 'fwu') fwu = os.path.join(self._indir, 'fwu')
tools.WriteFile(fwu, self.fwu_data) tools.write_file(fwu, self.fwu_data)
tb_fw = os.path.join(self._indir, 'tb_fw') tb_fw = os.path.join(self._indir, 'tb_fw')
tools.WriteFile(tb_fw, self.tb_fw_data) tools.write_file(tb_fw, self.tb_fw_data)
other_fw = os.path.join(self._indir, 'other_fw') other_fw = os.path.join(self._indir, 'other_fw')
tools.WriteFile(other_fw, self.other_fw_data) tools.write_file(other_fw, self.other_fw_data)
fname = tools.GetOutputFilename('data.fip') fname = tools.get_output_filename('data.fip')
uuid = 'e3b78d9e-4a64-11ec-b45c-fba2b9b49788' uuid = 'e3b78d9e-4a64-11ec-b45c-fba2b9b49788'
FIPTOOL.create_new(fname, 8, 0x123, fwu, tb_fw, uuid, other_fw) FIPTOOL.create_new(fname, 8, 0x123, fwu, tb_fw, uuid, other_fw)
return fip_util.FipReader(tools.ReadFile(fname)) return fip_util.FipReader(tools.read_file(fname))
@unittest.skipIf(not HAVE_FIPTOOL, 'No fiptool available') @unittest.skipIf(not HAVE_FIPTOOL, 'No fiptool available')
def test_fiptool_create(self): def test_fiptool_create(self):
@ -70,7 +70,7 @@ def ConvertName(field_names, fields):
value: value of that field (string for the ones we support) value: value of that field (string for the ones we support)
""" """
name_index = field_names.index('name') name_index = field_names.index('name')
fields[name_index] = tools.ToBytes(NameToFmap(fields[name_index])) fields[name_index] = tools.to_bytes(NameToFmap(fields[name_index]))
def DecodeFmap(data): def DecodeFmap(data):
"""Decode a flashmap into a header and list of areas """Decode a flashmap into a header and list of areas
@ -174,7 +174,7 @@ class TestFunctional(unittest.TestCase):
# ELF file with a '_dt_ucode_base_size' symbol # ELF file with a '_dt_ucode_base_size' symbol
TestFunctional._MakeInputFile('u-boot', TestFunctional._MakeInputFile('u-boot',
tools.ReadFile(cls.ElfTestFile('u_boot_ucode_ptr'))) tools.read_file(cls.ElfTestFile('u_boot_ucode_ptr')))
# Intel flash descriptor file # Intel flash descriptor file
cls._SetupDescriptor() cls._SetupDescriptor()
@ -236,7 +236,7 @@ class TestFunctional(unittest.TestCase):
if self.preserve_outdirs: if self.preserve_outdirs:
print('Preserving output dir: %s' % tools.outdir) print('Preserving output dir: %s' % tools.outdir)
else: else:
tools._FinaliseForTest() tools._finalise_for_test()
def setUp(self): def setUp(self):
# Enable this to turn on debugging output # Enable this to turn on debugging output
@ -262,10 +262,10 @@ class TestFunctional(unittest.TestCase):
Temporary directory to use Temporary directory to use
New image filename New image filename
""" """
image_fname = tools.GetOutputFilename('image.bin') image_fname = tools.get_output_filename('image.bin')
tmpdir = tempfile.mkdtemp(prefix='binman.') tmpdir = tempfile.mkdtemp(prefix='binman.')
updated_fname = os.path.join(tmpdir, 'image-updated.bin') updated_fname = os.path.join(tmpdir, 'image-updated.bin')
tools.WriteFile(updated_fname, tools.ReadFile(image_fname)) tools.write_file(updated_fname, tools.read_file(image_fname))
self._CleanupOutputDir() self._CleanupOutputDir()
return tmpdir, updated_fname return tmpdir, updated_fname
@ -492,14 +492,14 @@ class TestFunctional(unittest.TestCase):
use_expanded=use_expanded, extra_indirs=extra_indirs, use_expanded=use_expanded, extra_indirs=extra_indirs,
threads=threads) threads=threads)
self.assertEqual(0, retcode) self.assertEqual(0, retcode)
out_dtb_fname = tools.GetOutputFilename('u-boot.dtb.out') out_dtb_fname = tools.get_output_filename('u-boot.dtb.out')
# Find the (only) image, read it and return its contents # Find the (only) image, read it and return its contents
image = control.images['image'] image = control.images['image']
image_fname = tools.GetOutputFilename('image.bin') image_fname = tools.get_output_filename('image.bin')
self.assertTrue(os.path.exists(image_fname)) self.assertTrue(os.path.exists(image_fname))
if map: if map:
map_fname = tools.GetOutputFilename('image.map') map_fname = tools.get_output_filename('image.map')
with open(map_fname) as fd: with open(map_fname) as fd:
map_data = fd.read() map_data = fd.read()
else: else:
@ -578,7 +578,7 @@ class TestFunctional(unittest.TestCase):
Filename of ELF file to use as SPL Filename of ELF file to use as SPL
""" """
TestFunctional._MakeInputFile('spl/u-boot-spl', TestFunctional._MakeInputFile('spl/u-boot-spl',
tools.ReadFile(cls.ElfTestFile(src_fname))) tools.read_file(cls.ElfTestFile(src_fname)))
@classmethod @classmethod
def _SetupTplElf(cls, src_fname='bss_data'): def _SetupTplElf(cls, src_fname='bss_data'):
@ -588,7 +588,7 @@ class TestFunctional(unittest.TestCase):
Filename of ELF file to use as TPL Filename of ELF file to use as TPL
""" """
TestFunctional._MakeInputFile('tpl/u-boot-tpl', TestFunctional._MakeInputFile('tpl/u-boot-tpl',
tools.ReadFile(cls.ElfTestFile(src_fname))) tools.read_file(cls.ElfTestFile(src_fname)))
@classmethod @classmethod
def _SetupDescriptor(cls): def _SetupDescriptor(cls):
@ -756,7 +756,7 @@ class TestFunctional(unittest.TestCase):
image = control.images['image1'] image = control.images['image1']
self.assertEqual(len(U_BOOT_DATA), image.size) self.assertEqual(len(U_BOOT_DATA), image.size)
fname = tools.GetOutputFilename('image1.bin') fname = tools.get_output_filename('image1.bin')
self.assertTrue(os.path.exists(fname)) self.assertTrue(os.path.exists(fname))
with open(fname, 'rb') as fd: with open(fname, 'rb') as fd:
data = fd.read() data = fd.read()
@ -764,13 +764,13 @@ class TestFunctional(unittest.TestCase):
image = control.images['image2'] image = control.images['image2']
self.assertEqual(3 + len(U_BOOT_DATA) + 5, image.size) self.assertEqual(3 + len(U_BOOT_DATA) + 5, image.size)
fname = tools.GetOutputFilename('image2.bin') fname = tools.get_output_filename('image2.bin')
self.assertTrue(os.path.exists(fname)) self.assertTrue(os.path.exists(fname))
with open(fname, 'rb') as fd: with open(fname, 'rb') as fd:
data = fd.read() data = fd.read()
self.assertEqual(U_BOOT_DATA, data[3:7]) self.assertEqual(U_BOOT_DATA, data[3:7])
self.assertEqual(tools.GetBytes(0, 3), data[:3]) self.assertEqual(tools.get_bytes(0, 3), data[:3])
self.assertEqual(tools.GetBytes(0, 5), data[7:]) self.assertEqual(tools.get_bytes(0, 5), data[7:])
def testBadAlign(self): def testBadAlign(self):
"""Test that an invalid alignment value is detected""" """Test that an invalid alignment value is detected"""
@ -838,8 +838,8 @@ class TestFunctional(unittest.TestCase):
self.assertEqual(3, entry.pad_before) self.assertEqual(3, entry.pad_before)
self.assertEqual(3 + 5 + len(U_BOOT_DATA), entry.size) self.assertEqual(3 + 5 + len(U_BOOT_DATA), entry.size)
self.assertEqual(U_BOOT_DATA, entry.data) self.assertEqual(U_BOOT_DATA, entry.data)
self.assertEqual(tools.GetBytes(0, 3) + U_BOOT_DATA + self.assertEqual(tools.get_bytes(0, 3) + U_BOOT_DATA +
tools.GetBytes(0, 5), data[:entry.size]) tools.get_bytes(0, 5), data[:entry.size])
pos = entry.size pos = entry.size
# Second u-boot has an aligned size, but it has no effect # Second u-boot has an aligned size, but it has no effect
@ -857,7 +857,7 @@ class TestFunctional(unittest.TestCase):
self.assertEqual(pos, entry.offset) self.assertEqual(pos, entry.offset)
self.assertEqual(32, entry.size) self.assertEqual(32, entry.size)
self.assertEqual(U_BOOT_DATA, entry.data) self.assertEqual(U_BOOT_DATA, entry.data)
self.assertEqual(U_BOOT_DATA + tools.GetBytes(0, 32 - len(U_BOOT_DATA)), self.assertEqual(U_BOOT_DATA + tools.get_bytes(0, 32 - len(U_BOOT_DATA)),
data[pos:pos + entry.size]) data[pos:pos + entry.size])
pos += entry.size pos += entry.size
@ -867,7 +867,7 @@ class TestFunctional(unittest.TestCase):
self.assertEqual(48, entry.offset) self.assertEqual(48, entry.offset)
self.assertEqual(16, entry.size) self.assertEqual(16, entry.size)
self.assertEqual(U_BOOT_DATA, entry.data[:len(U_BOOT_DATA)]) self.assertEqual(U_BOOT_DATA, entry.data[:len(U_BOOT_DATA)])
self.assertEqual(U_BOOT_DATA + tools.GetBytes(0, 16 - len(U_BOOT_DATA)), self.assertEqual(U_BOOT_DATA + tools.get_bytes(0, 16 - len(U_BOOT_DATA)),
data[pos:pos + entry.size]) data[pos:pos + entry.size])
pos += entry.size pos += entry.size
@ -877,7 +877,7 @@ class TestFunctional(unittest.TestCase):
self.assertEqual(64, entry.offset) self.assertEqual(64, entry.offset)
self.assertEqual(64, entry.size) self.assertEqual(64, entry.size)
self.assertEqual(U_BOOT_DATA, entry.data[:len(U_BOOT_DATA)]) self.assertEqual(U_BOOT_DATA, entry.data[:len(U_BOOT_DATA)])
self.assertEqual(U_BOOT_DATA + tools.GetBytes(0, 64 - len(U_BOOT_DATA)), self.assertEqual(U_BOOT_DATA + tools.get_bytes(0, 64 - len(U_BOOT_DATA)),
data[pos:pos + entry.size]) data[pos:pos + entry.size])
self.CheckNoGaps(entries) self.CheckNoGaps(entries)
@ -997,7 +997,7 @@ class TestFunctional(unittest.TestCase):
"""Test that the image pad byte can be specified""" """Test that the image pad byte can be specified"""
self._SetupSplElf() self._SetupSplElf()
data = self._DoReadFile('021_image_pad.dts') data = self._DoReadFile('021_image_pad.dts')
self.assertEqual(U_BOOT_SPL_DATA + tools.GetBytes(0xff, 1) + self.assertEqual(U_BOOT_SPL_DATA + tools.get_bytes(0xff, 1) +
U_BOOT_DATA, data) U_BOOT_DATA, data)
def testImageName(self): def testImageName(self):
@ -1005,11 +1005,11 @@ class TestFunctional(unittest.TestCase):
retcode = self._DoTestFile('022_image_name.dts') retcode = self._DoTestFile('022_image_name.dts')
self.assertEqual(0, retcode) self.assertEqual(0, retcode)
image = control.images['image1'] image = control.images['image1']
fname = tools.GetOutputFilename('test-name') fname = tools.get_output_filename('test-name')
self.assertTrue(os.path.exists(fname)) self.assertTrue(os.path.exists(fname))
image = control.images['image2'] image = control.images['image2']
fname = tools.GetOutputFilename('test-name.xx') fname = tools.get_output_filename('test-name.xx')
self.assertTrue(os.path.exists(fname)) self.assertTrue(os.path.exists(fname))
def testBlobFilename(self): def testBlobFilename(self):
@ -1021,8 +1021,8 @@ class TestFunctional(unittest.TestCase):
"""Test that entries can be sorted""" """Test that entries can be sorted"""
self._SetupSplElf() self._SetupSplElf()
data = self._DoReadFile('024_sorted.dts') data = self._DoReadFile('024_sorted.dts')
self.assertEqual(tools.GetBytes(0, 1) + U_BOOT_SPL_DATA + self.assertEqual(tools.get_bytes(0, 1) + U_BOOT_SPL_DATA +
tools.GetBytes(0, 2) + U_BOOT_DATA, data) tools.get_bytes(0, 2) + U_BOOT_DATA, data)
def testPackZeroOffset(self): def testPackZeroOffset(self):
"""Test that an entry at offset 0 is not given a new offset""" """Test that an entry at offset 0 is not given a new offset"""
@ -1065,8 +1065,8 @@ class TestFunctional(unittest.TestCase):
"""Test that a basic x86 ROM can be created""" """Test that a basic x86 ROM can be created"""
self._SetupSplElf() self._SetupSplElf()
data = self._DoReadFile('029_x86_rom.dts') data = self._DoReadFile('029_x86_rom.dts')
self.assertEqual(U_BOOT_DATA + tools.GetBytes(0, 3) + U_BOOT_SPL_DATA + self.assertEqual(U_BOOT_DATA + tools.get_bytes(0, 3) + U_BOOT_SPL_DATA +
tools.GetBytes(0, 2), data) tools.get_bytes(0, 2), data)
def testPackX86RomMeNoDesc(self): def testPackX86RomMeNoDesc(self):
"""Test that an invalid Intel descriptor entry is detected""" """Test that an invalid Intel descriptor entry is detected"""
@ -1090,7 +1090,7 @@ class TestFunctional(unittest.TestCase):
def testPackX86RomMe(self): def testPackX86RomMe(self):
"""Test that an x86 ROM with an ME region can be created""" """Test that an x86 ROM with an ME region can be created"""
data = self._DoReadFile('031_x86_rom_me.dts') data = self._DoReadFile('031_x86_rom_me.dts')
expected_desc = tools.ReadFile(self.TestFile('descriptor.bin')) expected_desc = tools.read_file(self.TestFile('descriptor.bin'))
if data[:0x1000] != expected_desc: if data[:0x1000] != expected_desc:
self.fail('Expected descriptor binary at start of image') self.fail('Expected descriptor binary at start of image')
self.assertEqual(ME_DATA, data[0x1000:0x1000 + len(ME_DATA)]) self.assertEqual(ME_DATA, data[0x1000:0x1000 + len(ME_DATA)])
@ -1139,7 +1139,7 @@ class TestFunctional(unittest.TestCase):
fdt_len = self.GetFdtLen(dtb_with_ucode) fdt_len = self.GetFdtLen(dtb_with_ucode)
ucode_content = dtb_with_ucode[fdt_len:] ucode_content = dtb_with_ucode[fdt_len:]
ucode_pos = len(nodtb_data) + fdt_len ucode_pos = len(nodtb_data) + fdt_len
fname = tools.GetOutputFilename('test.dtb') fname = tools.get_output_filename('test.dtb')
with open(fname, 'wb') as fd: with open(fname, 'wb') as fd:
fd.write(dtb_with_ucode) fd.write(dtb_with_ucode)
dtb = fdt.FdtScan(fname) dtb = fdt.FdtScan(fname)
@ -1244,7 +1244,7 @@ class TestFunctional(unittest.TestCase):
# ELF file without a '_dt_ucode_base_size' symbol # ELF file without a '_dt_ucode_base_size' symbol
try: try:
TestFunctional._MakeInputFile('u-boot', TestFunctional._MakeInputFile('u-boot',
tools.ReadFile(self.ElfTestFile('u_boot_no_ucode_ptr'))) tools.read_file(self.ElfTestFile('u_boot_no_ucode_ptr')))
with self.assertRaises(ValueError) as e: with self.assertRaises(ValueError) as e:
self._RunPackUbootSingleMicrocode() self._RunPackUbootSingleMicrocode()
@ -1254,7 +1254,7 @@ class TestFunctional(unittest.TestCase):
finally: finally:
# Put the original file back # Put the original file back
TestFunctional._MakeInputFile('u-boot', TestFunctional._MakeInputFile('u-boot',
tools.ReadFile(self.ElfTestFile('u_boot_ucode_ptr'))) tools.read_file(self.ElfTestFile('u_boot_ucode_ptr')))
def testMicrocodeNotInImage(self): def testMicrocodeNotInImage(self):
"""Test that microcode must be placed within the image""" """Test that microcode must be placed within the image"""
@ -1267,7 +1267,7 @@ class TestFunctional(unittest.TestCase):
def testWithoutMicrocode(self): def testWithoutMicrocode(self):
"""Test that we can cope with an image without microcode (e.g. qemu)""" """Test that we can cope with an image without microcode (e.g. qemu)"""
TestFunctional._MakeInputFile('u-boot', TestFunctional._MakeInputFile('u-boot',
tools.ReadFile(self.ElfTestFile('u_boot_no_ucode_ptr'))) tools.read_file(self.ElfTestFile('u_boot_no_ucode_ptr')))
data, dtb, _, _ = self._DoReadFileDtb('044_x86_optional_ucode.dts', True) data, dtb, _, _ = self._DoReadFileDtb('044_x86_optional_ucode.dts', True)
# Now check the device tree has no microcode # Now check the device tree has no microcode
@ -1279,7 +1279,7 @@ class TestFunctional(unittest.TestCase):
used_len = len(U_BOOT_NODTB_DATA) + fdt_len used_len = len(U_BOOT_NODTB_DATA) + fdt_len
third = data[used_len:] third = data[used_len:]
self.assertEqual(tools.GetBytes(0, 0x200 - used_len), third) self.assertEqual(tools.get_bytes(0, 0x200 - used_len), third)
def testUnknownPosSize(self): def testUnknownPosSize(self):
"""Test that microcode must be placed within the image""" """Test that microcode must be placed within the image"""
@ -1308,7 +1308,7 @@ class TestFunctional(unittest.TestCase):
# ELF file with a '__bss_size' symbol # ELF file with a '__bss_size' symbol
self._SetupSplElf() self._SetupSplElf()
data = self._DoReadFile('047_spl_bss_pad.dts') data = self._DoReadFile('047_spl_bss_pad.dts')
self.assertEqual(U_BOOT_SPL_DATA + tools.GetBytes(0, 10) + U_BOOT_DATA, self.assertEqual(U_BOOT_SPL_DATA + tools.get_bytes(0, 10) + U_BOOT_DATA,
data) data)
def testSplBssPadMissing(self): def testSplBssPadMissing(self):
@ -1404,7 +1404,7 @@ class TestFunctional(unittest.TestCase):
u_boot_offset + len(U_BOOT_DATA), u_boot_offset + len(U_BOOT_DATA),
0x10 + u_boot_offset, 0x04) 0x10 + u_boot_offset, 0x04)
expected = (sym_values + base_data[20:] + expected = (sym_values + base_data[20:] +
tools.GetBytes(0xff, 1) + U_BOOT_DATA + sym_values + tools.get_bytes(0xff, 1) + U_BOOT_DATA + sym_values +
base_data[20:]) base_data[20:])
self.assertEqual(expected, data) self.assertEqual(expected, data)
@ -1426,9 +1426,9 @@ class TestFunctional(unittest.TestCase):
def testSections(self): def testSections(self):
"""Basic test of sections""" """Basic test of sections"""
data = self._DoReadFile('055_sections.dts') data = self._DoReadFile('055_sections.dts')
expected = (U_BOOT_DATA + tools.GetBytes(ord('!'), 12) + expected = (U_BOOT_DATA + tools.get_bytes(ord('!'), 12) +
U_BOOT_DATA + tools.GetBytes(ord('a'), 12) + U_BOOT_DATA + tools.get_bytes(ord('a'), 12) +
U_BOOT_DATA + tools.GetBytes(ord('&'), 4)) U_BOOT_DATA + tools.get_bytes(ord('&'), 4))
self.assertEqual(expected, data) self.assertEqual(expected, data)
def testMap(self): def testMap(self):
@ -1593,9 +1593,9 @@ class TestFunctional(unittest.TestCase):
} }
data, _, _, _ = self._DoReadFileDtb('066_text.dts', data, _, _, _ = self._DoReadFileDtb('066_text.dts',
entry_args=entry_args) entry_args=entry_args)
expected = (tools.ToBytes(TEXT_DATA) + expected = (tools.to_bytes(TEXT_DATA) +
tools.GetBytes(0, 8 - len(TEXT_DATA)) + tools.get_bytes(0, 8 - len(TEXT_DATA)) +
tools.ToBytes(TEXT_DATA2) + tools.ToBytes(TEXT_DATA3) + tools.to_bytes(TEXT_DATA2) + tools.to_bytes(TEXT_DATA3) +
b'some text' + b'more text') b'some text' + b'more text')
self.assertEqual(expected, data) self.assertEqual(expected, data)
@ -1617,8 +1617,8 @@ class TestFunctional(unittest.TestCase):
"""Basic test of generation of a flashrom fmap""" """Basic test of generation of a flashrom fmap"""
data = self._DoReadFile('067_fmap.dts') data = self._DoReadFile('067_fmap.dts')
fhdr, fentries = fmap_util.DecodeFmap(data[32:]) fhdr, fentries = fmap_util.DecodeFmap(data[32:])
expected = (U_BOOT_DATA + tools.GetBytes(ord('!'), 12) + expected = (U_BOOT_DATA + tools.get_bytes(ord('!'), 12) +
U_BOOT_DATA + tools.GetBytes(ord('a'), 12)) U_BOOT_DATA + tools.get_bytes(ord('a'), 12))
self.assertEqual(expected, data[:32]) self.assertEqual(expected, data[:32])
self.assertEqual(b'__FMAP__', fhdr.signature) self.assertEqual(b'__FMAP__', fhdr.signature)
self.assertEqual(1, fhdr.ver_major) self.assertEqual(1, fhdr.ver_major)
@ -1670,7 +1670,7 @@ class TestFunctional(unittest.TestCase):
def testFill(self): def testFill(self):
"""Test for an fill entry type""" """Test for an fill entry type"""
data = self._DoReadFile('069_fill.dts') data = self._DoReadFile('069_fill.dts')
expected = tools.GetBytes(0xff, 8) + tools.GetBytes(0, 8) expected = tools.get_bytes(0xff, 8) + tools.get_bytes(0, 8)
self.assertEqual(expected, data) self.assertEqual(expected, data)
def testFillNoSize(self): def testFillNoSize(self):
@ -1700,8 +1700,8 @@ class TestFunctional(unittest.TestCase):
data, _, _, _ = self._DoReadFileDtb('071_gbb.dts', entry_args=entry_args) data, _, _, _ = self._DoReadFileDtb('071_gbb.dts', entry_args=entry_args)
# Since futility # Since futility
expected = (GBB_DATA + GBB_DATA + tools.GetBytes(0, 8) + expected = (GBB_DATA + GBB_DATA + tools.get_bytes(0, 8) +
tools.GetBytes(0, 0x2180 - 16)) tools.get_bytes(0, 0x2180 - 16))
self.assertEqual(expected, data) self.assertEqual(expected, data)
def testGbbTooSmall(self): def testGbbTooSmall(self):
@ -1751,7 +1751,7 @@ class TestFunctional(unittest.TestCase):
if self._hash_data: if self._hash_data:
infile = pipe_list[0][11] infile = pipe_list[0][11]
m = hashlib.sha256() m = hashlib.sha256()
data = tools.ReadFile(infile) data = tools.read_file(infile)
m.update(data) m.update(data)
fd.write(m.digest()) fd.write(m.digest())
else: else:
@ -1845,7 +1845,7 @@ class TestFunctional(unittest.TestCase):
def testFillZero(self): def testFillZero(self):
"""Test for an fill entry type with a size of 0""" """Test for an fill entry type with a size of 0"""
data = self._DoReadFile('080_fill_empty.dts') data = self._DoReadFile('080_fill_empty.dts')
self.assertEqual(tools.GetBytes(0, 16), data) self.assertEqual(tools.get_bytes(0, 16), data)
def testTextMissing(self): def testTextMissing(self):
"""Test for a text entry type where there is no text""" """Test for a text entry type where there is no text"""
@ -1875,8 +1875,8 @@ class TestFunctional(unittest.TestCase):
else: else:
self.assertNotIn(expected, stdout.getvalue()) self.assertNotIn(expected, stdout.getvalue())
self.assertFalse(os.path.exists(tools.GetOutputFilename('image1.bin'))) self.assertFalse(os.path.exists(tools.get_output_filename('image1.bin')))
self.assertTrue(os.path.exists(tools.GetOutputFilename('image2.bin'))) self.assertTrue(os.path.exists(tools.get_output_filename('image2.bin')))
self._CleanupOutputDir() self._CleanupOutputDir()
def testUpdateFdtAll(self): def testUpdateFdtAll(self):
@ -1933,8 +1933,8 @@ class TestFunctional(unittest.TestCase):
'tpl/u-boot-tpl.dtb.out']: 'tpl/u-boot-tpl.dtb.out']:
dtb = fdt.Fdt.FromData(data[start:]) dtb = fdt.Fdt.FromData(data[start:])
size = dtb._fdt_obj.totalsize() size = dtb._fdt_obj.totalsize()
pathname = tools.GetOutputFilename(os.path.split(fname)[1]) pathname = tools.get_output_filename(os.path.split(fname)[1])
outdata = tools.ReadFile(pathname) outdata = tools.read_file(pathname)
name = os.path.split(fname)[0] name = os.path.split(fname)[0]
if name: if name:
@ -2027,10 +2027,10 @@ class TestFunctional(unittest.TestCase):
"""Test an expanding entry""" """Test an expanding entry"""
data, _, map_data, _ = self._DoReadFileDtb('088_expand_size.dts', data, _, map_data, _ = self._DoReadFileDtb('088_expand_size.dts',
map=True) map=True)
expect = (tools.GetBytes(ord('a'), 8) + U_BOOT_DATA + expect = (tools.get_bytes(ord('a'), 8) + U_BOOT_DATA +
MRC_DATA + tools.GetBytes(ord('b'), 1) + U_BOOT_DATA + MRC_DATA + tools.get_bytes(ord('b'), 1) + U_BOOT_DATA +
tools.GetBytes(ord('c'), 8) + U_BOOT_DATA + tools.get_bytes(ord('c'), 8) + U_BOOT_DATA +
tools.GetBytes(ord('d'), 8)) tools.get_bytes(ord('d'), 8))
self.assertEqual(expect, data) self.assertEqual(expect, data)
self.assertEqual('''ImagePos Offset Size Name self.assertEqual('''ImagePos Offset Size Name
00000000 00000000 00000028 main-section 00000000 00000000 00000028 main-section
@ -2085,7 +2085,7 @@ class TestFunctional(unittest.TestCase):
hash_node = dtb.GetNode('/binman/section/hash').props['value'] hash_node = dtb.GetNode('/binman/section/hash').props['value']
m = hashlib.sha256() m = hashlib.sha256()
m.update(U_BOOT_DATA) m.update(U_BOOT_DATA)
m.update(tools.GetBytes(ord('a'), 16)) m.update(tools.get_bytes(ord('a'), 16))
self.assertEqual(m.digest(), b''.join(hash_node.value)) self.assertEqual(m.digest(), b''.join(hash_node.value))
def testPackUBootTplMicrocode(self): def testPackUBootTplMicrocode(self):
@ -2107,7 +2107,7 @@ class TestFunctional(unittest.TestCase):
"""Basic test of generation of a flashrom fmap""" """Basic test of generation of a flashrom fmap"""
data = self._DoReadFile('094_fmap_x86.dts') data = self._DoReadFile('094_fmap_x86.dts')
fhdr, fentries = fmap_util.DecodeFmap(data[32:]) fhdr, fentries = fmap_util.DecodeFmap(data[32:])
expected = U_BOOT_DATA + MRC_DATA + tools.GetBytes(ord('a'), 32 - 7) expected = U_BOOT_DATA + MRC_DATA + tools.get_bytes(ord('a'), 32 - 7)
self.assertEqual(expected, data[:32]) self.assertEqual(expected, data[:32])
fhdr, fentries = fmap_util.DecodeFmap(data[32:]) fhdr, fentries = fmap_util.DecodeFmap(data[32:])
@ -2129,7 +2129,7 @@ class TestFunctional(unittest.TestCase):
def testFmapX86Section(self): def testFmapX86Section(self):
"""Basic test of generation of a flashrom fmap""" """Basic test of generation of a flashrom fmap"""
data = self._DoReadFile('095_fmap_x86_section.dts') data = self._DoReadFile('095_fmap_x86_section.dts')
expected = U_BOOT_DATA + MRC_DATA + tools.GetBytes(ord('b'), 32 - 7) expected = U_BOOT_DATA + MRC_DATA + tools.get_bytes(ord('b'), 32 - 7)
self.assertEqual(expected, data[:32]) self.assertEqual(expected, data[:32])
fhdr, fentries = fmap_util.DecodeFmap(data[36:]) fhdr, fentries = fmap_util.DecodeFmap(data[36:])
@ -2177,14 +2177,14 @@ class TestFunctional(unittest.TestCase):
with test_util.capture_sys_output() as (stdout, stderr): with test_util.capture_sys_output() as (stdout, stderr):
with self.assertRaises(ValueError) as e: with self.assertRaises(ValueError) as e:
self._DoTestFile('014_pack_overlap.dts', map=True) self._DoTestFile('014_pack_overlap.dts', map=True)
map_fname = tools.GetOutputFilename('image.map') map_fname = tools.get_output_filename('image.map')
self.assertEqual("Wrote map file '%s' to show errors\n" % map_fname, self.assertEqual("Wrote map file '%s' to show errors\n" % map_fname,
stdout.getvalue()) stdout.getvalue())
# We should not get an image, but there should be a map file # We should not get an image, but there should be a map file
self.assertFalse(os.path.exists(tools.GetOutputFilename('image.bin'))) self.assertFalse(os.path.exists(tools.get_output_filename('image.bin')))
self.assertTrue(os.path.exists(map_fname)) self.assertTrue(os.path.exists(map_fname))
map_data = tools.ReadFile(map_fname, binary=False) map_data = tools.read_file(map_fname, binary=False)
self.assertEqual('''ImagePos Offset Size Name self.assertEqual('''ImagePos Offset Size Name
<none> 00000000 00000008 main-section <none> 00000000 00000008 main-section
<none> 00000000 00000004 u-boot <none> 00000000 00000004 u-boot
@ -2210,12 +2210,12 @@ class TestFunctional(unittest.TestCase):
0000002c 00000000 00000004 u-boot 0000002c 00000000 00000004 u-boot
''', map_data) ''', map_data)
self.assertEqual(data, self.assertEqual(data,
tools.GetBytes(0x26, 4) + U_BOOT_DATA + tools.get_bytes(0x26, 4) + U_BOOT_DATA +
tools.GetBytes(0x21, 12) + tools.get_bytes(0x21, 12) +
tools.GetBytes(0x26, 4) + U_BOOT_DATA + tools.get_bytes(0x26, 4) + U_BOOT_DATA +
tools.GetBytes(0x61, 12) + tools.get_bytes(0x61, 12) +
tools.GetBytes(0x26, 4) + U_BOOT_DATA + tools.get_bytes(0x26, 4) + U_BOOT_DATA +
tools.GetBytes(0x26, 8)) tools.get_bytes(0x26, 8))
def testCbfsRaw(self): def testCbfsRaw(self):
"""Test base handling of a Coreboot Filesystem (CBFS) """Test base handling of a Coreboot Filesystem (CBFS)
@ -2332,17 +2332,17 @@ class TestFunctional(unittest.TestCase):
Args: Args:
data: Contents of output file data: Contents of output file
""" """
expected_desc = tools.ReadFile(self.TestFile('descriptor.bin')) expected_desc = tools.read_file(self.TestFile('descriptor.bin'))
if data[:0x1000] != expected_desc: if data[:0x1000] != expected_desc:
self.fail('Expected descriptor binary at start of image') self.fail('Expected descriptor binary at start of image')
# We expect to find the TPL in subpart IBBP entry IBBL # We expect to find the TPL in subpart IBBP entry IBBL
image_fname = tools.GetOutputFilename('image.bin') image_fname = tools.get_output_filename('image.bin')
tpl_fname = tools.GetOutputFilename('tpl.out') tpl_fname = tools.get_output_filename('tpl.out')
ifwitool = bintool.Bintool.create('ifwitool') ifwitool = bintool.Bintool.create('ifwitool')
ifwitool.extract(image_fname, 'IBBP', 'IBBL', tpl_fname) ifwitool.extract(image_fname, 'IBBP', 'IBBL', tpl_fname)
tpl_data = tools.ReadFile(tpl_fname) tpl_data = tools.read_file(tpl_fname)
self.assertEqual(U_BOOT_TPL_DATA, tpl_data[:len(U_BOOT_TPL_DATA)]) self.assertEqual(U_BOOT_TPL_DATA, tpl_data[:len(U_BOOT_TPL_DATA)])
def testPackX86RomIfwi(self): def testPackX86RomIfwi(self):
@ -2403,7 +2403,7 @@ class TestFunctional(unittest.TestCase):
fdtmap_data = data[len(U_BOOT_DATA):] fdtmap_data = data[len(U_BOOT_DATA):]
magic = fdtmap_data[:8] magic = fdtmap_data[:8]
self.assertEqual(b'_FDTMAP_', magic) self.assertEqual(b'_FDTMAP_', magic)
self.assertEqual(tools.GetBytes(0, 8), fdtmap_data[8:16]) self.assertEqual(tools.get_bytes(0, 8), fdtmap_data[8:16])
fdt_data = fdtmap_data[16:] fdt_data = fdtmap_data[16:]
dtb = fdt.Fdt.FromData(fdt_data) dtb = fdt.Fdt.FromData(fdt_data)
@ -2668,7 +2668,7 @@ class TestFunctional(unittest.TestCase):
"""Test reading an image and accessing its FDT map""" """Test reading an image and accessing its FDT map"""
self._CheckLz4() self._CheckLz4()
data = self.data = self._DoReadFileRealDtb('128_decode_image.dts') data = self.data = self._DoReadFileRealDtb('128_decode_image.dts')
image_fname = tools.GetOutputFilename('image.bin') image_fname = tools.get_output_filename('image.bin')
orig_image = control.images['image'] orig_image = control.images['image']
image = Image.FromFile(image_fname) image = Image.FromFile(image_fname)
self.assertEqual(orig_image.GetEntries().keys(), self.assertEqual(orig_image.GetEntries().keys(),
@ -2684,7 +2684,7 @@ class TestFunctional(unittest.TestCase):
"""Test accessing an image's FDT map without an image header""" """Test accessing an image's FDT map without an image header"""
self._CheckLz4() self._CheckLz4()
data = self._DoReadFileRealDtb('129_decode_image_nohdr.dts') data = self._DoReadFileRealDtb('129_decode_image_nohdr.dts')
image_fname = tools.GetOutputFilename('image.bin') image_fname = tools.get_output_filename('image.bin')
image = Image.FromFile(image_fname) image = Image.FromFile(image_fname)
self.assertTrue(isinstance(image, Image)) self.assertTrue(isinstance(image, Image))
self.assertEqual('image', image.image_name[-5:]) self.assertEqual('image', image.image_name[-5:])
@ -2692,7 +2692,7 @@ class TestFunctional(unittest.TestCase):
def testReadImageFail(self): def testReadImageFail(self):
"""Test failing to read an image image's FDT map""" """Test failing to read an image image's FDT map"""
self._DoReadFile('005_simple.dts') self._DoReadFile('005_simple.dts')
image_fname = tools.GetOutputFilename('image.bin') image_fname = tools.get_output_filename('image.bin')
with self.assertRaises(ValueError) as e: with self.assertRaises(ValueError) as e:
image = Image.FromFile(image_fname) image = Image.FromFile(image_fname)
self.assertIn("Cannot find FDT map in image", str(e.exception)) self.assertIn("Cannot find FDT map in image", str(e.exception))
@ -2752,7 +2752,7 @@ class TestFunctional(unittest.TestCase):
""" """
self._CheckLz4() self._CheckLz4()
self._DoReadFileRealDtb('130_list_fdtmap.dts') self._DoReadFileRealDtb('130_list_fdtmap.dts')
image_fname = tools.GetOutputFilename('image.bin') image_fname = tools.get_output_filename('image.bin')
image = Image.FromFile(image_fname) image = Image.FromFile(image_fname)
lines = image.GetListEntries(paths)[1] lines = image.GetListEntries(paths)[1]
files = [line[0].strip() for line in lines[1:]] files = [line[0].strip() for line in lines[1:]]
@ -2798,7 +2798,7 @@ class TestFunctional(unittest.TestCase):
""" """
self._CheckLz4() self._CheckLz4()
self._DoReadFileRealDtb('130_list_fdtmap.dts') self._DoReadFileRealDtb('130_list_fdtmap.dts')
image_fname = tools.GetOutputFilename('image.bin') image_fname = tools.get_output_filename('image.bin')
return control.ReadEntry(image_fname, entry_name, decomp) return control.ReadEntry(image_fname, entry_name, decomp)
def testExtractSimple(self): def testExtractSimple(self):
@ -2858,7 +2858,7 @@ class TestFunctional(unittest.TestCase):
def testExtractBadFile(self): def testExtractBadFile(self):
"""Test extracting an invalid file""" """Test extracting an invalid file"""
fname = os.path.join(self._indir, 'badfile') fname = os.path.join(self._indir, 'badfile')
tools.WriteFile(fname, b'') tools.write_file(fname, b'')
with self.assertRaises(ValueError) as e: with self.assertRaises(ValueError) as e:
control.ReadEntry(fname, 'name') control.ReadEntry(fname, 'name')
@ -2874,17 +2874,17 @@ class TestFunctional(unittest.TestCase):
'-f', fname) '-f', fname)
finally: finally:
shutil.rmtree(tmpdir) shutil.rmtree(tmpdir)
data = tools.ReadFile(fname) data = tools.read_file(fname)
self.assertEqual(U_BOOT_DATA, data) self.assertEqual(U_BOOT_DATA, data)
def testExtractOneEntry(self): def testExtractOneEntry(self):
"""Test extracting a single entry fron an image """ """Test extracting a single entry fron an image """
self._CheckLz4() self._CheckLz4()
self._DoReadFileRealDtb('130_list_fdtmap.dts') self._DoReadFileRealDtb('130_list_fdtmap.dts')
image_fname = tools.GetOutputFilename('image.bin') image_fname = tools.get_output_filename('image.bin')
fname = os.path.join(self._indir, 'output.extact') fname = os.path.join(self._indir, 'output.extact')
control.ExtractEntries(image_fname, fname, None, ['u-boot']) control.ExtractEntries(image_fname, fname, None, ['u-boot'])
data = tools.ReadFile(fname) data = tools.read_file(fname)
self.assertEqual(U_BOOT_DATA, data) self.assertEqual(U_BOOT_DATA, data)
def _CheckExtractOutput(self, decomp): def _CheckExtractOutput(self, decomp):
@ -2906,7 +2906,7 @@ class TestFunctional(unittest.TestCase):
expect_size: Size of data to expect in file, or None to skip expect_size: Size of data to expect in file, or None to skip
""" """
path = os.path.join(outdir, entry_path) path = os.path.join(outdir, entry_path)
data = tools.ReadFile(path) data = tools.read_file(path)
os.remove(path) os.remove(path)
if expect_data: if expect_data:
self.assertEqual(expect_data, data) self.assertEqual(expect_data, data)
@ -2926,7 +2926,7 @@ class TestFunctional(unittest.TestCase):
os.rmdir(path) os.rmdir(path)
self._DoReadFileRealDtb('130_list_fdtmap.dts') self._DoReadFileRealDtb('130_list_fdtmap.dts')
image_fname = tools.GetOutputFilename('image.bin') image_fname = tools.get_output_filename('image.bin')
outdir = os.path.join(self._indir, 'extract') outdir = os.path.join(self._indir, 'extract')
einfos = control.ExtractEntries(image_fname, None, outdir, [], decomp) einfos = control.ExtractEntries(image_fname, None, outdir, [], decomp)
@ -2962,7 +2962,7 @@ class TestFunctional(unittest.TestCase):
_CheckPresent('section/root', section.data) _CheckPresent('section/root', section.data)
cbfs = section_entries['cbfs'] cbfs = section_entries['cbfs']
_CheckPresent('section/cbfs/root', cbfs.data) _CheckPresent('section/cbfs/root', cbfs.data)
data = tools.ReadFile(image_fname) data = tools.read_file(image_fname)
_CheckPresent('root', data) _CheckPresent('root', data)
# There should be no files left. Remove all the directories to check. # There should be no files left. Remove all the directories to check.
@ -2987,7 +2987,7 @@ class TestFunctional(unittest.TestCase):
"""Test extracting some entries""" """Test extracting some entries"""
self._CheckLz4() self._CheckLz4()
self._DoReadFileRealDtb('130_list_fdtmap.dts') self._DoReadFileRealDtb('130_list_fdtmap.dts')
image_fname = tools.GetOutputFilename('image.bin') image_fname = tools.get_output_filename('image.bin')
outdir = os.path.join(self._indir, 'extract') outdir = os.path.join(self._indir, 'extract')
einfos = control.ExtractEntries(image_fname, None, outdir, einfos = control.ExtractEntries(image_fname, None, outdir,
['*cb*', '*head*']) ['*cb*', '*head*'])
@ -3002,7 +3002,7 @@ class TestFunctional(unittest.TestCase):
"""Test extracting some entries""" """Test extracting some entries"""
self._CheckLz4() self._CheckLz4()
self._DoReadFileRealDtb('130_list_fdtmap.dts') self._DoReadFileRealDtb('130_list_fdtmap.dts')
image_fname = tools.GetOutputFilename('image.bin') image_fname = tools.get_output_filename('image.bin')
with self.assertRaises(ValueError) as e: with self.assertRaises(ValueError) as e:
control.ExtractEntries(image_fname, 'fname', None, []) control.ExtractEntries(image_fname, 'fname', None, [])
self.assertIn('Must specify an entry path to write with -f', self.assertIn('Must specify an entry path to write with -f',
@ -3012,7 +3012,7 @@ class TestFunctional(unittest.TestCase):
"""Test extracting some entries""" """Test extracting some entries"""
self._CheckLz4() self._CheckLz4()
self._DoReadFileRealDtb('130_list_fdtmap.dts') self._DoReadFileRealDtb('130_list_fdtmap.dts')
image_fname = tools.GetOutputFilename('image.bin') image_fname = tools.get_output_filename('image.bin')
with self.assertRaises(ValueError) as e: with self.assertRaises(ValueError) as e:
control.ExtractEntries(image_fname, 'fname', None, ['a', 'b']) control.ExtractEntries(image_fname, 'fname', None, ['a', 'b'])
self.assertIn('Must specify exactly one entry path to write with -f', self.assertIn('Must specify exactly one entry path to write with -f',
@ -3113,9 +3113,9 @@ class TestFunctional(unittest.TestCase):
orig_dtb_data = entries['u-boot-dtb'].data orig_dtb_data = entries['u-boot-dtb'].data
orig_fdtmap_data = entries['fdtmap'].data orig_fdtmap_data = entries['fdtmap'].data
image_fname = tools.GetOutputFilename('image.bin') image_fname = tools.get_output_filename('image.bin')
updated_fname = tools.GetOutputFilename('image-updated.bin') updated_fname = tools.get_output_filename('image-updated.bin')
tools.WriteFile(updated_fname, tools.ReadFile(image_fname)) tools.write_file(updated_fname, tools.read_file(image_fname))
image = control.WriteEntry(updated_fname, entry_name, data, decomp, image = control.WriteEntry(updated_fname, entry_name, data, decomp,
allow_resize) allow_resize)
data = control.ReadEntry(updated_fname, entry_name, decomp) data = control.ReadEntry(updated_fname, entry_name, decomp)
@ -3170,8 +3170,8 @@ class TestFunctional(unittest.TestCase):
data = self._DoReadFileDtb('133_replace_multi.dts', use_real_dtb=True, data = self._DoReadFileDtb('133_replace_multi.dts', use_real_dtb=True,
update_dtb=True)[0] update_dtb=True)[0]
expected = b'x' * len(U_BOOT_DATA) expected = b'x' * len(U_BOOT_DATA)
updated_fname = tools.GetOutputFilename('image-updated.bin') updated_fname = tools.get_output_filename('image-updated.bin')
tools.WriteFile(updated_fname, data) tools.write_file(updated_fname, data)
entry_name = 'u-boot' entry_name = 'u-boot'
control.WriteEntry(updated_fname, entry_name, expected, control.WriteEntry(updated_fname, entry_name, expected,
allow_resize=False) allow_resize=False)
@ -3182,9 +3182,9 @@ class TestFunctional(unittest.TestCase):
self.assertEqual('/binman/image', state.fdt_path_prefix) self.assertEqual('/binman/image', state.fdt_path_prefix)
# Now check we can write the first image # Now check we can write the first image
image_fname = tools.GetOutputFilename('first-image.bin') image_fname = tools.get_output_filename('first-image.bin')
updated_fname = tools.GetOutputFilename('first-updated.bin') updated_fname = tools.get_output_filename('first-updated.bin')
tools.WriteFile(updated_fname, tools.ReadFile(image_fname)) tools.write_file(updated_fname, tools.read_file(image_fname))
entry_name = 'u-boot' entry_name = 'u-boot'
control.WriteEntry(updated_fname, entry_name, expected, control.WriteEntry(updated_fname, entry_name, expected,
allow_resize=False) allow_resize=False)
@ -3348,8 +3348,8 @@ class TestFunctional(unittest.TestCase):
self._CheckLz4() self._CheckLz4()
expected = b'x' * len(U_BOOT_DATA) expected = b'x' * len(U_BOOT_DATA)
data = self._DoReadFileRealDtb('142_replace_cbfs.dts') data = self._DoReadFileRealDtb('142_replace_cbfs.dts')
updated_fname = tools.GetOutputFilename('image-updated.bin') updated_fname = tools.get_output_filename('image-updated.bin')
tools.WriteFile(updated_fname, data) tools.write_file(updated_fname, data)
entry_name = 'section/cbfs/u-boot' entry_name = 'section/cbfs/u-boot'
control.WriteEntry(updated_fname, entry_name, expected, control.WriteEntry(updated_fname, entry_name, expected,
allow_resize=True) allow_resize=True)
@ -3361,8 +3361,8 @@ class TestFunctional(unittest.TestCase):
self._CheckLz4() self._CheckLz4()
expected = U_BOOT_DATA + b'x' expected = U_BOOT_DATA + b'x'
data = self._DoReadFileRealDtb('142_replace_cbfs.dts') data = self._DoReadFileRealDtb('142_replace_cbfs.dts')
updated_fname = tools.GetOutputFilename('image-updated.bin') updated_fname = tools.get_output_filename('image-updated.bin')
tools.WriteFile(updated_fname, data) tools.write_file(updated_fname, data)
entry_name = 'section/cbfs/u-boot' entry_name = 'section/cbfs/u-boot'
control.WriteEntry(updated_fname, entry_name, expected, control.WriteEntry(updated_fname, entry_name, expected,
allow_resize=True) allow_resize=True)
@ -3383,23 +3383,23 @@ class TestFunctional(unittest.TestCase):
""" """
data = self._DoReadFileRealDtb('143_replace_all.dts') data = self._DoReadFileRealDtb('143_replace_all.dts')
updated_fname = tools.GetOutputFilename('image-updated.bin') updated_fname = tools.get_output_filename('image-updated.bin')
tools.WriteFile(updated_fname, data) tools.write_file(updated_fname, data)
outdir = os.path.join(self._indir, 'extract') outdir = os.path.join(self._indir, 'extract')
einfos = control.ExtractEntries(updated_fname, None, outdir, []) einfos = control.ExtractEntries(updated_fname, None, outdir, [])
expected1 = b'x' + U_BOOT_DATA + b'y' expected1 = b'x' + U_BOOT_DATA + b'y'
u_boot_fname1 = os.path.join(outdir, 'u-boot') u_boot_fname1 = os.path.join(outdir, 'u-boot')
tools.WriteFile(u_boot_fname1, expected1) tools.write_file(u_boot_fname1, expected1)
expected2 = b'a' + U_BOOT_DATA + b'b' expected2 = b'a' + U_BOOT_DATA + b'b'
u_boot_fname2 = os.path.join(outdir, 'u-boot2') u_boot_fname2 = os.path.join(outdir, 'u-boot2')
tools.WriteFile(u_boot_fname2, expected2) tools.write_file(u_boot_fname2, expected2)
expected_text = b'not the same text' expected_text = b'not the same text'
text_fname = os.path.join(outdir, 'text') text_fname = os.path.join(outdir, 'text')
tools.WriteFile(text_fname, expected_text) tools.write_file(text_fname, expected_text)
dtb_fname = os.path.join(outdir, 'u-boot-dtb') dtb_fname = os.path.join(outdir, 'u-boot-dtb')
dtb = fdt.FdtScan(dtb_fname) dtb = fdt.FdtScan(dtb_fname)
@ -3475,10 +3475,10 @@ class TestFunctional(unittest.TestCase):
fname = os.path.join(tmpdir, 'update-u-boot.bin') fname = os.path.join(tmpdir, 'update-u-boot.bin')
expected = b'x' * len(U_BOOT_DATA) expected = b'x' * len(U_BOOT_DATA)
tools.WriteFile(fname, expected) tools.write_file(fname, expected)
self._DoBinman('replace', '-i', updated_fname, 'u-boot', '-f', fname) self._DoBinman('replace', '-i', updated_fname, 'u-boot', '-f', fname)
data = tools.ReadFile(updated_fname) data = tools.read_file(updated_fname)
self.assertEqual(expected, data[:len(expected)]) self.assertEqual(expected, data[:len(expected)])
map_fname = os.path.join(tmpdir, 'image-updated.map') map_fname = os.path.join(tmpdir, 'image-updated.map')
self.assertFalse(os.path.exists(map_fname)) self.assertFalse(os.path.exists(map_fname))
@ -3493,7 +3493,7 @@ class TestFunctional(unittest.TestCase):
self._DoBinman('replace', '-i', updated_fname, '-I', outdir, self._DoBinman('replace', '-i', updated_fname, '-I', outdir,
'u-boot2', 'text') 'u-boot2', 'text')
tools.PrepareOutputDir(None) tools.prepare_output_dir(None)
image = Image.FromFile(updated_fname) image = Image.FromFile(updated_fname)
image.LoadData() image.LoadData()
entries = image.GetEntries() entries = image.GetEntries()
@ -3531,7 +3531,7 @@ class TestFunctional(unittest.TestCase):
fname = os.path.join(self._indir, 'update-u-boot.bin') fname = os.path.join(self._indir, 'update-u-boot.bin')
expected = b'x' * len(U_BOOT_DATA) expected = b'x' * len(U_BOOT_DATA)
tools.WriteFile(fname, expected) tools.write_file(fname, expected)
self._DoBinman('replace', '-i', updated_fname, 'u-boot', self._DoBinman('replace', '-i', updated_fname, 'u-boot',
'-f', fname, '-m') '-f', fname, '-m')
@ -3543,7 +3543,7 @@ class TestFunctional(unittest.TestCase):
def testReplaceNoEntryPaths(self): def testReplaceNoEntryPaths(self):
"""Test replacing an entry without an entry path""" """Test replacing an entry without an entry path"""
self._DoReadFileRealDtb('143_replace_all.dts') self._DoReadFileRealDtb('143_replace_all.dts')
image_fname = tools.GetOutputFilename('image.bin') image_fname = tools.get_output_filename('image.bin')
with self.assertRaises(ValueError) as e: with self.assertRaises(ValueError) as e:
control.ReplaceEntries(image_fname, 'fname', None, []) control.ReplaceEntries(image_fname, 'fname', None, [])
self.assertIn('Must specify an entry path to read with -f', self.assertIn('Must specify an entry path to read with -f',
@ -3552,7 +3552,7 @@ class TestFunctional(unittest.TestCase):
def testReplaceTooManyEntryPaths(self): def testReplaceTooManyEntryPaths(self):
"""Test extracting some entries""" """Test extracting some entries"""
self._DoReadFileRealDtb('143_replace_all.dts') self._DoReadFileRealDtb('143_replace_all.dts')
image_fname = tools.GetOutputFilename('image.bin') image_fname = tools.get_output_filename('image.bin')
with self.assertRaises(ValueError) as e: with self.assertRaises(ValueError) as e:
control.ReplaceEntries(image_fname, 'fname', None, ['a', 'b']) control.ReplaceEntries(image_fname, 'fname', None, ['a', 'b'])
self.assertIn('Must specify exactly one entry path to write with -f', self.assertIn('Must specify exactly one entry path to write with -f',
@ -3597,15 +3597,15 @@ class TestFunctional(unittest.TestCase):
data = self._DoReadFile(dts) data = self._DoReadFile(dts)
sym_values = struct.pack('<LQLL', *expected_vals) sym_values = struct.pack('<LQLL', *expected_vals)
upto1 = 4 + len(U_BOOT_SPL_DATA) upto1 = 4 + len(U_BOOT_SPL_DATA)
expected1 = tools.GetBytes(0xff, 4) + sym_values + U_BOOT_SPL_DATA[20:] expected1 = tools.get_bytes(0xff, 4) + sym_values + U_BOOT_SPL_DATA[20:]
self.assertEqual(expected1, data[:upto1]) self.assertEqual(expected1, data[:upto1])
upto2 = upto1 + 1 + len(U_BOOT_SPL_DATA) upto2 = upto1 + 1 + len(U_BOOT_SPL_DATA)
expected2 = tools.GetBytes(0xff, 1) + sym_values + U_BOOT_SPL_DATA[20:] expected2 = tools.get_bytes(0xff, 1) + sym_values + U_BOOT_SPL_DATA[20:]
self.assertEqual(expected2, data[upto1:upto2]) self.assertEqual(expected2, data[upto1:upto2])
upto3 = 0x34 + len(U_BOOT_DATA) upto3 = 0x34 + len(U_BOOT_DATA)
expected3 = tools.GetBytes(0xff, 1) + U_BOOT_DATA expected3 = tools.get_bytes(0xff, 1) + U_BOOT_DATA
self.assertEqual(expected3, data[upto2:upto3]) self.assertEqual(expected3, data[upto2:upto3])
expected4 = sym_values + U_BOOT_TPL_DATA[20:] expected4 = sym_values + U_BOOT_TPL_DATA[20:]
@ -3727,8 +3727,8 @@ class TestFunctional(unittest.TestCase):
self.assertIn('data', fnode.props) self.assertIn('data', fnode.props)
fname = os.path.join(self._indir, 'fit_data.fit') fname = os.path.join(self._indir, 'fit_data.fit')
tools.WriteFile(fname, fit_data) tools.write_file(fname, fit_data)
out = tools.Run('dumpimage', '-l', fname) out = tools.run('dumpimage', '-l', fname)
# Check a few features to make sure the plumbing works. We don't need # Check a few features to make sure the plumbing works. We don't need
# to test the operation of mkimage or dumpimage here. First convert the # to test the operation of mkimage or dumpimage here. First convert the
@ -3763,7 +3763,7 @@ class TestFunctional(unittest.TestCase):
# Size of the external-data region as set up by mkimage # Size of the external-data region as set up by mkimage
external_data_size = len(U_BOOT_DATA) + 2 external_data_size = len(U_BOOT_DATA) + 2
expected_size = (len(U_BOOT_DATA) + 0x400 + expected_size = (len(U_BOOT_DATA) + 0x400 +
tools.Align(external_data_size, 4) + tools.align(external_data_size, 4) +
len(U_BOOT_NODTB_DATA)) len(U_BOOT_NODTB_DATA))
# The data should be outside the FIT # The data should be outside the FIT
@ -3802,8 +3802,8 @@ class TestFunctional(unittest.TestCase):
"""Test pad-before, pad-after for entries in sections""" """Test pad-before, pad-after for entries in sections"""
data, _, _, out_dtb_fname = self._DoReadFileDtb( data, _, _, out_dtb_fname = self._DoReadFileDtb(
'166_pad_in_sections.dts', update_dtb=True) '166_pad_in_sections.dts', update_dtb=True)
expected = (U_BOOT_DATA + tools.GetBytes(ord('!'), 12) + expected = (U_BOOT_DATA + tools.get_bytes(ord('!'), 12) +
U_BOOT_DATA + tools.GetBytes(ord('!'), 6) + U_BOOT_DATA + tools.get_bytes(ord('!'), 6) +
U_BOOT_DATA) U_BOOT_DATA)
self.assertEqual(expected, data) self.assertEqual(expected, data)
@ -3846,14 +3846,14 @@ class TestFunctional(unittest.TestCase):
node = dtb.GetNode('/images/kernel') node = dtb.GetNode('/images/kernel')
data = dtb.GetProps(node)["data"].bytes data = dtb.GetProps(node)["data"].bytes
align_pad = 0x10 - (len(U_BOOT_SPL_DATA) % 0x10) align_pad = 0x10 - (len(U_BOOT_SPL_DATA) % 0x10)
expected = (tools.GetBytes(0, 0x20) + U_BOOT_SPL_DATA + expected = (tools.get_bytes(0, 0x20) + U_BOOT_SPL_DATA +
tools.GetBytes(0, align_pad) + U_BOOT_DATA) tools.get_bytes(0, align_pad) + U_BOOT_DATA)
self.assertEqual(expected, data) self.assertEqual(expected, data)
node = dtb.GetNode('/images/fdt-1') node = dtb.GetNode('/images/fdt-1')
data = dtb.GetProps(node)["data"].bytes data = dtb.GetProps(node)["data"].bytes
expected = (U_BOOT_SPL_DTB_DATA + tools.GetBytes(0, 20) + expected = (U_BOOT_SPL_DTB_DATA + tools.get_bytes(0, 20) +
tools.ToBytes(TEXT_DATA) + tools.GetBytes(0, 30) + tools.to_bytes(TEXT_DATA) + tools.get_bytes(0, 30) +
U_BOOT_DTB_DATA) U_BOOT_DTB_DATA)
self.assertEqual(expected, data) self.assertEqual(expected, data)
@ -4069,8 +4069,8 @@ class TestFunctional(unittest.TestCase):
def testSkipAtStartPad(self): def testSkipAtStartPad(self):
"""Test handling of skip-at-start section with padded entry""" """Test handling of skip-at-start section with padded entry"""
data = self._DoReadFile('178_skip_at_start_pad.dts') data = self._DoReadFile('178_skip_at_start_pad.dts')
before = tools.GetBytes(0, 8) before = tools.get_bytes(0, 8)
after = tools.GetBytes(0, 4) after = tools.get_bytes(0, 4)
all = before + U_BOOT_DATA + after all = before + U_BOOT_DATA + after
self.assertEqual(all, data) self.assertEqual(all, data)
@ -4089,8 +4089,8 @@ class TestFunctional(unittest.TestCase):
def testSkipAtStartSectionPad(self): def testSkipAtStartSectionPad(self):
"""Test handling of skip-at-start section with padding""" """Test handling of skip-at-start section with padding"""
data = self._DoReadFile('179_skip_at_start_section_pad.dts') data = self._DoReadFile('179_skip_at_start_section_pad.dts')
before = tools.GetBytes(0, 8) before = tools.get_bytes(0, 8)
after = tools.GetBytes(0, 4) after = tools.get_bytes(0, 4)
all = before + U_BOOT_DATA + after all = before + U_BOOT_DATA + after
self.assertEqual(all, data) self.assertEqual(all, data)
@ -4110,23 +4110,23 @@ class TestFunctional(unittest.TestCase):
def testSectionPad(self): def testSectionPad(self):
"""Testing padding with sections""" """Testing padding with sections"""
data = self._DoReadFile('180_section_pad.dts') data = self._DoReadFile('180_section_pad.dts')
expected = (tools.GetBytes(ord('&'), 3) + expected = (tools.get_bytes(ord('&'), 3) +
tools.GetBytes(ord('!'), 5) + tools.get_bytes(ord('!'), 5) +
U_BOOT_DATA + U_BOOT_DATA +
tools.GetBytes(ord('!'), 1) + tools.get_bytes(ord('!'), 1) +
tools.GetBytes(ord('&'), 2)) tools.get_bytes(ord('&'), 2))
self.assertEqual(expected, data) self.assertEqual(expected, data)
def testSectionAlign(self): def testSectionAlign(self):
"""Testing alignment with sections""" """Testing alignment with sections"""
data = self._DoReadFileDtb('181_section_align.dts', map=True)[0] data = self._DoReadFileDtb('181_section_align.dts', map=True)[0]
expected = (b'\0' + # fill section expected = (b'\0' + # fill section
tools.GetBytes(ord('&'), 1) + # padding to section align tools.get_bytes(ord('&'), 1) + # padding to section align
b'\0' + # fill section b'\0' + # fill section
tools.GetBytes(ord('!'), 3) + # padding to u-boot align tools.get_bytes(ord('!'), 3) + # padding to u-boot align
U_BOOT_DATA + U_BOOT_DATA +
tools.GetBytes(ord('!'), 4) + # padding to u-boot size tools.get_bytes(ord('!'), 4) + # padding to u-boot size
tools.GetBytes(ord('!'), 4)) # padding to section size tools.get_bytes(ord('!'), 4)) # padding to section size
self.assertEqual(expected, data) self.assertEqual(expected, data)
def testCompressImage(self): def testCompressImage(self):
@ -4357,7 +4357,7 @@ class TestFunctional(unittest.TestCase):
'188_image_entryarg.dts',use_real_dtb=True, update_dtb=True, '188_image_entryarg.dts',use_real_dtb=True, update_dtb=True,
entry_args=entry_args) entry_args=entry_args)
image_fname = tools.GetOutputFilename('image.bin') image_fname = tools.get_output_filename('image.bin')
orig_image = control.images['image'] orig_image = control.images['image']
# This should not generate an error about the missing 'cros-ec-rw-path' # This should not generate an error about the missing 'cros-ec-rw-path'
@ -4378,7 +4378,7 @@ class TestFunctional(unittest.TestCase):
def testReadImageSkip(self): def testReadImageSkip(self):
"""Test reading an image and accessing its FDT map""" """Test reading an image and accessing its FDT map"""
data = self.data = self._DoReadFileRealDtb('191_read_image_skip.dts') data = self.data = self._DoReadFileRealDtb('191_read_image_skip.dts')
image_fname = tools.GetOutputFilename('image.bin') image_fname = tools.get_output_filename('image.bin')
orig_image = control.images['image'] orig_image = control.images['image']
image = Image.FromFile(image_fname) image = Image.FromFile(image_fname)
self.assertEqual(orig_image.GetEntries().keys(), self.assertEqual(orig_image.GetEntries().keys(),
@ -4406,7 +4406,7 @@ class TestFunctional(unittest.TestCase):
# ELF file with a '__bss_size' symbol # ELF file with a '__bss_size' symbol
self._SetupTplElf() self._SetupTplElf()
data = self._DoReadFile('193_tpl_bss_pad.dts') data = self._DoReadFile('193_tpl_bss_pad.dts')
self.assertEqual(U_BOOT_TPL_DATA + tools.GetBytes(0, 10) + U_BOOT_DATA, self.assertEqual(U_BOOT_TPL_DATA + tools.get_bytes(0, 10) + U_BOOT_DATA,
data) data)
def testTplBssPadMissing(self): def testTplBssPadMissing(self):
@ -4605,8 +4605,8 @@ class TestFunctional(unittest.TestCase):
"""Test a collection""" """Test a collection"""
data = self._DoReadFile('198_collection.dts') data = self._DoReadFile('198_collection.dts')
self.assertEqual(U_BOOT_NODTB_DATA + U_BOOT_DTB_DATA + self.assertEqual(U_BOOT_NODTB_DATA + U_BOOT_DTB_DATA +
tools.GetBytes(0xff, 2) + U_BOOT_NODTB_DATA + tools.get_bytes(0xff, 2) + U_BOOT_NODTB_DATA +
tools.GetBytes(0xfe, 3) + U_BOOT_DTB_DATA, tools.get_bytes(0xfe, 3) + U_BOOT_DTB_DATA,
data) data)
def testCollectionSection(self): def testCollectionSection(self):
@ -4617,21 +4617,21 @@ class TestFunctional(unittest.TestCase):
# missing. # missing.
data = self._DoReadFile('199_collection_section.dts') data = self._DoReadFile('199_collection_section.dts')
section = U_BOOT_NODTB_DATA + U_BOOT_DTB_DATA section = U_BOOT_NODTB_DATA + U_BOOT_DTB_DATA
self.assertEqual(section + U_BOOT_DATA + tools.GetBytes(0xff, 2) + self.assertEqual(section + U_BOOT_DATA + tools.get_bytes(0xff, 2) +
section + tools.GetBytes(0xfe, 3) + U_BOOT_DATA, section + tools.get_bytes(0xfe, 3) + U_BOOT_DATA,
data) data)
def testAlignDefault(self): def testAlignDefault(self):
"""Test that default alignment works on sections""" """Test that default alignment works on sections"""
data = self._DoReadFile('200_align_default.dts') data = self._DoReadFile('200_align_default.dts')
expected = (U_BOOT_DATA + tools.GetBytes(0, 8 - len(U_BOOT_DATA)) + expected = (U_BOOT_DATA + tools.get_bytes(0, 8 - len(U_BOOT_DATA)) +
U_BOOT_DATA) U_BOOT_DATA)
# Special alignment for section # Special alignment for section
expected += tools.GetBytes(0, 32 - len(expected)) expected += tools.get_bytes(0, 32 - len(expected))
# No alignment within the nested section # No alignment within the nested section
expected += U_BOOT_DATA + U_BOOT_NODTB_DATA; expected += U_BOOT_DATA + U_BOOT_NODTB_DATA;
# Now the final piece, which should be default-aligned # Now the final piece, which should be default-aligned
expected += tools.GetBytes(0, 88 - len(expected)) + U_BOOT_NODTB_DATA expected += tools.get_bytes(0, 88 - len(expected)) + U_BOOT_NODTB_DATA
self.assertEqual(expected, data) self.assertEqual(expected, data)
def testPackOpenSBI(self): def testPackOpenSBI(self):
@ -4642,9 +4642,9 @@ class TestFunctional(unittest.TestCase):
def testSectionsSingleThread(self): def testSectionsSingleThread(self):
"""Test sections without multithreading""" """Test sections without multithreading"""
data = self._DoReadFileDtb('055_sections.dts', threads=0)[0] data = self._DoReadFileDtb('055_sections.dts', threads=0)[0]
expected = (U_BOOT_DATA + tools.GetBytes(ord('!'), 12) + expected = (U_BOOT_DATA + tools.get_bytes(ord('!'), 12) +
U_BOOT_DATA + tools.GetBytes(ord('a'), 12) + U_BOOT_DATA + tools.get_bytes(ord('a'), 12) +
U_BOOT_DATA + tools.GetBytes(ord('&'), 4)) U_BOOT_DATA + tools.get_bytes(ord('&'), 4))
self.assertEqual(expected, data) self.assertEqual(expected, data)
def testThreadTimeout(self): def testThreadTimeout(self):
@ -4677,7 +4677,7 @@ class TestFunctional(unittest.TestCase):
# definition in the correct place # definition in the correct place
syms = elf.GetSymbolFileOffset(infile, syms = elf.GetSymbolFileOffset(infile,
['dtb_embed_begin', 'dtb_embed_end']) ['dtb_embed_begin', 'dtb_embed_end'])
data = tools.ReadFile(outfile) data = tools.read_file(outfile)
dtb_data = data[syms['dtb_embed_begin'].offset: dtb_data = data[syms['dtb_embed_begin'].offset:
syms['dtb_embed_end'].offset] syms['dtb_embed_end'].offset]
@ -4756,7 +4756,7 @@ class TestFunctional(unittest.TestCase):
# Set up a version file to make sure that works # Set up a version file to make sure that works
version = 'v2025.01-rc2' version = 'v2025.01-rc2'
tools.WriteFile(os.path.join(self._indir, 'version'), version, tools.write_file(os.path.join(self._indir, 'version'), version,
binary=False) binary=False)
self.assertEqual(version, state.GetVersion(self._indir)) self.assertEqual(version, state.GetVersion(self._indir))
@ -4780,7 +4780,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
# Check that we can read it and it can be scanned, meaning it does # Check that we can read it and it can be scanned, meaning it does
# not have a 16-byte fdtmap header # not have a 16-byte fdtmap header
data = tools.ReadFile(dtb) data = tools.read_file(dtb)
dtb = fdt.Fdt.FromData(data) dtb = fdt.Fdt.FromData(data)
dtb.Scan() dtb.Scan()
@ -4788,7 +4788,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
fname = os.path.join(tmpdir, 'fdt.dtb') fname = os.path.join(tmpdir, 'fdt.dtb')
self._DoBinman('extract', '-i', updated_fname, '-F', 'dummy', self._DoBinman('extract', '-i', updated_fname, '-F', 'dummy',
'-f', fname, 'u-boot') '-f', fname, 'u-boot')
data = tools.ReadFile(fname) data = tools.read_file(fname)
self.assertEqual(U_BOOT_DATA, data) self.assertEqual(U_BOOT_DATA, data)
finally: finally:
@ -4917,7 +4917,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
fdtmap_data = data[fdtmap.image_pos:fdtmap.image_pos + fdtmap.size] fdtmap_data = data[fdtmap.image_pos:fdtmap.image_pos + fdtmap.size]
magic = fdtmap_data[:8] magic = fdtmap_data[:8]
self.assertEqual(b'_FDTMAP_', magic) self.assertEqual(b'_FDTMAP_', magic)
self.assertEqual(tools.GetBytes(0, 8), fdtmap_data[8:16]) self.assertEqual(tools.get_bytes(0, 8), fdtmap_data[8:16])
fdt_data = fdtmap_data[16:] fdt_data = fdtmap_data[16:]
dtb = fdt.Fdt.FromData(fdt_data) dtb = fdt.Fdt.FromData(fdt_data)
@ -4944,25 +4944,25 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
def testFipExtractOneEntry(self): def testFipExtractOneEntry(self):
"""Test extracting a single entry fron an FIP""" """Test extracting a single entry fron an FIP"""
self._DoReadFileRealDtb('207_fip_ls.dts') self._DoReadFileRealDtb('207_fip_ls.dts')
image_fname = tools.GetOutputFilename('image.bin') image_fname = tools.get_output_filename('image.bin')
fname = os.path.join(self._indir, 'output.extact') fname = os.path.join(self._indir, 'output.extact')
control.ExtractEntries(image_fname, fname, None, ['atf-fip/u-boot']) control.ExtractEntries(image_fname, fname, None, ['atf-fip/u-boot'])
data = tools.ReadFile(fname) data = tools.read_file(fname)
self.assertEqual(U_BOOT_DATA, data) self.assertEqual(U_BOOT_DATA, data)
def testFipReplace(self): def testFipReplace(self):
"""Test replacing a single file in a FIP""" """Test replacing a single file in a FIP"""
expected = U_BOOT_DATA + tools.GetBytes(0x78, 50) expected = U_BOOT_DATA + tools.get_bytes(0x78, 50)
data = self._DoReadFileRealDtb('208_fip_replace.dts') data = self._DoReadFileRealDtb('208_fip_replace.dts')
updated_fname = tools.GetOutputFilename('image-updated.bin') updated_fname = tools.get_output_filename('image-updated.bin')
tools.WriteFile(updated_fname, data) tools.write_file(updated_fname, data)
entry_name = 'atf-fip/u-boot' entry_name = 'atf-fip/u-boot'
control.WriteEntry(updated_fname, entry_name, expected, control.WriteEntry(updated_fname, entry_name, expected,
allow_resize=True) allow_resize=True)
actual = control.ReadEntry(updated_fname, entry_name) actual = control.ReadEntry(updated_fname, entry_name)
self.assertEqual(expected, actual) self.assertEqual(expected, actual)
new_data = tools.ReadFile(updated_fname) new_data = tools.read_file(updated_fname)
hdr, fents = fip_util.decode_fip(new_data) hdr, fents = fip_util.decode_fip(new_data)
self.assertEqual(2, len(fents)) self.assertEqual(2, len(fents))
@ -4999,7 +4999,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
self.assertEqual(True, fent.valid) self.assertEqual(True, fent.valid)
rest = data[0x60 + len(ATF_BL31_DATA):0x100] rest = data[0x60 + len(ATF_BL31_DATA):0x100]
self.assertEqual(tools.GetBytes(0xff, len(rest)), rest) self.assertEqual(tools.get_bytes(0xff, len(rest)), rest)
def testFipBadAlign(self): def testFipBadAlign(self):
"""Test that an invalid alignment value in a FIP is detected""" """Test that an invalid alignment value in a FIP is detected"""
@ -5055,7 +5055,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
def testFetchBintools(self): def testFetchBintools(self):
def fail_download(url): def fail_download(url):
"""Take the tools.Download() function by raising an exception""" """Take the tools.download() function by raising an exception"""
raise urllib.error.URLError('my error') raise urllib.error.URLError('my error')
args = ['tool'] args = ['tool']
@ -5070,7 +5070,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
self.assertIn('Please specify bintools to fetch', str(e.exception)) self.assertIn('Please specify bintools to fetch', str(e.exception))
args = ['tool', '--fetch', '_testing'] args = ['tool', '--fetch', '_testing']
with unittest.mock.patch.object(tools, 'Download', with unittest.mock.patch.object(tools, 'download',
side_effect=fail_download): side_effect=fail_download):
with test_util.capture_sys_output() as (stdout, _): with test_util.capture_sys_output() as (stdout, _):
self._DoBinman(*args) self._DoBinman(*args)
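
The fetch tests above keep reusing one pattern with the renamed helper: patch tools.download() so the failure path runs without any network access. A minimal self-contained sketch of that pattern, assuming the helpers are importable as the patman package in this tree; the URL is purely illustrative:

    import unittest.mock
    import urllib.error

    from patman import tools  # assumed import path for the renamed helpers


    def fail_download(url):
        """Stand-in for tools.download() that always fails."""
        raise urllib.error.URLError('my error')


    with unittest.mock.patch.object(tools, 'download', side_effect=fail_download):
        try:
            tools.download('https://example.com/some-tool.tar.xz')
        except urllib.error.URLError as exc:
            print('fetch failed as expected:', exc.reason)
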

View file

@ -111,7 +111,7 @@ class Image(section.Entry_section):
Raises: Raises:
ValueError if something goes wrong ValueError if something goes wrong
""" """
data = tools.ReadFile(fname) data = tools.read_file(fname)
size = len(data) size = len(data)
# First look for an image header # First look for an image header
@ -128,8 +128,8 @@ class Image(section.Entry_section):
dtb_size = probe_dtb.GetFdtObj().totalsize() dtb_size = probe_dtb.GetFdtObj().totalsize()
fdtmap_data = data[pos:pos + dtb_size + fdtmap.FDTMAP_HDR_LEN] fdtmap_data = data[pos:pos + dtb_size + fdtmap.FDTMAP_HDR_LEN]
fdt_data = fdtmap_data[fdtmap.FDTMAP_HDR_LEN:] fdt_data = fdtmap_data[fdtmap.FDTMAP_HDR_LEN:]
out_fname = tools.GetOutputFilename('fdtmap.in.dtb') out_fname = tools.get_output_filename('fdtmap.in.dtb')
tools.WriteFile(out_fname, fdt_data) tools.write_file(out_fname, fdt_data)
dtb = fdt.Fdt(out_fname) dtb = fdt.Fdt(out_fname)
dtb.Scan() dtb.Scan()
@ -174,7 +174,7 @@ class Image(section.Entry_section):
def BuildImage(self): def BuildImage(self):
"""Write the image to a file""" """Write the image to a file"""
fname = tools.GetOutputFilename(self._filename) fname = tools.get_output_filename(self._filename)
tout.Info("Writing image to '%s'" % fname) tout.Info("Writing image to '%s'" % fname)
with open(fname, 'wb') as fd: with open(fname, 'wb') as fd:
data = self.GetPaddedData() data = self.GetPaddedData()
@ -188,7 +188,7 @@ class Image(section.Entry_section):
Filename of map file written Filename of map file written
""" """
filename = '%s.map' % self.image_name filename = '%s.map' % self.image_name
fname = tools.GetOutputFilename(filename) fname = tools.get_output_filename(filename)
with open(fname, 'w') as fd: with open(fname, 'w') as fd:
print('%8s %8s %8s %s' % ('ImagePos', 'Offset', 'Size', 'Name'), print('%8s %8s %8s %s' % ('ImagePos', 'Offset', 'Size', 'Name'),
file=fd) file=fd)
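
BuildImage() and WriteMap() above place their outputs through the shared output-directory helpers. A minimal sketch of that lifecycle with the renamed names, assuming the patman import path; the filename and payload are illustrative only:

    from patman import tools  # assumed import path

    tools.prepare_output_dir(None)                  # creates a temporary directory
    fname = tools.get_output_filename('image.bin')  # path inside that directory
    tools.write_file(fname, b'\x00' * 16)           # payload is illustrative only
    print(tools.read_file(fname))
    tools.finalise_output_dir()                     # removes the directory unless preserved
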

View file

@ -138,8 +138,8 @@ def GetFdtContents(etype='u-boot-dtb'):
data = GetFdtForEtype(etype).GetContents() data = GetFdtForEtype(etype).GetContents()
else: else:
fname = output_fdt_info[etype][1] fname = output_fdt_info[etype][1]
pathname = tools.GetInputFilename(fname) pathname = tools.get_input_filename(fname)
data = tools.ReadFile(pathname) data = tools.read_file(pathname)
return pathname, data return pathname, data
def UpdateFdtContents(etype, data): def UpdateFdtContents(etype, data):
@ -154,7 +154,7 @@ def UpdateFdtContents(etype, data):
""" """
dtb, fname = output_fdt_info[etype] dtb, fname = output_fdt_info[etype]
dtb_fname = dtb.GetFilename() dtb_fname = dtb.GetFilename()
tools.WriteFile(dtb_fname, data) tools.write_file(dtb_fname, data)
dtb = fdt.FdtScan(dtb_fname) dtb = fdt.FdtScan(dtb_fname)
output_fdt_info[etype] = [dtb, fname] output_fdt_info[etype] = [dtb, fname]
@ -235,12 +235,12 @@ def Prepare(images, dtb):
else: else:
fdt_set = {} fdt_set = {}
for etype, fname in DTB_TYPE_FNAME.items(): for etype, fname in DTB_TYPE_FNAME.items():
infile = tools.GetInputFilename(fname, allow_missing=True) infile = tools.get_input_filename(fname, allow_missing=True)
if infile and os.path.exists(infile): if infile and os.path.exists(infile):
fname_dtb = fdt_util.EnsureCompiled(infile) fname_dtb = fdt_util.EnsureCompiled(infile)
out_fname = tools.GetOutputFilename('%s.out' % out_fname = tools.get_output_filename('%s.out' %
os.path.split(fname)[1]) os.path.split(fname)[1])
tools.WriteFile(out_fname, tools.ReadFile(fname_dtb)) tools.write_file(out_fname, tools.read_file(fname_dtb))
other_dtb = fdt.FdtScan(out_fname) other_dtb = fdt.FdtScan(out_fname)
output_fdt_info[etype] = [other_dtb, out_fname] output_fdt_info[etype] = [other_dtb, out_fname]
@ -271,13 +271,13 @@ def PrepareFromLoadedData(image):
tout.Info(" Found device tree type 'fdtmap' '%s'" % image.fdtmap_dtb.name) tout.Info(" Found device tree type 'fdtmap' '%s'" % image.fdtmap_dtb.name)
for etype, value in image.GetFdts().items(): for etype, value in image.GetFdts().items():
entry, fname = value entry, fname = value
out_fname = tools.GetOutputFilename('%s.dtb' % entry.etype) out_fname = tools.get_output_filename('%s.dtb' % entry.etype)
tout.Info(" Found device tree type '%s' at '%s' path '%s'" % tout.Info(" Found device tree type '%s' at '%s' path '%s'" %
(etype, out_fname, entry.GetPath())) (etype, out_fname, entry.GetPath()))
entry._filename = entry.GetDefaultFilename() entry._filename = entry.GetDefaultFilename()
data = entry.ReadData() data = entry.ReadData()
tools.WriteFile(out_fname, data) tools.write_file(out_fname, data)
dtb = fdt.Fdt(out_fname) dtb = fdt.Fdt(out_fname)
dtb.Scan() dtb.Scan()
image_node = dtb.GetNode('/binman') image_node = dtb.GetNode('/binman')
@ -529,7 +529,7 @@ def GetVersion(path=OUR_PATH):
""" """
version_fname = os.path.join(path, 'version') version_fname = os.path.join(path, 'version')
if os.path.exists(version_fname): if os.path.exists(version_fname):
version = tools.ReadFile(version_fname, binary=False) version = tools.read_file(version_fname, binary=False)
else: else:
version = '(unreleased)' version = '(unreleased)'
return version return version

View file

@ -135,7 +135,7 @@ def DoBuildman(options, args, toolchains=None, make_func=None, boards=None,
global builder global builder
if options.full_help: if options.full_help:
tools.PrintFullHelp( tools.print_full_help(
os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), 'README') os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), 'README')
) )
return 0 return 0

View file

@ -422,7 +422,7 @@ class TestFunctional(unittest.TestCase):
if arg.startswith('O='): if arg.startswith('O='):
out_dir = arg[2:] out_dir = arg[2:]
fname = os.path.join(cwd or '', out_dir, 'u-boot') fname = os.path.join(cwd or '', out_dir, 'u-boot')
tools.WriteFile(fname, b'U-Boot') tools.write_file(fname, b'U-Boot')
if type(commit) is not str: if type(commit) is not str:
stderr = self._error.get((brd.target, commit.sequence)) stderr = self._error.get((brd.target, commit.sequence))
if stderr: if stderr:

View file

@ -607,7 +607,7 @@ class TestBuild(unittest.TestCase):
def testPrepareOutputSpace(self): def testPrepareOutputSpace(self):
def _Touch(fname): def _Touch(fname):
tools.WriteFile(os.path.join(base_dir, fname), b'') tools.write_file(os.path.join(base_dir, fname), b'')
base_dir = tempfile.mkdtemp() base_dir = tempfile.mkdtemp()

View file

@ -201,11 +201,11 @@ class Toolchain:
# We'll use MakeArgs() to provide this # We'll use MakeArgs() to provide this
pass pass
elif full_path: elif full_path:
env[b'CROSS_COMPILE'] = tools.ToBytes( env[b'CROSS_COMPILE'] = tools.to_bytes(
wrapper + os.path.join(self.path, self.cross)) wrapper + os.path.join(self.path, self.cross))
else: else:
env[b'CROSS_COMPILE'] = tools.ToBytes(wrapper + self.cross) env[b'CROSS_COMPILE'] = tools.to_bytes(wrapper + self.cross)
env[b'PATH'] = tools.ToBytes(self.path) + b':' + env[b'PATH'] env[b'PATH'] = tools.to_bytes(self.path) + b':' + env[b'PATH']
env[b'LC_ALL'] = b'C' env[b'LC_ALL'] = b'C'
@ -504,7 +504,7 @@ class Toolchains:
url = '%s/%s/%s/' % (base, arch, version) url = '%s/%s/%s/' % (base, arch, version)
print('Checking: %s' % url) print('Checking: %s' % url)
response = urllib.request.urlopen(url) response = urllib.request.urlopen(url)
html = tools.ToString(response.read()) html = tools.to_string(response.read())
parser = MyHTMLParser(fetch_arch) parser = MyHTMLParser(fetch_arch)
parser.feed(html) parser.feed(html)
if fetch_arch == 'list': if fetch_arch == 'list':
@ -571,7 +571,7 @@ class Toolchains:
os.mkdir(dest) os.mkdir(dest)
# Download the tar file for this toolchain and unpack it # Download the tar file for this toolchain and unpack it
tarfile, tmpdir = tools.Download(url, '.buildman') tarfile, tmpdir = tools.download(url, '.buildman')
if not tarfile: if not tarfile:
return 1 return 1
print(col.Color(col.GREEN, 'Unpacking to: %s' % dest), end=' ') print(col.Color(col.GREEN, 'Unpacking to: %s' % dest), end=' ')

View file

@ -396,7 +396,7 @@ class Node:
prop_name: Name of property prop_name: Name of property
""" """
self.props[prop_name] = Prop(self, None, prop_name, self.props[prop_name] = Prop(self, None, prop_name,
tools.GetBytes(0, 4)) tools.get_bytes(0, 4))
def AddEmptyProp(self, prop_name, len): def AddEmptyProp(self, prop_name, len):
"""Add a property with a fixed data size, for filling in later """Add a property with a fixed data size, for filling in later
@ -408,7 +408,7 @@ class Node:
prop_name: Name of property prop_name: Name of property
len: Length of data in property len: Length of data in property
""" """
value = tools.GetBytes(0, len) value = tools.get_bytes(0, len)
self.props[prop_name] = Prop(self, None, prop_name, value) self.props[prop_name] = Prop(self, None, prop_name, value)
def _CheckProp(self, prop_name): def _CheckProp(self, prop_name):
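
Both AddZeroProp() and AddEmptyProp() above take their placeholder data from tools.get_bytes(), which simply repeats one byte value a given number of times. A tiny worked example, assuming the patman import path:

    from patman import tools  # assumed import path

    assert tools.get_bytes(0, 4) == b'\x00\x00\x00\x00'   # AddZeroProp() placeholder
    assert tools.get_bytes(0xff, 2) == b'\xff\xff'         # typical padding fill
    assert tools.get_bytes(ord('!'), 3) == b'!!!'          # the pad bytes used in ftest.py
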

View file

@ -75,12 +75,12 @@ def EnsureCompiled(fname, tmpdir=None, capture_stderr=False):
dts_input = os.path.join(tmpdir, 'source.dts') dts_input = os.path.join(tmpdir, 'source.dts')
dtb_output = os.path.join(tmpdir, 'source.dtb') dtb_output = os.path.join(tmpdir, 'source.dtb')
else: else:
dts_input = tools.GetOutputFilename('source.dts') dts_input = tools.get_output_filename('source.dts')
dtb_output = tools.GetOutputFilename('source.dtb') dtb_output = tools.get_output_filename('source.dtb')
search_paths = [os.path.join(os.getcwd(), 'include')] search_paths = [os.path.join(os.getcwd(), 'include')]
root, _ = os.path.splitext(fname) root, _ = os.path.splitext(fname)
cc, args = tools.GetTargetCompileTool('cc') cc, args = tools.get_target_compile_tool('cc')
args += ['-E', '-P', '-x', 'assembler-with-cpp', '-D__ASSEMBLY__'] args += ['-E', '-P', '-x', 'assembler-with-cpp', '-D__ASSEMBLY__']
args += ['-Ulinux'] args += ['-Ulinux']
for path in search_paths: for path in search_paths:
@ -92,7 +92,7 @@ def EnsureCompiled(fname, tmpdir=None, capture_stderr=False):
search_list = [] search_list = []
for path in search_paths: for path in search_paths:
search_list.extend(['-i', path]) search_list.extend(['-i', path])
dtc, args = tools.GetTargetCompileTool('dtc') dtc, args = tools.get_target_compile_tool('dtc')
args += ['-I', 'dts', '-o', dtb_output, '-O', 'dtb', args += ['-I', 'dts', '-o', dtb_output, '-O', 'dtb',
'-W', 'no-unit_address_vs_reg'] '-W', 'no-unit_address_vs_reg']
args.extend(search_list) args.extend(search_list)
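
EnsureCompiled() above assembles two tool invocations with the renamed helpers: a cc preprocessing pass, then dtc. A rough sketch of the same flow; the input filename and the use of tools.run() as the runner are assumptions (the real code may drive the tools through its own command wrapper):

    from patman import tools  # assumed import path

    tools.prepare_output_dir(None)
    dts_input = tools.get_output_filename('source.dts')
    dtb_output = tools.get_output_filename('source.dtb')

    # Preprocess so C-style includes and macros in the .dts are resolved
    cc, args = tools.get_target_compile_tool('cc')
    args += ['-E', '-P', '-x', 'assembler-with-cpp', '-D__ASSEMBLY__', '-Ulinux',
             '-o', dts_input, 'my_board.dts']              # input name is illustrative
    tools.run(cc, *args)

    # Compile the preprocessed source into a .dtb
    dtc, args = tools.get_target_compile_tool('dtc')
    args += ['-I', 'dts', '-o', dtb_output, '-O', 'dtb',
             '-W', 'no-unit_address_vs_reg', dts_input]
    tools.run(dtc, *args)

    tools.finalise_output_dir()
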

View file

@ -112,12 +112,12 @@ class TestDtoc(unittest.TestCase):
"""Tests for dtoc""" """Tests for dtoc"""
@classmethod @classmethod
def setUpClass(cls): def setUpClass(cls):
tools.PrepareOutputDir(None) tools.prepare_output_dir(None)
cls.maxDiff = None cls.maxDiff = None
@classmethod @classmethod
def tearDownClass(cls): def tearDownClass(cls):
tools.FinaliseOutputDir() tools.finalise_output_dir()
@staticmethod @staticmethod
def _write_python_string(fname, data): def _write_python_string(fname, data):
@ -218,7 +218,7 @@ class TestDtoc(unittest.TestCase):
def test_empty_file(self): def test_empty_file(self):
"""Test output from a device tree file with no nodes""" """Test output from a device tree file with no nodes"""
dtb_file = get_dtb_file('dtoc_test_empty.dts') dtb_file = get_dtb_file('dtoc_test_empty.dts')
output = tools.GetOutputFilename('output') output = tools.get_output_filename('output')
# Run this one without saved_scan to complete test coverage # Run this one without saved_scan to complete test coverage
dtb_platdata.run_steps(['struct'], dtb_file, False, output, [], None, dtb_platdata.run_steps(['struct'], dtb_file, False, output, [], None,
@ -801,7 +801,7 @@ DM_DEVICE_INST(test0) = {
def test_simple(self): def test_simple(self):
"""Test output from some simple nodes with various types of data""" """Test output from some simple nodes with various types of data"""
dtb_file = get_dtb_file('dtoc_test_simple.dts') dtb_file = get_dtb_file('dtoc_test_simple.dts')
output = tools.GetOutputFilename('output') output = tools.get_output_filename('output')
self.run_test(['struct'], dtb_file, output) self.run_test(['struct'], dtb_file, output)
with open(output) as infile: with open(output) as infile:
data = infile.read() data = infile.read()
@ -822,14 +822,14 @@ DM_DEVICE_INST(test0) = {
# Try the 'all' command # Try the 'all' command
self.run_test(['all'], dtb_file, output) self.run_test(['all'], dtb_file, output)
data = tools.ReadFile(output, binary=False) data = tools.read_file(output, binary=False)
self._check_strings( self._check_strings(
self.decl_text + self.platdata_text + self.struct_text, data) self.decl_text + self.platdata_text + self.struct_text, data)
def test_driver_alias(self): def test_driver_alias(self):
"""Test output from a device tree file with a driver alias""" """Test output from a device tree file with a driver alias"""
dtb_file = get_dtb_file('dtoc_test_driver_alias.dts') dtb_file = get_dtb_file('dtoc_test_driver_alias.dts')
output = tools.GetOutputFilename('output') output = tools.get_output_filename('output')
self.run_test(['struct'], dtb_file, output) self.run_test(['struct'], dtb_file, output)
with open(output) as infile: with open(output) as infile:
data = infile.read() data = infile.read()
@ -875,7 +875,7 @@ U_BOOT_DRVINFO(gpios_at_0) = {
def test_invalid_driver(self): def test_invalid_driver(self):
"""Test output from a device tree file with an invalid driver""" """Test output from a device tree file with an invalid driver"""
dtb_file = get_dtb_file('dtoc_test_invalid_driver.dts') dtb_file = get_dtb_file('dtoc_test_invalid_driver.dts')
output = tools.GetOutputFilename('output') output = tools.get_output_filename('output')
with test_util.capture_sys_output() as _: with test_util.capture_sys_output() as _:
dtb_platdata.run_steps( dtb_platdata.run_steps(
['struct'], dtb_file, False, output, [], None, False, ['struct'], dtb_file, False, output, [], None, False,
@ -918,7 +918,7 @@ U_BOOT_DRVINFO(spl_test) = {
def test_phandle(self): def test_phandle(self):
"""Test output from a node containing a phandle reference""" """Test output from a node containing a phandle reference"""
dtb_file = get_dtb_file('dtoc_test_phandle.dts') dtb_file = get_dtb_file('dtoc_test_phandle.dts')
output = tools.GetOutputFilename('output') output = tools.get_output_filename('output')
self.run_test(['struct'], dtb_file, output) self.run_test(['struct'], dtb_file, output)
with open(output) as infile: with open(output) as infile:
data = infile.read() data = infile.read()
@ -1013,7 +1013,7 @@ U_BOOT_DRVINFO(phandle_target) = {
def test_phandle_single(self): def test_phandle_single(self):
"""Test output from a node containing a phandle reference""" """Test output from a node containing a phandle reference"""
dtb_file = get_dtb_file('dtoc_test_phandle_single.dts') dtb_file = get_dtb_file('dtoc_test_phandle_single.dts')
output = tools.GetOutputFilename('output') output = tools.get_output_filename('output')
self.run_test(['struct'], dtb_file, output) self.run_test(['struct'], dtb_file, output)
with open(output) as infile: with open(output) as infile:
data = infile.read() data = infile.read()
@ -1029,7 +1029,7 @@ struct dtd_target {
def test_phandle_reorder(self): def test_phandle_reorder(self):
"""Test that phandle targets are generated before their references""" """Test that phandle targets are generated before their references"""
dtb_file = get_dtb_file('dtoc_test_phandle_reorder.dts') dtb_file = get_dtb_file('dtoc_test_phandle_reorder.dts')
output = tools.GetOutputFilename('output') output = tools.get_output_filename('output')
self.run_test(['platdata'], dtb_file, output) self.run_test(['platdata'], dtb_file, output)
with open(output) as infile: with open(output) as infile:
data = infile.read() data = infile.read()
@ -1071,7 +1071,7 @@ U_BOOT_DRVINFO(phandle_target) = {
def test_phandle_cd_gpio(self): def test_phandle_cd_gpio(self):
"""Test that phandle targets are generated when unsing cd-gpios""" """Test that phandle targets are generated when unsing cd-gpios"""
dtb_file = get_dtb_file('dtoc_test_phandle_cd_gpios.dts') dtb_file = get_dtb_file('dtoc_test_phandle_cd_gpios.dts')
output = tools.GetOutputFilename('output') output = tools.get_output_filename('output')
dtb_platdata.run_steps( dtb_platdata.run_steps(
['platdata'], dtb_file, False, output, [], None, False, ['platdata'], dtb_file, False, output, [], None, False,
warning_disabled=True, scan=copy_scan()) warning_disabled=True, scan=copy_scan())
@ -1157,7 +1157,7 @@ U_BOOT_DRVINFO(phandle_target) = {
"""Test a node containing an invalid phandle fails""" """Test a node containing an invalid phandle fails"""
dtb_file = get_dtb_file('dtoc_test_phandle_bad.dts', dtb_file = get_dtb_file('dtoc_test_phandle_bad.dts',
capture_stderr=True) capture_stderr=True)
output = tools.GetOutputFilename('output') output = tools.get_output_filename('output')
with self.assertRaises(ValueError) as exc: with self.assertRaises(ValueError) as exc:
self.run_test(['struct'], dtb_file, output) self.run_test(['struct'], dtb_file, output)
self.assertIn("Cannot parse 'clocks' in node 'phandle-source'", self.assertIn("Cannot parse 'clocks' in node 'phandle-source'",
@ -1167,7 +1167,7 @@ U_BOOT_DRVINFO(phandle_target) = {
"""Test a phandle target missing its #*-cells property""" """Test a phandle target missing its #*-cells property"""
dtb_file = get_dtb_file('dtoc_test_phandle_bad2.dts', dtb_file = get_dtb_file('dtoc_test_phandle_bad2.dts',
capture_stderr=True) capture_stderr=True)
output = tools.GetOutputFilename('output') output = tools.get_output_filename('output')
with self.assertRaises(ValueError) as exc: with self.assertRaises(ValueError) as exc:
self.run_test(['struct'], dtb_file, output) self.run_test(['struct'], dtb_file, output)
self.assertIn("Node 'phandle-target' has no cells property", self.assertIn("Node 'phandle-target' has no cells property",
@ -1176,7 +1176,7 @@ U_BOOT_DRVINFO(phandle_target) = {
def test_addresses64(self): def test_addresses64(self):
"""Test output from a node with a 'reg' property with na=2, ns=2""" """Test output from a node with a 'reg' property with na=2, ns=2"""
dtb_file = get_dtb_file('dtoc_test_addr64.dts') dtb_file = get_dtb_file('dtoc_test_addr64.dts')
output = tools.GetOutputFilename('output') output = tools.get_output_filename('output')
self.run_test(['struct'], dtb_file, output) self.run_test(['struct'], dtb_file, output)
with open(output) as infile: with open(output) as infile:
data = infile.read() data = infile.read()
@ -1245,7 +1245,7 @@ U_BOOT_DRVINFO(test3) = {
def test_addresses32(self): def test_addresses32(self):
"""Test output from a node with a 'reg' property with na=1, ns=1""" """Test output from a node with a 'reg' property with na=1, ns=1"""
dtb_file = get_dtb_file('dtoc_test_addr32.dts') dtb_file = get_dtb_file('dtoc_test_addr32.dts')
output = tools.GetOutputFilename('output') output = tools.get_output_filename('output')
self.run_test(['struct'], dtb_file, output) self.run_test(['struct'], dtb_file, output)
with open(output) as infile: with open(output) as infile:
data = infile.read() data = infile.read()
@ -1299,7 +1299,7 @@ U_BOOT_DRVINFO(test2) = {
def test_addresses64_32(self): def test_addresses64_32(self):
"""Test output from a node with a 'reg' property with na=2, ns=1""" """Test output from a node with a 'reg' property with na=2, ns=1"""
dtb_file = get_dtb_file('dtoc_test_addr64_32.dts') dtb_file = get_dtb_file('dtoc_test_addr64_32.dts')
output = tools.GetOutputFilename('output') output = tools.get_output_filename('output')
self.run_test(['struct'], dtb_file, output) self.run_test(['struct'], dtb_file, output)
with open(output) as infile: with open(output) as infile:
data = infile.read() data = infile.read()
@ -1368,7 +1368,7 @@ U_BOOT_DRVINFO(test3) = {
def test_addresses32_64(self): def test_addresses32_64(self):
"""Test output from a node with a 'reg' property with na=1, ns=2""" """Test output from a node with a 'reg' property with na=1, ns=2"""
dtb_file = get_dtb_file('dtoc_test_addr32_64.dts') dtb_file = get_dtb_file('dtoc_test_addr32_64.dts')
output = tools.GetOutputFilename('output') output = tools.get_output_filename('output')
self.run_test(['struct'], dtb_file, output) self.run_test(['struct'], dtb_file, output)
with open(output) as infile: with open(output) as infile:
data = infile.read() data = infile.read()
@ -1438,7 +1438,7 @@ U_BOOT_DRVINFO(test3) = {
"""Test that a reg property with an invalid type generates an error""" """Test that a reg property with an invalid type generates an error"""
# Capture stderr since dtc will emit warnings for this file # Capture stderr since dtc will emit warnings for this file
dtb_file = get_dtb_file('dtoc_test_bad_reg.dts', capture_stderr=True) dtb_file = get_dtb_file('dtoc_test_bad_reg.dts', capture_stderr=True)
output = tools.GetOutputFilename('output') output = tools.get_output_filename('output')
with self.assertRaises(ValueError) as exc: with self.assertRaises(ValueError) as exc:
self.run_test(['struct'], dtb_file, output) self.run_test(['struct'], dtb_file, output)
self.assertIn("Node 'spl-test' reg property is not an int", self.assertIn("Node 'spl-test' reg property is not an int",
@ -1448,7 +1448,7 @@ U_BOOT_DRVINFO(test3) = {
"""Test that a reg property with an invalid cell count is detected""" """Test that a reg property with an invalid cell count is detected"""
# Capture stderr since dtc will emit warnings for this file # Capture stderr since dtc will emit warnings for this file
dtb_file = get_dtb_file('dtoc_test_bad_reg2.dts', capture_stderr=True) dtb_file = get_dtb_file('dtoc_test_bad_reg2.dts', capture_stderr=True)
output = tools.GetOutputFilename('output') output = tools.get_output_filename('output')
with self.assertRaises(ValueError) as exc: with self.assertRaises(ValueError) as exc:
self.run_test(['struct'], dtb_file, output) self.run_test(['struct'], dtb_file, output)
self.assertIn( self.assertIn(
@ -1458,7 +1458,7 @@ U_BOOT_DRVINFO(test3) = {
def test_add_prop(self): def test_add_prop(self):
"""Test that a subequent node can add a new property to a struct""" """Test that a subequent node can add a new property to a struct"""
dtb_file = get_dtb_file('dtoc_test_add_prop.dts') dtb_file = get_dtb_file('dtoc_test_add_prop.dts')
output = tools.GetOutputFilename('output') output = tools.get_output_filename('output')
self.run_test(['struct'], dtb_file, output) self.run_test(['struct'], dtb_file, output)
with open(output) as infile: with open(output) as infile:
data = infile.read() data = infile.read()
@ -1523,9 +1523,9 @@ U_BOOT_DRVINFO(spl_test2) = {
def test_multi_to_file(self): def test_multi_to_file(self):
"""Test output of multiple pieces to a single file""" """Test output of multiple pieces to a single file"""
dtb_file = get_dtb_file('dtoc_test_simple.dts') dtb_file = get_dtb_file('dtoc_test_simple.dts')
output = tools.GetOutputFilename('output') output = tools.get_output_filename('output')
self.run_test(['all'], dtb_file, output) self.run_test(['all'], dtb_file, output)
data = tools.ReadFile(output, binary=False) data = tools.read_file(output, binary=False)
self._check_strings( self._check_strings(
self.decl_text + self.platdata_text + self.struct_text, data) self.decl_text + self.platdata_text + self.struct_text, data)
@ -1539,7 +1539,7 @@ U_BOOT_DRVINFO(spl_test2) = {
def test_bad_command(self): def test_bad_command(self):
"""Test running dtoc with an invalid command""" """Test running dtoc with an invalid command"""
dtb_file = get_dtb_file('dtoc_test_simple.dts') dtb_file = get_dtb_file('dtoc_test_simple.dts')
output = tools.GetOutputFilename('output') output = tools.get_output_filename('output')
with self.assertRaises(ValueError) as exc: with self.assertRaises(ValueError) as exc:
self.run_test(['invalid-cmd'], dtb_file, output) self.run_test(['invalid-cmd'], dtb_file, output)
self.assertIn( self.assertIn(
@ -1557,12 +1557,12 @@ U_BOOT_DRVINFO(spl_test2) = {
def check_output_dirs(self, instantiate): def check_output_dirs(self, instantiate):
# Remove the directory so that files from other tests are not there # Remove the directory so that files from other tests are not there
tools._RemoveOutputDir() tools._remove_output_dir()
tools.PrepareOutputDir(None) tools.prepare_output_dir(None)
# This should create the .dts and .dtb in the output directory # This should create the .dts and .dtb in the output directory
dtb_file = get_dtb_file('dtoc_test_simple.dts') dtb_file = get_dtb_file('dtoc_test_simple.dts')
outdir = tools.GetOutputDir() outdir = tools.get_output_dir()
fnames = glob.glob(outdir + '/*') fnames = glob.glob(outdir + '/*')
self.assertEqual(2, len(fnames)) self.assertEqual(2, len(fnames))
@ -1606,7 +1606,7 @@ U_BOOT_DRVINFO(spl_test2) = {
Scanner: scanner to use Scanner: scanner to use
""" """
dtb_file = get_dtb_file('dtoc_test_simple.dts') dtb_file = get_dtb_file('dtoc_test_simple.dts')
output = tools.GetOutputFilename('output') output = tools.get_output_filename('output')
# Take a copy before messing with it # Take a copy before messing with it
scan = copy_scan() scan = copy_scan()
@ -1694,7 +1694,7 @@ U_BOOT_DRVINFO(spl_test2) = {
def test_alias_read(self): def test_alias_read(self):
"""Test obtaining aliases""" """Test obtaining aliases"""
dtb_file = get_dtb_file('dtoc_test_inst.dts') dtb_file = get_dtb_file('dtoc_test_inst.dts')
output = tools.GetOutputFilename('output') output = tools.get_output_filename('output')
plat = self.run_test(['struct'], dtb_file, output) plat = self.run_test(['struct'], dtb_file, output)
scan = plat._scan scan = plat._scan
@ -1716,7 +1716,7 @@ U_BOOT_DRVINFO(spl_test2) = {
def test_alias_read_bad(self): def test_alias_read_bad(self):
"""Test invalid alias property name""" """Test invalid alias property name"""
dtb_file = get_dtb_file('dtoc_test_alias_bad.dts') dtb_file = get_dtb_file('dtoc_test_alias_bad.dts')
output = tools.GetOutputFilename('output') output = tools.get_output_filename('output')
with self.assertRaises(ValueError) as exc: with self.assertRaises(ValueError) as exc:
plat = self.run_test(['struct'], dtb_file, output) plat = self.run_test(['struct'], dtb_file, output)
self.assertIn("Cannot decode alias 'i2c4-'", str(exc.exception)) self.assertIn("Cannot decode alias 'i2c4-'", str(exc.exception))
@ -1728,7 +1728,7 @@ U_BOOT_DRVINFO(spl_test2) = {
# node (/does/not/exist) # node (/does/not/exist)
dtb_file = get_dtb_file('dtoc_test_alias_bad_path.dts', True) dtb_file = get_dtb_file('dtoc_test_alias_bad_path.dts', True)
output = tools.GetOutputFilename('output') output = tools.get_output_filename('output')
with self.assertRaises(ValueError) as exc: with self.assertRaises(ValueError) as exc:
plat = self.run_test(['struct'], dtb_file, output) plat = self.run_test(['struct'], dtb_file, output)
self.assertIn("Alias 'i2c4' path '/does/not/exist' not found", self.assertIn("Alias 'i2c4' path '/does/not/exist' not found",
@ -1737,7 +1737,7 @@ U_BOOT_DRVINFO(spl_test2) = {
def test_alias_read_bad_uclass(self): def test_alias_read_bad_uclass(self):
"""Test alias for a uclass that doesn't exist""" """Test alias for a uclass that doesn't exist"""
dtb_file = get_dtb_file('dtoc_test_alias_bad_uc.dts') dtb_file = get_dtb_file('dtoc_test_alias_bad_uc.dts')
output = tools.GetOutputFilename('output') output = tools.get_output_filename('output')
with test_util.capture_sys_output() as (stdout, _): with test_util.capture_sys_output() as (stdout, _):
plat = self.run_test(['struct'], dtb_file, output) plat = self.run_test(['struct'], dtb_file, output)
self.assertEqual("Could not find uclass for alias 'other1'", self.assertEqual("Could not find uclass for alias 'other1'",
@ -1746,7 +1746,7 @@ U_BOOT_DRVINFO(spl_test2) = {
def test_sequence(self): def test_sequence(self):
"""Test assignment of sequence numnbers""" """Test assignment of sequence numnbers"""
dtb_file = get_dtb_file('dtoc_test_inst.dts') dtb_file = get_dtb_file('dtoc_test_inst.dts')
output = tools.GetOutputFilename('output') output = tools.get_output_filename('output')
plat = self.run_test(['struct'], dtb_file, output) plat = self.run_test(['struct'], dtb_file, output)
scan = plat._scan scan = plat._scan
@ -1762,7 +1762,7 @@ U_BOOT_DRVINFO(spl_test2) = {
def test_process_root(self): def test_process_root(self):
"""Test assignment of sequence numnbers""" """Test assignment of sequence numnbers"""
dtb_file = get_dtb_file('dtoc_test_simple.dts') dtb_file = get_dtb_file('dtoc_test_simple.dts')
output = tools.GetOutputFilename('output') output = tools.get_output_filename('output')
# Take a copy before messing with it # Take a copy before messing with it
scan = copy_scan() scan = copy_scan()
@ -1781,7 +1781,7 @@ U_BOOT_DRVINFO(spl_test2) = {
def test_simple_inst(self): def test_simple_inst(self):
"""Test output from some simple nodes with instantiate enabled""" """Test output from some simple nodes with instantiate enabled"""
dtb_file = get_dtb_file('dtoc_test_inst.dts') dtb_file = get_dtb_file('dtoc_test_inst.dts')
output = tools.GetOutputFilename('output') output = tools.get_output_filename('output')
self.run_test(['decl'], dtb_file, output, True) self.run_test(['decl'], dtb_file, output, True)
with open(output) as infile: with open(output) as infile:
@ -1804,7 +1804,7 @@ U_BOOT_DRVINFO(spl_test2) = {
def test_inst_no_hdr(self): def test_inst_no_hdr(self):
"""Test dealing with a struct tsssshat has no header""" """Test dealing with a struct tsssshat has no header"""
dtb_file = get_dtb_file('dtoc_test_inst.dts') dtb_file = get_dtb_file('dtoc_test_inst.dts')
output = tools.GetOutputFilename('output') output = tools.get_output_filename('output')
# Run it once to set everything up # Run it once to set everything up
plat = self.run_test(['decl'], dtb_file, output, True) plat = self.run_test(['decl'], dtb_file, output, True)
@ -1824,7 +1824,7 @@ U_BOOT_DRVINFO(spl_test2) = {
def test_missing_props(self): def test_missing_props(self):
"""Test detection of a parent node with no properties""" """Test detection of a parent node with no properties"""
dtb_file = get_dtb_file('dtoc_test_noprops.dts', capture_stderr=True) dtb_file = get_dtb_file('dtoc_test_noprops.dts', capture_stderr=True)
output = tools.GetOutputFilename('output') output = tools.get_output_filename('output')
with self.assertRaises(ValueError) as exc: with self.assertRaises(ValueError) as exc:
self.run_test(['struct'], dtb_file, output) self.run_test(['struct'], dtb_file, output)
self.assertIn("Parent node '/i2c@0' has no properties - do you need", self.assertIn("Parent node '/i2c@0' has no properties - do you need",
@ -1833,13 +1833,13 @@ U_BOOT_DRVINFO(spl_test2) = {
def test_single_reg(self): def test_single_reg(self):
"""Test detection of a parent node with no properties""" """Test detection of a parent node with no properties"""
dtb_file = get_dtb_file('dtoc_test_single_reg.dts') dtb_file = get_dtb_file('dtoc_test_single_reg.dts')
output = tools.GetOutputFilename('output') output = tools.get_output_filename('output')
self.run_test(['struct'], dtb_file, output) self.run_test(['struct'], dtb_file, output)
def test_missing_parent(self): def test_missing_parent(self):
"""Test detection of a parent node with no properties""" """Test detection of a parent node with no properties"""
dtb_file = get_dtb_file('dtoc_test_noparent.dts', capture_stderr=True) dtb_file = get_dtb_file('dtoc_test_noparent.dts', capture_stderr=True)
output = tools.GetOutputFilename('output') output = tools.get_output_filename('output')
with self.assertRaises(ValueError) as exc: with self.assertRaises(ValueError) as exc:
self.run_test(['device'], dtb_file, output, instantiate=True) self.run_test(['device'], dtb_file, output, instantiate=True)
self.assertIn("Node '/i2c@0/spl-test/pmic@9' requires parent node " self.assertIn("Node '/i2c@0/spl-test/pmic@9' requires parent node "

View file

@ -74,11 +74,11 @@ class TestFdt(unittest.TestCase):
""" """
@classmethod @classmethod
def setUpClass(cls): def setUpClass(cls):
tools.PrepareOutputDir(None) tools.prepare_output_dir(None)
@classmethod @classmethod
def tearDownClass(cls): def tearDownClass(cls):
tools.FinaliseOutputDir() tools.finalise_output_dir()
def setUp(self): def setUp(self):
self.dtb = fdt.FdtScan(find_dtb_file('dtoc_test_simple.dts')) self.dtb = fdt.FdtScan(find_dtb_file('dtoc_test_simple.dts'))
@ -152,11 +152,11 @@ class TestNode(unittest.TestCase):
@classmethod @classmethod
def setUpClass(cls): def setUpClass(cls):
tools.PrepareOutputDir(None) tools.prepare_output_dir(None)
@classmethod @classmethod
def tearDownClass(cls): def tearDownClass(cls):
tools.FinaliseOutputDir() tools.finalise_output_dir()
def setUp(self): def setUp(self):
self.dtb = fdt.FdtScan(find_dtb_file('dtoc_test_simple.dts')) self.dtb = fdt.FdtScan(find_dtb_file('dtoc_test_simple.dts'))
@ -294,11 +294,11 @@ class TestProp(unittest.TestCase):
@classmethod @classmethod
def setUpClass(cls): def setUpClass(cls):
tools.PrepareOutputDir(None) tools.prepare_output_dir(None)
@classmethod @classmethod
def tearDownClass(cls): def tearDownClass(cls):
tools.FinaliseOutputDir() tools.finalise_output_dir()
def setUp(self): def setUp(self):
self.dtb = fdt.FdtScan(find_dtb_file('dtoc_test_simple.dts')) self.dtb = fdt.FdtScan(find_dtb_file('dtoc_test_simple.dts'))
@ -370,7 +370,7 @@ class TestProp(unittest.TestCase):
"""Tests the GetEmpty() function for the various supported types""" """Tests the GetEmpty() function for the various supported types"""
self.assertEqual(True, fdt.Prop.GetEmpty(Type.BOOL)) self.assertEqual(True, fdt.Prop.GetEmpty(Type.BOOL))
self.assertEqual(chr(0), fdt.Prop.GetEmpty(Type.BYTE)) self.assertEqual(chr(0), fdt.Prop.GetEmpty(Type.BYTE))
self.assertEqual(tools.GetBytes(0, 4), fdt.Prop.GetEmpty(Type.INT)) self.assertEqual(tools.get_bytes(0, 4), fdt.Prop.GetEmpty(Type.INT))
self.assertEqual('', fdt.Prop.GetEmpty(Type.STRING)) self.assertEqual('', fdt.Prop.GetEmpty(Type.STRING))
def testGetOffset(self): def testGetOffset(self):
@ -501,7 +501,7 @@ class TestProp(unittest.TestCase):
self.node.AddString('string', val) self.node.AddString('string', val)
self.dtb.Sync(auto_resize=True) self.dtb.Sync(auto_resize=True)
data = self.fdt.getprop(self.node.Offset(), 'string') data = self.fdt.getprop(self.node.Offset(), 'string')
self.assertEqual(tools.ToBytes(val) + b'\0', data) self.assertEqual(tools.to_bytes(val) + b'\0', data)
self.fdt.pack() self.fdt.pack()
self.node.SetString('string', val + 'x') self.node.SetString('string', val + 'x')
@ -511,24 +511,24 @@ class TestProp(unittest.TestCase):
self.node.SetString('string', val[:-1]) self.node.SetString('string', val[:-1])
prop = self.node.props['string'] prop = self.node.props['string']
prop.SetData(tools.ToBytes(val)) prop.SetData(tools.to_bytes(val))
self.dtb.Sync(auto_resize=False) self.dtb.Sync(auto_resize=False)
data = self.fdt.getprop(self.node.Offset(), 'string') data = self.fdt.getprop(self.node.Offset(), 'string')
self.assertEqual(tools.ToBytes(val), data) self.assertEqual(tools.to_bytes(val), data)
self.node.AddEmptyProp('empty', 5) self.node.AddEmptyProp('empty', 5)
self.dtb.Sync(auto_resize=True) self.dtb.Sync(auto_resize=True)
prop = self.node.props['empty'] prop = self.node.props['empty']
prop.SetData(tools.ToBytes(val)) prop.SetData(tools.to_bytes(val))
self.dtb.Sync(auto_resize=False) self.dtb.Sync(auto_resize=False)
data = self.fdt.getprop(self.node.Offset(), 'empty') data = self.fdt.getprop(self.node.Offset(), 'empty')
self.assertEqual(tools.ToBytes(val), data) self.assertEqual(tools.to_bytes(val), data)
self.node.SetData('empty', b'123') self.node.SetData('empty', b'123')
self.assertEqual(b'123', prop.bytes) self.assertEqual(b'123', prop.bytes)
# Try adding a lot of data at once # Try adding a lot of data at once
self.node.AddData('data', tools.GetBytes(65, 20000)) self.node.AddData('data', tools.get_bytes(65, 20000))
self.dtb.Sync(auto_resize=True) self.dtb.Sync(auto_resize=True)
def testFromData(self): def testFromData(self):
@ -562,7 +562,7 @@ class TestProp(unittest.TestCase):
def testGetFilename(self): def testGetFilename(self):
"""Test the dtb filename can be provided""" """Test the dtb filename can be provided"""
self.assertEqual(tools.GetOutputFilename('source.dtb'), self.assertEqual(tools.get_output_filename('source.dtb'),
self.dtb.GetFilename()) self.dtb.GetFilename())
@ -575,11 +575,11 @@ class TestFdtUtil(unittest.TestCase):
""" """
@classmethod @classmethod
def setUpClass(cls): def setUpClass(cls):
tools.PrepareOutputDir(None) tools.prepare_output_dir(None)
@classmethod @classmethod
def tearDownClass(cls): def tearDownClass(cls):
tools.FinaliseOutputDir() tools.finalise_output_dir()
def setUp(self): def setUp(self):
self.dtb = fdt.FdtScan(find_dtb_file('dtoc_test_simple.dts')) self.dtb = fdt.FdtScan(find_dtb_file('dtoc_test_simple.dts'))

View file

@ -43,11 +43,11 @@ class TestSrcScan(unittest.TestCase):
"""Tests for src_scan""" """Tests for src_scan"""
@classmethod @classmethod
def setUpClass(cls): def setUpClass(cls):
tools.PrepareOutputDir(None) tools.prepare_output_dir(None)
@classmethod @classmethod
def tearDownClass(cls): def tearDownClass(cls):
tools.FinaliseOutputDir() tools.finalise_output_dir()
def test_simple(self): def test_simple(self):
"""Simple test of scanning drivers""" """Simple test of scanning drivers"""
@ -113,7 +113,7 @@ class TestSrcScan(unittest.TestCase):
pathname = os.path.join(indir, fname) pathname = os.path.join(indir, fname)
dirname = os.path.dirname(pathname) dirname = os.path.dirname(pathname)
os.makedirs(dirname, exist_ok=True) os.makedirs(dirname, exist_ok=True)
tools.WriteFile(pathname, '', binary=False) tools.write_file(pathname, '', binary=False)
fname_list.append(pathname) fname_list.append(pathname)
try: try:
@ -142,7 +142,7 @@ class TestSrcScan(unittest.TestCase):
def test_scan(self): def test_scan(self):
"""Test scanning of a driver""" """Test scanning of a driver"""
fname = os.path.join(OUR_PATH, '..', '..', 'drivers/i2c/tegra_i2c.c') fname = os.path.join(OUR_PATH, '..', '..', 'drivers/i2c/tegra_i2c.c')
buff = tools.ReadFile(fname, False) buff = tools.read_file(fname, False)
scan = src_scan.Scanner(None, None) scan = src_scan.Scanner(None, None)
scan._parse_driver(fname, buff) scan._parse_driver(fname, buff)
self.assertIn('i2c_tegra', scan._drivers) self.assertIn('i2c_tegra', scan._drivers)
@ -374,8 +374,8 @@ struct another_struct {
def test_struct_scan_errors(self): def test_struct_scan_errors(self):
"""Test scanning a header file with an invalid unicode file""" """Test scanning a header file with an invalid unicode file"""
output = tools.GetOutputFilename('output.h') output = tools.get_output_filename('output.h')
tools.WriteFile(output, b'struct this is a test \x81 of bad unicode') tools.write_file(output, b'struct this is a test \x81 of bad unicode')
scan = src_scan.Scanner(None, None) scan = src_scan.Scanner(None, None)
with test_util.capture_sys_output() as (stdout, _): with test_util.capture_sys_output() as (stdout, _):

View file

@ -338,7 +338,7 @@ Changes in v2:
text (str): Text to put into the file text (str): Text to put into the file
""" """
path = os.path.join(self.gitdir, fname) path = os.path.join(self.gitdir, fname)
tools.WriteFile(path, text, binary=False) tools.write_file(path, text, binary=False)
index = self.repo.index index = self.repo.index
index.add(fname) index.add(fname)
author = pygit2.Signature('Test user', 'test@email.com') author = pygit2.Signature('Test user', 'test@email.com')

View file

@ -159,7 +159,7 @@ elif args.cmd == 'send':
fd.close() fd.close()
elif args.full_help: elif args.full_help:
tools.PrintFullHelp( tools.print_full_help(
os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), 'README') os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), 'README')
) )

View file

@ -23,7 +23,7 @@ preserve_outdir = False
# Path to the Chrome OS chroot, if we know it # Path to the Chrome OS chroot, if we know it
chroot_path = None chroot_path = None
# Search paths to use for Filename(), used to find files # Search paths to use for filename(), used to find files
search_paths = [] search_paths = []
tool_search_paths = [] tool_search_paths = []
@ -36,7 +36,7 @@ packages = {
# List of paths to use when looking for an input file # List of paths to use when looking for an input file
indir = [] indir = []
def PrepareOutputDir(dirname, preserve=False): def prepare_output_dir(dirname, preserve=False):
"""Select an output directory, ensuring it exists. """Select an output directory, ensuring it exists.
This either creates a temporary directory or checks that the one supplied This either creates a temporary directory or checks that the one supplied
@ -69,22 +69,22 @@ def PrepareOutputDir(dirname, preserve=False):
outdir = tempfile.mkdtemp(prefix='binman.') outdir = tempfile.mkdtemp(prefix='binman.')
tout.Debug("Using temporary directory '%s'" % outdir) tout.Debug("Using temporary directory '%s'" % outdir)
def _RemoveOutputDir(): def _remove_output_dir():
global outdir global outdir
shutil.rmtree(outdir) shutil.rmtree(outdir)
tout.Debug("Deleted temporary directory '%s'" % outdir) tout.Debug("Deleted temporary directory '%s'" % outdir)
outdir = None outdir = None
def FinaliseOutputDir(): def finalise_output_dir():
global outdir, preserve_outdir global outdir, preserve_outdir
"""Tidy up: delete output directory if temporary and not preserved.""" """Tidy up: delete output directory if temporary and not preserved."""
if outdir and not preserve_outdir: if outdir and not preserve_outdir:
_RemoveOutputDir() _remove_output_dir()
outdir = None outdir = None
def GetOutputFilename(fname): def get_output_filename(fname):
"""Return a filename within the output directory. """Return a filename within the output directory.
Args: Args:
@ -95,7 +95,7 @@ def GetOutputFilename(fname):
""" """
return os.path.join(outdir, fname) return os.path.join(outdir, fname)
def GetOutputDir(): def get_output_dir():
"""Return the current output directory """Return the current output directory
Returns: Returns:
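The hunks above rename the output-directory lifecycle functions. A typical caller pattern, sketched under the assumption that tools is imported from patman, is:

    from patman import tools  # assumed import path

    tools.prepare_output_dir(None)   # None -> create a temporary 'binman.*' directory
    try:
        print(tools.get_output_dir())                 # e.g. /tmp/binman.abc123 (illustrative)
        out = tools.get_output_filename('image.bin')  # a path inside that directory
    finally:
        tools.finalise_output_dir()  # deleted here unless preserve_outdir is set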
@ -103,15 +103,15 @@ def GetOutputDir():
""" """
return outdir return outdir
def _FinaliseForTest(): def _finalise_for_test():
"""Remove the output directory (for use by tests)""" """Remove the output directory (for use by tests)"""
global outdir global outdir
if outdir: if outdir:
_RemoveOutputDir() _remove_output_dir()
outdir = None outdir = None
def SetInputDirs(dirname): def set_input_dirs(dirname):
"""Add a list of input directories, where input files are kept. """Add a list of input directories, where input files are kept.
Args: Args:
@ -123,7 +123,7 @@ def SetInputDirs(dirname):
indir = dirname indir = dirname
tout.Debug("Using input directories %s" % indir) tout.Debug("Using input directories %s" % indir)
def GetInputFilename(fname, allow_missing=False): def get_input_filename(fname, allow_missing=False):
"""Return a filename for use as input. """Return a filename for use as input.
Args: Args:
@ -150,7 +150,7 @@ def GetInputFilename(fname, allow_missing=False):
raise ValueError("Filename '%s' not found in input path (%s) (cwd='%s')" % raise ValueError("Filename '%s' not found in input path (%s) (cwd='%s')" %
(fname, ','.join(indir), os.getcwd())) (fname, ','.join(indir), os.getcwd()))
def GetInputFilenameGlob(pattern): def get_input_filename_glob(pattern):
"""Return a list of filenames for use as input. """Return a list of filenames for use as input.
Args: Args:
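Since set_input_dirs(), get_input_filename() and get_input_filename_glob() are renamed together, a short illustrative sequence (directory and file names are made up) looks like:

    from patman import tools  # assumed import path

    tools.set_input_dirs(['board/firmware', 'board/blobs'])  # hypothetical directories
    fname = tools.get_input_filename('u-boot.dtb')           # raises ValueError if not found
    extra = tools.get_input_filename('optional.bin', allow_missing=True)  # no error if missing
    dtbs = tools.get_input_filename_glob('*.dtb')            # sorted list of matches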
@ -167,26 +167,26 @@ def GetInputFilenameGlob(pattern):
files += glob.glob(pathname) files += glob.glob(pathname)
return sorted(files) return sorted(files)
def Align(pos, align): def align(pos, align):
if align: if align:
mask = align - 1 mask = align - 1
pos = (pos + mask) & ~mask pos = (pos + mask) & ~mask
return pos return pos
def NotPowerOfTwo(num): def not_power_of_two(num):
return num and (num & (num - 1)) return num and (num & (num - 1))
def SetToolPaths(toolpaths): def set_tool_paths(toolpaths):
"""Set the path to search for tools """Set the path to search for tools
Args: Args:
toolpaths: List of paths to search for tools executed by Run() toolpaths: List of paths to search for tools executed by run()
""" """
global tool_search_paths global tool_search_paths
tool_search_paths = toolpaths tool_search_paths = toolpaths
def PathHasFile(path_spec, fname): def path_has_file(path_spec, fname):
"""Check if a given filename is in the PATH """Check if a given filename is in the PATH
Args: Args:
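align() and not_power_of_two() are pure functions, so a few worked values help to confirm the rename does not change behaviour:

    from patman import tools  # assumed import path

    assert tools.align(0x123, 0x100) == 0x200  # round 0x123 up to a 0x100 boundary
    assert tools.align(0x200, 0x100) == 0x200  # already-aligned values are unchanged
    assert tools.align(0x123, 0) == 0x123      # a zero/None alignment leaves pos alone
    assert not tools.not_power_of_two(0x400)   # power of two -> falsy
    assert tools.not_power_of_two(0x300)       # not a power of two -> truthy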
@ -201,7 +201,7 @@ def PathHasFile(path_spec, fname):
return True return True
return False return False
def GetHostCompileTool(name): def get_host_compile_tool(name):
"""Get the host-specific version for a compile tool """Get the host-specific version for a compile tool
This checks the environment variables that specify which version of This checks the environment variables that specify which version of
@ -244,7 +244,7 @@ def GetHostCompileTool(name):
return host_name, extra_args return host_name, extra_args
return name, [] return name, []
def GetTargetCompileTool(name, cross_compile=None): def get_target_compile_tool(name, cross_compile=None):
"""Get the target-specific version for a compile tool """Get the target-specific version for a compile tool
This first checks the environment variables that specify which This first checks the environment variables that specify which
@ -298,7 +298,7 @@ def GetTargetCompileTool(name, cross_compile=None):
target_name = cross_compile + name target_name = cross_compile + name
elif name == 'ld': elif name == 'ld':
try: try:
if Run(cross_compile + 'ld.bfd', '-v'): if run(cross_compile + 'ld.bfd', '-v'):
target_name = cross_compile + 'ld.bfd' target_name = cross_compile + 'ld.bfd'
except: except:
target_name = cross_compile + 'ld' target_name = cross_compile + 'ld'
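The cross-compile handling can be exercised directly; the prefix below is only an example:

    from patman import tools  # assumed import path

    name, extra_args = tools.get_target_compile_tool(
        'gcc', cross_compile='aarch64-linux-gnu-')
    # name is typically 'aarch64-linux-gnu-gcc'; extra_args is usually empty for gcc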
@ -353,10 +353,10 @@ def run_result(name, *args, **kwargs):
raise_on_error = kwargs.get('raise_on_error', True) raise_on_error = kwargs.get('raise_on_error', True)
env = get_env_with_path() env = get_env_with_path()
if for_target: if for_target:
name, extra_args = GetTargetCompileTool(name) name, extra_args = get_target_compile_tool(name)
args = tuple(extra_args) + args args = tuple(extra_args) + args
elif for_host: elif for_host:
name, extra_args = GetHostCompileTool(name) name, extra_args = get_host_compile_tool(name)
args = tuple(extra_args) + args args = tuple(extra_args) + args
name = os.path.expanduser(name) # Expand paths containing ~ name = os.path.expanduser(name) # Expand paths containing ~
all_args = (name,) + args all_args = (name,) + args
@ -369,7 +369,7 @@ def run_result(name, *args, **kwargs):
result.stderr or result.stdout)) result.stderr or result.stdout))
return result return result
except ValueError: except ValueError:
if env and not PathHasFile(env['PATH'], name): if env and not path_has_file(env['PATH'], name):
msg = "Please install tool '%s'" % name msg = "Please install tool '%s'" % name
package = packages.get(name) package = packages.get(name)
if package: if package:
@ -380,7 +380,7 @@ def run_result(name, *args, **kwargs):
def tool_find(name): def tool_find(name):
"""Search the current path for a tool """Search the current path for a tool
This uses both PATH and any value from SetToolPaths() to search for a tool This uses both PATH and any value from set_tool_paths() to search for a tool
Args: Args:
name (str): Name of tool to locate name (str): Name of tool to locate
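tool_find() consults both PATH and any directories registered with set_tool_paths(); a sketch with a made-up directory:

    from patman import tools  # assumed import path

    tools.set_tool_paths(['/opt/custom-tools/bin'])  # hypothetical extra search directory
    fname = tools.tool_find('lz4')                   # absolute path if found, else None
    if fname:
        print(f'found lz4 at {fname}')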
@ -400,7 +400,7 @@ def tool_find(name):
if os.path.isfile(fname) and os.access(fname, os.X_OK): if os.path.isfile(fname) and os.access(fname, os.X_OK):
return fname return fname
def Run(name, *args, **kwargs): def run(name, *args, **kwargs):
"""Run a tool with some arguments """Run a tool with some arguments
This runs a 'tool', which is a program used by binman to process files and This runs a 'tool', which is a program used by binman to process files and
@ -421,7 +421,7 @@ def Run(name, *args, **kwargs):
if result is not None: if result is not None:
return result.stdout return result.stdout
def Filename(fname): def filename(fname):
"""Resolve a file path to an absolute path. """Resolve a file path to an absolute path.
If fname starts with ##/ and chroot is available, ##/ gets replaced with If fname starts with ##/ and chroot is available, ##/ gets replaced with
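Callers invoke tools.run() exactly as they did the old Run(); the command below is illustrative:

    from patman import tools  # assumed import path

    stdout = tools.run('ls', '-l')  # run the command and return its stdout as a string
    # Keyword arguments such as for_target/for_host are passed through to run_result()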
@ -455,7 +455,7 @@ def Filename(fname):
# If not found, just return the standard, unchanged path # If not found, just return the standard, unchanged path
return fname return fname
def ReadFile(fname, binary=True): def read_file(fname, binary=True):
"""Read and return the contents of a file. """Read and return the contents of a file.
Args: Args:
@ -464,13 +464,13 @@ def ReadFile(fname, binary=True):
Returns: Returns:
data read from file, as a string. data read from file, as a string.
""" """
with open(Filename(fname), binary and 'rb' or 'r') as fd: with open(filename(fname), binary and 'rb' or 'r') as fd:
data = fd.read() data = fd.read()
#self._out.Info("Read file '%s' size %d (%#0x)" % #self._out.Info("Read file '%s' size %d (%#0x)" %
#(fname, len(data), len(data))) #(fname, len(data), len(data)))
return data return data
def WriteFile(fname, data, binary=True): def write_file(fname, data, binary=True):
"""Write data into a file. """Write data into a file.
Args: Args:
@ -479,10 +479,10 @@ def WriteFile(fname, data, binary=True):
""" """
#self._out.Info("Write file '%s' size %d (%#0x)" % #self._out.Info("Write file '%s' size %d (%#0x)" %
#(fname, len(data), len(data))) #(fname, len(data), len(data)))
with open(Filename(fname), binary and 'wb' or 'w') as fd: with open(filename(fname), binary and 'wb' or 'w') as fd:
fd.write(data) fd.write(data)
def GetBytes(byte, size): def get_bytes(byte, size):
"""Get a string of bytes of a given size """Get a string of bytes of a given size
Args: Args:
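A short round trip through the renamed file helpers (the paths are hypothetical):

    from patman import tools  # assumed import path

    tools.write_file('/tmp/example.bin', bytes(4))              # binary=True is the default
    data = tools.read_file('/tmp/example.bin')                  # returns bytes
    tools.write_file('/tmp/example.txt', 'hello', binary=False)
    text = tools.read_file('/tmp/example.txt', binary=False)    # returns str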
@ -494,7 +494,7 @@ def GetBytes(byte, size):
""" """
return bytes([byte]) * size return bytes([byte]) * size
def ToBytes(string): def to_bytes(string):
"""Convert a str type into a bytes type """Convert a str type into a bytes type
Args: Args:
@ -505,7 +505,7 @@ def ToBytes(string):
""" """
return string.encode('utf-8') return string.encode('utf-8')
def ToString(bval): def to_string(bval):
"""Convert a bytes type into a str type """Convert a bytes type into a str type
Args: Args:
@ -517,7 +517,7 @@ def ToString(bval):
""" """
return bval.decode('utf-8') return bval.decode('utf-8')
def ToHex(val): def to_hex(val):
"""Convert an integer value (or None) to a string """Convert an integer value (or None) to a string
Returns: Returns:
@ -525,7 +525,7 @@ def ToHex(val):
""" """
return 'None' if val is None else '%#x' % val return 'None' if val is None else '%#x' % val
def ToHexSize(val): def to_hex_size(val):
"""Return the size of an object in hex """Return the size of an object in hex
Returns: Returns:
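The small conversion helpers keep their behaviour, only the names change; a few checks:

    from patman import tools  # assumed import path

    assert tools.to_bytes('abc') == b'abc'
    assert tools.to_string(b'abc') == 'abc'
    assert tools.get_bytes(0xff, 4) == b'\xff\xff\xff\xff'
    assert tools.to_hex(None) == 'None'
    assert tools.to_hex(0x1234) == '0x1234'
    assert tools.to_hex_size(b'abcd') == '0x4'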
@ -533,7 +533,7 @@ def ToHexSize(val):
""" """
return 'None' if val is None else '%#x' % len(val) return 'None' if val is None else '%#x' % len(val)
def PrintFullHelp(fname): def print_full_help(fname):
"""Print the full help message for a tool using an appropriate pager. """Print the full help message for a tool using an appropriate pager.
Args: Args:
@ -547,7 +547,7 @@ def PrintFullHelp(fname):
pager = ['more'] pager = ['more']
command.Run(*pager, fname) command.Run(*pager, fname)
def Download(url, tmpdir_pattern='.patman'): def download(url, tmpdir_pattern='.patman'):
"""Download a file to a temporary directory """Download a file to a temporary directory
Args: Args: