binman fixes for options, etc.
binman template fixes / tweaks

Merge tag 'dm-pull-2aug23' of https://source.denx.de/u-boot/custodians/u-boot-dm
(commit 35e6c89b76)
Makefile (8 lines changed)

@@ -1328,13 +1328,18 @@ u-boot.ldr: u-boot
 # Use 'make BINMAN_VERBOSE=3' to set vebosity level
 default_dt := $(if $(DEVICE_TREE),$(DEVICE_TREE),$(CONFIG_DEFAULT_DEVICE_TREE))
 
+# Temporary workaround for Venice boards
+ifneq ($(CONFIG_TARGET_IMX8MM_VENICE),$(CONFIG_TARGET_IMX8MN_VENICE),$(CONFIG_TARGET_IMX8MP_VENICE),)
+ignore_dups := --ignore-dup-phandles
+endif
+
 quiet_cmd_binman = BINMAN $@
 cmd_binman = $(srctree)/tools/binman/binman $(if $(BINMAN_DEBUG),-D) \
 $(foreach f,$(BINMAN_TOOLPATHS),--toolpath $(f)) \
 --toolpath $(objtree)/tools \
 $(if $(BINMAN_VERBOSE),-v$(BINMAN_VERBOSE)) \
 build -u -d u-boot.dtb -O . -m \
-$(if $(BINMAN_ALLOW_MISSING),--allow-missing --ignore-missing) \
+--allow-missing $(if $(BINMAN_ALLOW_MISSING),--ignore-missing) \
 -I . -I $(srctree) -I $(srctree)/board/$(BOARDDIR) \
 -I arch/$(ARCH)/dts -a of-list=$(CONFIG_OF_LIST) \
 $(foreach f,$(BINMAN_INDIRS),-I $(f)) \
@@ -1349,6 +1354,7 @@ cmd_binman = $(srctree)/tools/binman/binman $(if $(BINMAN_DEBUG),-D) \
 -a spl-dtb=$(CONFIG_SPL_OF_REAL) \
 -a tpl-dtb=$(CONFIG_TPL_OF_REAL) \
 -a pre-load-key-path=${PRE_LOAD_KEY_PATH} \
+$(ignore_dups) \
 $(BINMAN_$(@F))
 
 OBJCOPYFLAGS_u-boot.ldr.hex := -I binary -O ihex
@@ -216,7 +216,7 @@ void bootdev_list(bool probe)
 for (i = 0; dev; i++) {
 printf("%3x [ %c ] %6s %-9.9s %s\n", dev_seq(dev),
 device_active(dev) ? '+' : ' ',
-ret ? simple_itoa(ret) : "OK",
+ret ? simple_itoa(-ret) : "OK",
 dev_get_uclass_name(dev_get_parent(dev)), dev->name);
 if (probe)
 ret = uclass_next_device_check(&dev);
@@ -99,7 +99,7 @@ static int do_bootdev_info(struct cmd_tbl *cmdtp, int flag, int argc,
 
 printf("Name: %s\n", dev->name);
 printf("Sequence: %d\n", dev_seq(dev));
-printf("Status: %s\n", ret ? simple_itoa(ret) : device_active(dev) ?
+printf("Status: %s\n", ret ? simple_itoa(-ret) : device_active(dev) ?
 "Probed" : "OK");
 printf("Uclass: %s\n", dev_get_uclass_name(dev_get_parent(dev)));
 printf("Bootflows: %d (%d valid)\n", i, num_valid);
@@ -1256,8 +1256,32 @@ Properties in the template node are inserted into the destination node if they
 do not exist there. In the example above, `some-property` is added to each of
 `spi-image` and `mmc-image`.
 
-Note that template nodes are not removed from the binman description at present.
+Note that template nodes are removed from the binman description after
+processing and before binman builds the image descriptions.
 
+The initial devicetree produced by the templating process is written to the
+`u-boot.dtb.tmpl1` file. This can be useful to see what is going on if there is
+a failure before the final `u-boot.dtb.out` file is written. A second
+`u-boot.dtb.tmpl2` file is written when the templates themselves are removed.
+
+Dealing with phandles
+---------------------
+
+Templates can contain phandles and these are copied to the destination node.
+However this should be used with care, since if a template is instantiated twice
+then the phandle will be copied twice, resulting in a devicetree with duplicate
+phandles, i.e. the same phandle used by two different nodes. Binman detects this
+situation and produces an error, for example::
+
+    Duplicate phandle 1 in nodes /binman/image/fit/images/atf/atf-bl31 and
+    /binman/image-2/fit/images/atf/atf-bl31
+
+In this case an atf-bl31 node containing a phandle has been copied into two
+different target nodes, resulting in the same phandle for each. See
+testTemplatePhandleDup() for the test case.
+
+The solution is typically to put the phandles in the corresponding target nodes
+(one for each) and remove the phandle from the template.
 
 Updating an ELF file
 ====================
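The duplicate-phandle behaviour documented above can also be reproduced directly with the dtoc fdt module changed later in this commit. The following is a minimal, hypothetical sketch: the `copy_test.dtb` file and its `/base` and `/dest` node layout are assumptions, and `tools/dtoc` plus `tools/u_boot_pylib` from the U-Boot tree must be on `PYTHONPATH`. It mirrors the updated `test_copy_node()` test further down::

    # Hypothetical sketch: '/base' has a subnode carrying a phandle and
    # '/dest' is an empty target node in copy_test.dtb (names assumed).
    from dtoc import fdt

    dtb = fdt.FdtScan('copy_test.dtb')
    tmpl = dtb.GetNode('/base')
    dst = dtb.GetNode('/dest')

    # copy_node() now copies phandles found in subnodes, so the copy under
    # '/dest' ends up sharing phandles with the original '/base' subnodes
    dst.copy_node(tmpl)
    dtb.Sync(auto_resize=True)

    # Re-scanning the resulting blob trips the new duplicate-phandle check
    new_dtb = fdt.Fdt.FromData(dtb.GetContents())
    try:
        new_dtb.Scan()
    except ValueError as exc:
        print(exc)   # Duplicate phandle ... in nodes ... and ...

    # The temporary 'binman build --ignore-dup-phandles' escape hatch simply
    # sets this module flag before scanning
    fdt.IGNORE_DUP_PHANDLES = True
    fdt.Fdt.FromData(dtb.GetContents()).Scan()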
@@ -126,6 +126,8 @@ controlled by a description in the board device tree.'''
 help='Comma-separated list of bintools to consider missing (for testing)')
 build_parser.add_argument('-i', '--image', type=str, action='append',
 help='Image filename to build (if not specified, build all)')
+build_parser.add_argument('--ignore-dup-phandles', action='store_true',
+help='Temporary option to ignore duplicate phandles')
 build_parser.add_argument('-I', '--indir', action='append',
 help='Add a path to the list of directories to use for input files')
 build_parser.add_argument('-m', '--map', action='store_true',
@@ -22,6 +22,7 @@ from binman import bintool
 from binman import cbfs_util
 from binman import elf
 from binman import entry
+from dtoc import fdt
 from dtoc import fdt_util
 from u_boot_pylib import command
 from u_boot_pylib import tools
@@ -57,7 +58,7 @@ def _ReadImageDesc(binman_node, use_expanded):
 images = OrderedDict()
 if 'multiple-images' in binman_node.props:
 for node in binman_node.subnodes:
-if 'template' not in node.name:
+if not node.name.startswith('template'):
 images[node.name] = Image(node.name, node,
 use_expanded=use_expanded)
 else:
@@ -112,12 +113,13 @@ def _ReadMissingBlobHelp():
 _FinishTag(tag, msg, result)
 return result
 
-def _ShowBlobHelp(path, text):
-tout.warning('\n%s:' % path)
+def _ShowBlobHelp(level, path, text, fname):
+tout.do_output(level, '%s (%s):' % (path, fname))
 for line in text.splitlines():
-tout.warning(' %s' % line)
+tout.do_output(level, ' %s' % line)
+tout.do_output(level, '')
 
-def _ShowHelpForMissingBlobs(missing_list):
+def _ShowHelpForMissingBlobs(level, missing_list):
 """Show help for each missing blob to help the user take action
 
 Args:
@@ -132,10 +134,17 @@ def _ShowHelpForMissingBlobs(missing_list):
 tags = entry.GetHelpTags()
 
 # Show the first match help message
+shown_help = False
 for tag in tags:
 if tag in missing_blob_help:
-_ShowBlobHelp(entry._node.path, missing_blob_help[tag])
+_ShowBlobHelp(level, entry._node.path, missing_blob_help[tag],
+entry.GetDefaultFilename())
+shown_help = True
 break
+# Or a generic help message
+if not shown_help:
+_ShowBlobHelp(level, entry._node.path, "Missing blob",
+entry.GetDefaultFilename())
 
 def GetEntryModules(include_testing=True):
 """Get a set of entry class implementations
@@ -486,6 +495,9 @@ def _ProcessTemplates(parent):
 Args:
 parent: Binman node to process (typically /binman)
 
+Returns:
+bool: True if any templates were processed
+
 Search though each target node looking for those with an 'insert-template'
 property. Use that as a list of references to template nodes to use to
 adjust the target node.
@@ -498,11 +510,22 @@ def _ProcessTemplates(parent):
 
 See 'Templates' in the Binman documnentation for details.
 """
+found = False
 for node in parent.subnodes:
 tmpl = fdt_util.GetPhandleList(node, 'insert-template')
 if tmpl:
 node.copy_subnodes_from_phandles(tmpl)
-_ProcessTemplates(node)
+found = True
+
+found |= _ProcessTemplates(node)
+return found
+
+def _RemoveTemplates(parent):
+"""Remove any templates in the binman description
+"""
+for node in parent.subnodes:
+if node.name.startswith('template'):
+node.Delete()
 
 def PrepareImagesAndDtbs(dtb_fname, select_images, update_fdt, use_expanded):
 """Prepare the images to be processed and select the device tree
@@ -546,7 +569,19 @@ def PrepareImagesAndDtbs(dtb_fname, select_images, update_fdt, use_expanded):
 raise ValueError("Device tree '%s' does not have a 'binman' "
 "node" % dtb_fname)
 
-_ProcessTemplates(node)
+if _ProcessTemplates(node):
+dtb.Sync(True)
+fname = tools.get_output_filename('u-boot.dtb.tmpl1')
+tools.write_file(fname, dtb.GetContents())
+
+_RemoveTemplates(node)
+dtb.Sync(True)
+
+# Rescan the dtb to pick up the new phandles
+dtb.Scan()
+node = _FindBinmanNode(dtb)
+fname = tools.get_output_filename('u-boot.dtb.tmpl2')
+tools.write_file(fname, dtb.GetContents())
 
 images = _ReadImageDesc(node, use_expanded)
 
@@ -658,15 +693,15 @@ def ProcessImage(image, update_fdt, write_map, get_contents=True,
 missing_list = []
 image.CheckMissing(missing_list)
 if missing_list:
-tout.warning("Image '%s' is missing external blobs and is non-functional: %s" %
+tout.error("Image '%s' is missing external blobs and is non-functional: %s\n" %
 (image.name, ' '.join([e.name for e in missing_list])))
-_ShowHelpForMissingBlobs(missing_list)
+_ShowHelpForMissingBlobs(tout.ERROR, missing_list)
 
 faked_list = []
 image.CheckFakedBlobs(faked_list)
 if faked_list:
 tout.warning(
-"Image '%s' has faked external blobs and is non-functional: %s" %
+"Image '%s' has faked external blobs and is non-functional: %s\n" %
 (image.name, ' '.join([os.path.basename(e.GetDefaultFilename())
 for e in faked_list])))
 
@@ -674,15 +709,15 @@ def ProcessImage(image, update_fdt, write_map, get_contents=True,
 image.CheckOptional(optional_list)
 if optional_list:
 tout.warning(
-"Image '%s' is missing external blobs but is still functional: %s" %
+"Image '%s' is missing optional external blobs but is still functional: %s\n" %
 (image.name, ' '.join([e.name for e in optional_list])))
-_ShowHelpForMissingBlobs(optional_list)
+_ShowHelpForMissingBlobs(tout.WARNING, optional_list)
 
 missing_bintool_list = []
 image.check_missing_bintools(missing_bintool_list)
 if missing_bintool_list:
 tout.warning(
-"Image '%s' has missing bintools and is non-functional: %s" %
+"Image '%s' has missing bintools and is non-functional: %s\n" %
 (image.name, ' '.join([os.path.basename(bintool.name)
 for bintool in missing_bintool_list])))
 return any([missing_list, faked_list, missing_bintool_list])
@@ -782,6 +817,10 @@ def Binman(args):
 cbfs_util.VERBOSE = args.verbosity > 2
 state.use_fake_dtb = args.fake_dtb
+
+# Temporary hack
+if args.ignore_dup_phandles: # pragma: no cover
+fdt.IGNORE_DUP_PHANDLES = True
 
 # Normally we replace the 'u-boot' etype with 'u-boot-expanded', etc.
 # When running tests this can be disabled using this flag. When not
 # updating the FDT in image, it is not needed by binman, but we use it
@@ -827,7 +866,7 @@ def Binman(args):
 # This can only be True if -M is provided, since otherwise binman
 # would have raised an error already
 if invalid:
-msg = '\nSome images are invalid'
+msg = 'Some images are invalid'
 if args.ignore_missing:
 tout.warning(msg)
 else:
@@ -447,13 +447,15 @@ def DecodeElf(data, location):
 Returns:
 ElfInfo object containing information about the decoded ELF file
 """
+if not ELF_TOOLS:
+raise ValueError("Python: No module named 'elftools'")
 file_size = len(data)
 with io.BytesIO(data) as fd:
 elf = ELFFile(fd)
-data_start = 0xffffffff;
-data_end = 0;
-mem_end = 0;
-virt_to_phys = 0;
+data_start = 0xffffffff
+data_end = 0
+mem_end = 0
+virt_to_phys = 0
 
 for i in range(elf.num_segments()):
 segment = elf.get_segment(i)
@@ -255,7 +255,19 @@ class TestElf(unittest.TestCase):
 fname = self.ElfTestFile('embed_data')
 with self.assertRaises(ValueError) as e:
 elf.GetSymbolFileOffset(fname, ['embed_start', 'embed_end'])
-self.assertIn("Python: No module named 'elftools'",
+with self.assertRaises(ValueError) as e:
+elf.DecodeElf(tools.read_file(fname), 0xdeadbeef)
+with self.assertRaises(ValueError) as e:
+elf.GetFileOffset(fname, 0xdeadbeef)
+with self.assertRaises(ValueError) as e:
+elf.GetSymbolFromAddress(fname, 0xdeadbeef)
+with self.assertRaises(ValueError) as e:
+entry = FakeEntry(10)
+section = FakeSection()
+elf.LookupAndWriteSymbols(fname, entry, section, True)
+
+self.assertIn(
+"Section 'section_path': entry 'entry_path': Cannot write symbols to an ELF file without Python elftools",
 str(e.exception))
 finally:
 elf.ELF_TOOLS = old_val
@@ -842,6 +842,13 @@ class Entry_fit(Entry_section):
 for entry in self._priv_entries.values():
 entry.CheckMissing(missing_list)
+
+def CheckOptional(self, optional_list):
+# We must use our private entry list for this since generator nodes
+# which are removed from self._entries will otherwise not show up as
+# optional
+for entry in self._priv_entries.values():
+entry.CheckOptional(optional_list)
 
 def CheckEntries(self):
 pass
 
@@ -3806,6 +3806,7 @@ class TestFunctional(unittest.TestCase):
 allow_missing=True)
 self.assertEqual(103, ret)
 err = stderr.getvalue()
+self.assertIn('(missing-file)', err)
 self.assertRegex(err, "Image 'image'.*missing.*: blob-ext")
 self.assertIn('Some images are invalid', err)
 
@@ -3816,6 +3817,7 @@ class TestFunctional(unittest.TestCase):
 allow_missing=True, ignore_missing=True)
 self.assertEqual(0, ret)
 err = stderr.getvalue()
+self.assertIn('(missing-file)', err)
 self.assertRegex(err, "Image 'image'.*missing.*: blob-ext")
 self.assertIn('Some images are invalid', err)
 
@@ -6358,6 +6360,13 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
 fdt_util.fdt32_to_cpu(node.props['entry'].value))
 self.assertEqual(U_BOOT_DATA, node.props['data'].bytes)
 
+with test_util.capture_sys_output() as (stdout, stderr):
+self.checkFitTee('264_tee_os_opt_fit.dts', '')
+err = stderr.getvalue()
+self.assertRegex(
+err,
+"Image '.*' is missing optional external blobs but is still functional: tee-os")
+
 def testFitTeeOsOptionalFitBad(self):
 """Test an image with a FIT with an optional OP-TEE binary"""
 with self.assertRaises(ValueError) as exc:
@@ -6390,7 +6399,7 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
 err = stderr.getvalue()
 self.assertRegex(
 err,
-"Image '.*' is missing external blobs but is still functional: missing")
+"Image '.*' is missing optional external blobs but is still functional: missing")
 
 def testSectionInner(self):
 """Test an inner section with a size"""
@@ -6853,6 +6862,22 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
 second = U_BOOT_DATA + b'#' + VGA_DATA + U_BOOT_DTB_DATA
 self.assertEqual(U_BOOT_IMG_DATA + first + second, data)
+
+dtb_fname1 = tools.get_output_filename('u-boot.dtb.tmpl1')
+self.assertTrue(os.path.exists(dtb_fname1))
+dtb = fdt.Fdt.FromData(tools.read_file(dtb_fname1))
+dtb.Scan()
+node1 = dtb.GetNode('/binman/template')
+self.assertTrue(node1)
+vga = dtb.GetNode('/binman/first/intel-vga')
+self.assertTrue(vga)
+
+dtb_fname2 = tools.get_output_filename('u-boot.dtb.tmpl2')
+self.assertTrue(os.path.exists(dtb_fname2))
+dtb2 = fdt.Fdt.FromData(tools.read_file(dtb_fname2))
+dtb2.Scan()
+node2 = dtb2.GetNode('/binman/template')
+self.assertFalse(node2)
 
 def testTemplateBlobMulti(self):
 """Test using a template with 'multiple-images' enabled"""
 TestFunctional._MakeInputFile('my-blob.bin', b'blob')
@@ -6944,6 +6969,33 @@ fdt fdtmap Extract the devicetree blob from the fdtmap
 # Move to next
 spl_data = content[:0x18]
 
+def testTemplatePhandle(self):
+"""Test using a template in a node containing a phandle"""
+entry_args = {
+'atf-bl31-path': 'bl31.elf',
+}
+data = self._DoReadFileDtb('291_template_phandle.dts',
+entry_args=entry_args)
+fname = tools.get_output_filename('image.bin')
+out = tools.run('dumpimage', '-l', fname)
+
+# We should see the FIT description and one for each of the two images
+lines = out.splitlines()
+descs = [line.split()[-1] for line in lines if 'escription' in line]
+self.assertEqual(['test-desc', 'atf', 'fdt'], descs)
+
+def testTemplatePhandleDup(self):
+"""Test using a template in a node containing a phandle"""
+entry_args = {
+'atf-bl31-path': 'bl31.elf',
+}
+with self.assertRaises(ValueError) as e:
+self._DoReadFileDtb('292_template_phandle_dup.dts',
+entry_args=entry_args)
+self.assertIn(
+'Duplicate phandle 1 in nodes /binman/image/fit/images/atf/atf-bl31 and /binman/image-2/fit/images/atf/atf-bl31',
+str(e.exception))
+
 def testTIBoardConfig(self):
 """Test that a schema validated board config file can be generated"""
 data = self._DoReadFile('293_ti_board_cfg.dts')
@@ -43,7 +43,7 @@ for the external TPL binary is https://github.com/rockchip-linux/rkbin.
 
 tee-os:
 See the documentation for your board. You may need to build Open Portable
-Trusted Execution Environment (OP-TEE) with TEE=/path/to/tee.bin
+Trusted Execution Environment (OP-TEE) and build with TEE=/path/to/tee.bin
 
 opensbi:
 See the documentation for your board. The OpenSBI git repo is at
@@ -25,6 +25,7 @@
 fit,data;
 
 tee-os {
+optional;
 };
 };
 };
tools/binman/test/291_template_phandle.dts (new file, 51 lines)

@@ -0,0 +1,51 @@
+// SPDX-License-Identifier: GPL-2.0+
+
+/dts-v1/;
+
+/ {
+    #address-cells = <1>;
+    #size-cells = <1>;
+
+    binman {
+        multiple-images;
+
+        ti_spl_template: template-1 {
+            fit {
+                description = "test-desc";
+                #address-cells = <1>;
+                images {
+                    atf {
+                        description = "atf";
+                        ti-secure {
+                            type = "collection";
+                            content = <&atf>;
+                            keyfile = "key.pem";
+                        };
+                        atf: atf-bl31 {
+                            description = "atf";
+                        };
+                    };
+                };
+            };
+        };
+
+        image {
+            insert-template = <&ti_spl_template>;
+            fit {
+                images {
+                    fdt-0 {
+                        description = "fdt";
+                        ti-secure {
+                            type = "collection";
+                            content = <&foo_dtb>;
+                            keyfile = "key.pem";
+                        };
+                        foo_dtb: blob-ext {
+                            filename = "vga.bin";
+                        };
+                    };
+                };
+            };
+        };
+    };
+};
tools/binman/test/292_template_phandle_dup.dts (new file, 65 lines)

@@ -0,0 +1,65 @@
+// SPDX-License-Identifier: GPL-2.0+
+
+/dts-v1/;
+
+/ {
+    #address-cells = <1>;
+    #size-cells = <1>;
+
+    binman {
+        multiple-images;
+
+        ti_spl_template: template-1 {
+            fit {
+                description = "test-desc";
+                #address-cells = <1>;
+                images {
+                    atf {
+                        description = "atf";
+                        ti-secure {
+                            type = "collection";
+                            content = <&atf>;
+                            keyfile = "key.pem";
+                        };
+                        atf: atf-bl31 {
+                            description = "atf";
+                        };
+                    };
+                };
+            };
+        };
+
+        image {
+            insert-template = <&ti_spl_template>;
+            fit {
+                images {
+                    fdt-0 {
+                        description = "fdt";
+                        ti-secure {
+                            type = "collection";
+                            content = <&foo_dtb>;
+                            keyfile = "key.pem";
+                        };
+                        foo_dtb: blob-ext {
+                            filename = "vga.bin";
+                        };
+                    };
+                };
+            };
+        };
+
+        image-2 {
+            insert-template = <&ti_spl_template>;
+            fit {
+                images {
+                    fdt-0 {
+                        description = "fdt";
+                        blob-ext {
+                            filename = "vga.bin";
+                        };
+                    };
+                };
+            };
+        };
+    };
+};
@@ -15,6 +15,9 @@ from libfdt import QUIET_NOTFOUND
 from u_boot_pylib import tools
 from u_boot_pylib import tout
 
+# Temporary hack
+IGNORE_DUP_PHANDLES = False
+
 # This deals with a device tree, presenting it as an assortment of Node and
 # Prop objects, representing nodes and properties, respectively. This file
 # contains the base classes and defines the high-level API. You can use
@@ -249,6 +252,7 @@ class Prop:
 """
 if self.dirty:
 node = self._node
+tout.debug(f'sync {node.path}: {self.name}')
 fdt_obj = node._fdt._fdt_obj
 node_name = fdt_obj.get_name(node._offset)
 if node_name and node_name != node.name:
@@ -272,6 +276,7 @@ class Prop:
 the FDT is synced
 """
 self._offset = None
+self.dirty = True
 
 class Node:
 """A device tree node
@@ -335,6 +340,12 @@ class Node:
 self.props = self._fdt.GetProps(self)
 phandle = fdt_obj.get_phandle(self.Offset())
 if phandle:
+dup = self._fdt.phandle_to_node.get(phandle)
+if dup:
+if not IGNORE_DUP_PHANDLES:
+raise ValueError(
+f'Duplicate phandle {phandle} in nodes {dup.path} and {self.path}')
+else:
 self._fdt.phandle_to_node[phandle] = self
 
 offset = fdt_obj.first_subnode(self.Offset(), QUIET_NOTFOUND)
@@ -705,30 +716,38 @@ class Node:
 prop.Sync(auto_resize)
 return added
 
-def merge_props(self, src):
+def merge_props(self, src, copy_phandles):
 """Copy missing properties (except 'phandle') from another node
 
 Args:
 src (Node): Node containing properties to copy
+copy_phandles (bool): True to copy phandle properties in nodes
 
 Adds properties which are present in src but not in this node. Any
 'phandle' property is not copied since this might result in two nodes
 with the same phandle, thus making phandle references ambiguous.
 """
+tout.debug(f'copy to {self.path}: {src.path}')
 for name, src_prop in src.props.items():
-if name != 'phandle' and name not in self.props:
+done = False
+if name not in self.props:
+if copy_phandles or name != 'phandle':
 self.props[name] = Prop(self, None, name, src_prop.bytes)
+done = True
+tout.debug(f"  {name}{'' if done else ' - ignored'}")
 
-def copy_node(self, src):
+def copy_node(self, src, copy_phandles=False):
 """Copy a node and all its subnodes into this node
 
 Args:
 src (Node): Node to copy
+copy_phandles (bool): True to copy phandle properties in nodes
 
 Returns:
 Node: Resulting destination node
 
-This works recursively.
+This works recursively, with copy_phandles being set to True for the
+recursive calls
 
 The new node is put before all other nodes. If the node already
 exists, just its subnodes and properties are copied, placing them before
@@ -740,12 +759,12 @@ class Node:
 dst.move_to_first()
 else:
 dst = self.insert_subnode(src.name)
-dst.merge_props(src)
+dst.merge_props(src, copy_phandles)
 
 # Process in reverse order so that they appear correctly in the result,
 # since copy_node() puts the node first in the list
 for node in reversed(src.subnodes):
-dst.copy_node(node)
+dst.copy_node(node, True)
 return dst
 
 def copy_subnodes_from_phandles(self, phandle_list):
@@ -768,7 +787,7 @@ class Node:
 dst = self.copy_node(node)
 
 tout.debug(f'merge props from {parent.path} to {dst.path}')
-self.merge_props(parent)
+self.merge_props(parent, False)
 
 
 class Fdt:
@@ -829,6 +848,7 @@ class Fdt:
 
 TODO(sjg@chromium.org): Implement the 'root' parameter
 """
+self.phandle_to_node = {}
 self._cached_offsets = True
 self._root = self.Node(self, None, 0, '/', '/')
 self._root.Scan()
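In short, the new copy_phandles argument keeps the old skip-the-phandle rule for the node merged directly while letting the recursive subnode copies carry their phandles along. A small sketch of the resulting rules (same hypothetical `copy_test.dtb` as above, assuming `/base` carries a phandle and so does its subnode `over`)::

    from dtoc import fdt

    dtb = fdt.FdtScan('copy_test.dtb')
    src = dtb.GetNode('/base')
    dst = dtb.GetNode('/dest')

    # Direct merge: with copy_phandles=False a 'phandle' property on the
    # source node itself is still skipped
    dst.merge_props(src, False)
    assert 'phandle' not in dst.props

    # copy_node() merges the top level without phandles but passes
    # copy_phandles=True for subnodes, so '/dest/base/over' now carries the
    # same phandle as '/base/over'
    copied = dst.copy_node(src)
    assert 'phandle' not in copied.props
    assert 'phandle' in copied.FindNode('over').props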
@@ -37,11 +37,12 @@
 new-prop;
 };
 
-second1 {
+second1: second1 {
 new-prop;
 };
 
 second4 {
+use_second1 = <&second1>;
 };
 };
 };
@@ -65,12 +66,13 @@
 };
 
 second: second {
-second1 {
+second_1_bad: second1 {
 some-prop;
 };
 
 second2 {
 some-prop;
+use_second1_bad = <&second_1_bad>;
 };
 };
 };
@@ -32,6 +32,7 @@ from dtoc.fdt import Type, BytesToValue
 import libfdt
 from u_boot_pylib import test_util
 from u_boot_pylib import tools
+from u_boot_pylib import tout
 
 #pylint: disable=protected-access
 
@@ -308,7 +309,7 @@ class TestNode(unittest.TestCase):
 
 def test_copy_node(self):
 """Test copy_node() function"""
-def do_copy_checks(dtb, dst, expect_none):
+def do_copy_checks(dtb, dst, second1_ph_val, expect_none):
 self.assertEqual(
 ['/dest/base', '/dest/first@0', '/dest/existing'],
 [n.path for n in dst.subnodes])
@@ -339,8 +340,8 @@ class TestNode(unittest.TestCase):
 over = dtb.GetNode('/dest/base/over')
 self.assertTrue(over)
 
-# Make sure that the phandle for 'over' is not copied
-self.assertNotIn('phandle', over.props.keys())
+# Make sure that the phandle for 'over' is copied
+self.assertIn('phandle', over.props.keys())
 
 second = dtb.GetNode('/dest/base/second')
 self.assertTrue(second)
@@ -348,7 +349,7 @@ class TestNode(unittest.TestCase):
 [n.name for n in chk.subnodes])
 self.assertEqual(chk, over.parent)
 self.assertEqual(
-{'bootph-all', 'compatible', 'reg', 'low-power'},
+{'bootph-all', 'compatible', 'reg', 'low-power', 'phandle'},
 over.props.keys())
 
 if expect_none:
@@ -365,20 +366,43 @@ class TestNode(unittest.TestCase):
 ['second1', 'second2', 'second3', 'second4'],
 [n.name for n in second.subnodes])
+
+# Check the 'second_1_bad' phandle is not copied over
+second1 = second.FindNode('second1')
+self.assertTrue(second1)
+sph = second1.props.get('phandle')
+self.assertTrue(sph)
+self.assertEqual(second1_ph_val, sph.bytes)
+
 
 dtb = fdt.FdtScan(find_dtb_file('dtoc_test_copy.dts'))
 tmpl = dtb.GetNode('/base')
 dst = dtb.GetNode('/dest')
+second1_ph_val = (dtb.GetNode('/dest/base/second/second1').
+props['phandle'].bytes)
 dst.copy_node(tmpl)
 
-do_copy_checks(dtb, dst, expect_none=True)
+do_copy_checks(dtb, dst, second1_ph_val, expect_none=True)
 
 dtb.Sync(auto_resize=True)
 
-# Now check that the FDT looks correct
+# Now check the resulting FDT. It should have duplicate phandles since
+# 'over' has been copied to 'dest/base/over' but still exists in its old
+# place
 new_dtb = fdt.Fdt.FromData(dtb.GetContents())
+with self.assertRaises(ValueError) as exc:
 new_dtb.Scan()
+self.assertIn(
+'Duplicate phandle 1 in nodes /dest/base/over and /base/over',
+str(exc.exception))
+
+# Remove the source nodes for the copy
+new_dtb.GetNode('/base').Delete()
+
+# Now it should scan OK
+new_dtb.Scan()
+
 dst = new_dtb.GetNode('/dest')
-do_copy_checks(new_dtb, dst, expect_none=False)
+do_copy_checks(new_dtb, dst, second1_ph_val, expect_none=False)
 
 def test_copy_subnodes_from_phandles(self):
 """Test copy_node() function"""
@@ -404,7 +428,7 @@ class TestNode(unittest.TestCase):
 
 # Make sure that the phandle for 'over' is not copied
 over = dst.FindNode('over')
-print('keys', over.props.keys())
+tout.debug(f'keys: {over.props.keys()}')
 self.assertNotIn('phandle', over.props.keys())
 
 # Check the merged properties, first the base ones in '/dest'