| diff --git a/scripts/make-squashfs-hashed.sh b/scripts/make-squashfs-hashed.sh |
| new file mode 100755 |
| index 0000000..a4b183e |
| --- /dev/null |
| +++ b/scripts/make-squashfs-hashed.sh |
| @@ -0,0 +1,23 @@ |
#!/bin/bash
#
# 1. Use veritysetup to append a dm-verity hash tree to the end of a
#    squashfs image (in place: data device == hash device).
# 2. The veritysetup summary output is captured into SUMMARY_FILE so a
#    later step can turn it into a U-Boot script.
#
# Usage: make-squashfs-hashed.sh <squashfs> <staging_dir_host> <topdir> <summary_file>
set -eu

SQUASHFS_FILE_PATH=$1
STAGING_DIR_HOST=$2
TOPDIR=$3   # kept for call-site compatibility; not used by this script
SUMMARY_FILE=$4

BLOCK_SIZE=4096
FILE_SIZE=$(stat -c '%s' "${SQUASHFS_FILE_PATH}")

# Round the data-block count up so a partial trailing block is covered.
DATA_BLOCKS=$(( (FILE_SIZE + BLOCK_SIZE - 1) / BLOCK_SIZE ))

# The hash tree starts on the first block boundary after the data area.
HASH_OFFSET=$(( DATA_BLOCKS * BLOCK_SIZE ))

"${STAGING_DIR_HOST}/bin/veritysetup" format \
	--data-blocks="${DATA_BLOCKS}" \
	--hash-offset="${HASH_OFFSET}" \
	"${SQUASHFS_FILE_PATH}" "${SQUASHFS_FILE_PATH}" \
	> "${SUMMARY_FILE}"
| diff --git a/scripts/prepare-dm-verity-uboot-script.sh b/scripts/prepare-dm-verity-uboot-script.sh |
| new file mode 100755 |
| index 0000000..a66b921 |
| --- /dev/null |
| +++ b/scripts/prepare-dm-verity-uboot-script.sh |
| @@ -0,0 +1,54 @@ |
#!/bin/bash
#
# Parse the summary printed by `veritysetup format` (supplied on stdin)
# and emit a U-Boot `setenv bootargs ...` command that makes the kernel
# create the dm-verity mapping at boot via dm-mod.create=.
#
# Usage: prepare-dm-verity-uboot-script.sh <root_device> <extra_args> < summary
set -eu
shopt -s extglob   # for the +([[:space:]]) trim patterns below

ROOT_DEVICE=$1
EXTRA_ARGS=$2

while IFS= read -r line; do
	key=${line%%:*}
	value=${line#*:}
	# veritysetup pads values with whitespace; trim both ends.
	value=${value##+([[:space:]])}
	value=${value%%+([[:space:]])}

	case "${key}" in
		"UUID")            UUID=${value} ;;
		"Data blocks")     DATA_BLOCKS=${value} ;;
		"Data block size") DATA_BLOCK_SIZE=${value} ;;
		"Hash block size") HASH_BLOCK_SIZE=${value} ;;
		"Hash algorithm")  HASH_ALG=${value} ;;
		"Salt")            SALT=${value} ;;
		"Root hash")       ROOT_HASH=${value} ;;
	esac
done

# Fail loudly if the summary was incomplete instead of emitting garbage.
: "${DATA_BLOCKS:?missing 'Data blocks' in veritysetup summary}"
: "${DATA_BLOCK_SIZE:?missing 'Data block size' in veritysetup summary}"
: "${HASH_BLOCK_SIZE:?missing 'Hash block size' in veritysetup summary}"
: "${HASH_ALG:?missing 'Hash algorithm' in veritysetup summary}"
: "${SALT:?missing 'Salt' in veritysetup summary}"
: "${ROOT_HASH:?missing 'Root hash' in veritysetup summary}"

#
# dm-mod.create=<name>,<uuid>,<minor>,<flags>,
#               <start_sector> <num_sectors> <target_type> <target_args>
# <target_type>=verity
# <target_args>=<version> <data_dev> <hash_dev> <data_block_size> <hash_block_size>
#               <num_data_blocks> <hash_start_block> <algorithm> <root_hash> <salt>
#
# <uuid>  ::= xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx | ""
# <minor> ::= The device minor number | ""
# <flags> ::= "ro" | "rw"
#
# For details on each field see:
#   Documentation/admin-guide/device-mapper/dm-init.rst
#   Documentation/admin-guide/device-mapper/verity.rst
#
# <num_sectors> = DATA_BLOCKS * 8 assumes 4096-byte data blocks
# (4096 / 512 = 8 sectors per block).
# <hash_start_block> = DATA_BLOCKS + 1 skips the verity superblock that
# veritysetup writes at the start of the hash area.
#
BOOTARGS=$(printf '%s root=/dev/dm-0 dm-mod.create="dm-verity,,,ro,0 %s verity 1 %s %s %s %s %s %s %s %s %s"' \
	"${EXTRA_ARGS}" "$((DATA_BLOCKS * 8))" "${ROOT_DEVICE}" "${ROOT_DEVICE}" \
	"${DATA_BLOCK_SIZE}" "${HASH_BLOCK_SIZE}" "${DATA_BLOCKS}" "$((DATA_BLOCKS + 1))" \
	"${HASH_ALG}" "${ROOT_HASH}" "${SALT}")

echo "setenv bootargs ${BOOTARGS}"
| diff --git a/tools/ar-tool/Makefile b/tools/ar-tool/Makefile |
| new file mode 100644 |
| index 0000000..2b22ac0 |
| --- /dev/null |
| +++ b/tools/ar-tool/Makefile |
| @@ -0,0 +1,36 @@ |
| +# |
| +# Copyright (C) 2011-2012 OpenWrt.org |
| +# |
| +# This is free software, licensed under the GNU General Public License v2. |
| +# See /LICENSE for more information. |
| +# |
| + |
# OpenWrt host-build glue: builds ar-tool on the build host and installs
# it into $(STAGING_DIR_HOST)/bin so image-generation steps can run it.
include $(TOPDIR)/rules.mk

PKG_NAME:=ar-tool
PKG_VERSION:=1

include $(INCLUDE_DIR)/host-build.mk

# Copy the bundled sources into the host build directory.
define Host/Prepare
	mkdir -p $(HOST_BUILD_DIR)
	$(CP) ./src/* $(HOST_BUILD_DIR)/
endef

# Delegate to the Makefile shipped in src/.
define Host/Compile
	$(MAKE) -C $(HOST_BUILD_DIR)
endef

# Nothing to configure; override the default with a no-op.
define Host/Configure
endef

define Host/Install
	$(CP) $(HOST_BUILD_DIR)/ar-tool $(STAGING_DIR_HOST)/bin/
endef

# Remove both the built artifact and the staged copy.
define Host/Clean
	rm -f $(HOST_BUILD_DIR)/ar-tool
	rm -f $(STAGING_DIR_HOST)/bin/ar-tool
endef

$(eval $(call HostBuild))
| diff --git a/tools/ar-tool/src/Makefile b/tools/ar-tool/src/Makefile |
| new file mode 100644 |
| index 0000000..26ab3cf |
| --- /dev/null |
| +++ b/tools/ar-tool/src/Makefile |
| @@ -0,0 +1,20 @@ |
| +# |
| +# Copyright (C) 2019 MediaTek Inc. |
| +# |
| +# Author: Sam Shih <sam.shih@mediatek.com> |
| +# |
| +# SPDX-License-Identifier: BSD-3-Clause |
| +# https://spdx.org/licenses |
| +# |
| + |
TARGET := ar-tool

.PHONY: all clean

all: ${TARGET}

# The "build" just strips the .py suffix: the tool is a Python script
# installed under its bare name.
%: %.py Makefile
	cp $< $@

clean:
	# -f: do not fail when the target was never built.
	rm -f ${TARGET}
| diff --git a/tools/ar-tool/src/ar-tool.py b/tools/ar-tool/src/ar-tool.py |
| new file mode 100755 |
| index 0000000..e33510b |
| --- /dev/null |
| +++ b/tools/ar-tool/src/ar-tool.py |
| @@ -0,0 +1,302 @@ |
| +#!/usr/bin/python |
| +import os |
| +import sys |
| +from xml.dom import minidom |
| +import pdb |
| +import traceback |
| +import re |
| + |
| + |
class bl_ar_table_t:
    """Parser for the bootloader anti-rollback (AR) XML table.

    Collects every valid ``BL_AR_VER`` value from ``<bl_ar_entry>``
    elements and generates C / Makefile snippets from the last one
    (presumably the newest entry in the table -- confirm with the table
    authors).
    """

    def __init__(self, input_file):
        # Path to the XML table; parsed lazily by parse().
        self.input_file = input_file
        # AR versions accepted so far, in document order.
        self.ar_ver_list = []

    def generate_ar_ver_code(self):
        """Return a C source snippet defining ``bl_ar_ver``.

        Must be called after a successful parse(); uses the last
        recorded version.
        """
        code = ""
        code += "/* \n"
        code += " * This file is auto-generated by ar-tool\n"
        code += " * please do not modify this file manually\n"
        code += " */\n"
        code += "#include <plat/common/platform.h>\n"
        code += "const uint32_t bl_ar_ver = %d;\n" % self.ar_ver_list[-1]
        return code

    def generate_ar_conf_code(self):
        """Return a Makefile snippet assigning ``BL_AR_VER``."""
        code = ""
        code += "BL_AR_VER\t:=\t%d\n" % self.ar_ver_list[-1]
        return code

    def check_and_set_ar_ver_list(self, ar_ver):
        """Record ar_ver if it is unique and within [0, 64].

        Returns True when accepted, False on a duplicate or
        out-of-range value.
        """
        if ar_ver not in self.ar_ver_list and 0 <= ar_ver <= 64:
            self.ar_ver_list.append(ar_ver)
            return True
        return False

    def get_data_by_name_from_ar_entry(self, xml_node, entry_id, name, print_err=True):
        """Return the text content of the unique child ``name`` of xml_node.

        entry_id is used only for error messages. Returns None (optionally
        printing a diagnostic) when the child is missing, duplicated, or
        has no text content.
        """
        i = entry_id
        datalist = xml_node.getElementsByTagName(name)
        if not datalist:
            if print_err is True:
                print("XML parse fail in ar_entry[%d]:" % i)
                print(" Child node '%s' not exist" % name)
            return None
        if len(datalist) != 1:
            if print_err is True:
                print("XML parse fail in ar_entry[%d]:" % i)
                print(" Duplicate '%s' node exist" % name)
            return None
        datanode = datalist[0].firstChild
        if not datanode:
            if print_err is True:
                print("XML parse fail in ar_entry[%d].%s:" % (i, name))
                print(" '%s' data not exist" % name)
            return None
        if datanode.nodeType != datanode.TEXT_NODE:
            if print_err is True:
                print("XML parse fail in ar_entry[%d].%s:" % (i, name))
                print(" '%s' data not exist" % name)
            return None
        return str(datanode.data)

    def get_int_by_name_from_ar_entry(self, xml_node, entry_id, name, print_err=True):
        """Like get_data_by_name_from_ar_entry, but the text must be a
        non-negative decimal integer; returns the stripped string or None.
        """
        data = self.get_data_by_name_from_ar_entry(xml_node, entry_id, name, print_err)
        if data:
            data = data.strip()
            if not data.isdigit():
                if print_err is True:
                    # BUG FIX: original referenced undefined 'i' here,
                    # raising NameError instead of reporting the entry.
                    print("XML parse fail in ar_entry[%d].%s:" % (entry_id, name))
                    print(" '%s' must be an integer" % name)
                return None
            return data
        return None

    def xml_debug_show(self, line, column):
        """Print the offending XML line (expat's 1-based line/column)
        with up to one line of context on each side."""
        try:
            with open(self.input_file, "r") as f:
                xml_lines = f.read().split("\n")
        except IOError:
            sys.stderr.write("Unable to open file '%s'\n" % self.input_file)
            raise
        print("input xml fail at line %d, column %d" % (line, column))
        # BUG FIX: expat line numbers are 1-based, so the failing line is
        # xml_lines[line - 1]; the original printed the line after it and
        # could IndexError at end of file. Clamp the context window.
        start = max(line - 2, 0)
        end = min(line + 1, len(xml_lines))
        for text in xml_lines[start:end]:
            print(text)

    def parse(self):
        """Parse the XML table and fill self.ar_ver_list.

        Returns 0 on success, 1 on any error (unreadable file, malformed
        XML, invalid/duplicate BL_AR_VER).
        """
        try:
            # Probe readability up front for a clearer error message.
            f = open(self.input_file, "r")
            f.close()
        except Exception:
            sys.stderr.write("Unable to open file '%s'\n" % self.input_file)
            return 1
        try:
            xmldoc = minidom.parse(self.input_file)
            ar_entry_list = xmldoc.getElementsByTagName('bl_ar_entry')

            for i in range(0, len(ar_entry_list)):
                ar_entry = ar_entry_list[i]
                # Skip entries without a numeric USED marker.
                # NOTE(review): USED == "0" is a truthy string and is NOT
                # skipped -- confirm this is intended.
                data = self.get_int_by_name_from_ar_entry(ar_entry, i, "USED", False)
                if not data:
                    continue

                data = self.get_int_by_name_from_ar_entry(ar_entry, i, "BL_AR_VER")
                if not data:
                    return 1
                data = data.strip()
                if self.check_and_set_ar_ver_list(int(data)) is False:
                    print("XML parse fail in bl_ar_entry[%d].BL_AR_VER:" % i)
                    print(" 'BL_AR_VER' value duplicate or exceed range")
                    return 1
            print("Get %d record in bl_ar_table" % len(self.ar_ver_list))
        except Exception:
            sys.stderr.write("Unable to parse file '%s'\n" % self.input_file)
            crash_info = traceback.format_exc()
            m = re.search("ExpatError: mismatched tag: line (.+), column (.+)", crash_info)
            if m:
                line = int(m.group(1))
                column = int(m.group(2))
                self.xml_debug_show(line, column)
                print(m.group(0))
            else:
                print(crash_info)
            return 1
        return 0
| + |
| + |
class fw_ar_table_t:
    """Parser for the firmware anti-rollback (AR) XML table.

    Collects every valid ``FW_AR_VER`` value from ``<fw_ar_entry>``
    elements and generates C / Makefile snippets from the last one
    (presumably the newest entry in the table -- confirm with the table
    authors).
    """

    def __init__(self, input_file):
        # Path to the XML table; parsed lazily by parse().
        self.input_file = input_file
        # AR versions accepted so far, in document order.
        self.ar_ver_list = []

    def generate_ar_ver_code(self):
        """Return a C source snippet defining ``fw_ar_ver``.

        Must be called after a successful parse(); uses the last
        recorded version.
        """
        code = ""
        code += "/* \n"
        code += " * This file is auto-generated by ar-tool\n"
        code += " * please do not modify this file manually\n"
        code += " */\n"
        code += "const uint32_t fw_ar_ver = %d;\n" % self.ar_ver_list[-1]
        return code

    def generate_ar_conf_code(self):
        """Return a Makefile snippet assigning ``FW_AR_VER``."""
        code = ""
        code += "FW_AR_VER\t:=\t%d\n" % self.ar_ver_list[-1]
        return code

    def check_and_set_ar_ver_list(self, ar_ver):
        """Record ar_ver if it is unique and within [0, 64].

        Returns True when accepted, False on a duplicate or
        out-of-range value.
        """
        if ar_ver not in self.ar_ver_list and 0 <= ar_ver <= 64:
            self.ar_ver_list.append(ar_ver)
            return True
        return False

    def get_data_by_name_from_ar_entry(self, xml_node, entry_id, name, print_err=True):
        """Return the text content of the unique child ``name`` of xml_node.

        entry_id is used only for error messages. Returns None (optionally
        printing a diagnostic) when the child is missing, duplicated, or
        has no text content.
        """
        i = entry_id
        datalist = xml_node.getElementsByTagName(name)
        if not datalist:
            if print_err is True:
                print("XML parse fail in ar_entry[%d]:" % i)
                print(" Child node '%s' not exist" % name)
            return None
        if len(datalist) != 1:
            if print_err is True:
                print("XML parse fail in ar_entry[%d]:" % i)
                print(" Duplicate '%s' node exist" % name)
            return None
        datanode = datalist[0].firstChild
        if not datanode:
            if print_err is True:
                print("XML parse fail in ar_entry[%d].%s:" % (i, name))
                print(" '%s' data not exist" % name)
            return None
        if datanode.nodeType != datanode.TEXT_NODE:
            if print_err is True:
                print("XML parse fail in ar_entry[%d].%s:" % (i, name))
                print(" '%s' data not exist" % name)
            return None
        return str(datanode.data)

    def get_int_by_name_from_ar_entry(self, xml_node, entry_id, name, print_err=True):
        """Like get_data_by_name_from_ar_entry, but the text must be a
        non-negative decimal integer; returns the stripped string or None.
        """
        data = self.get_data_by_name_from_ar_entry(xml_node, entry_id, name, print_err)
        if data:
            data = data.strip()
            if not data.isdigit():
                if print_err is True:
                    # BUG FIX: original referenced undefined 'i' here,
                    # raising NameError instead of reporting the entry.
                    print("XML parse fail in ar_entry[%d].%s:" % (entry_id, name))
                    print(" '%s' must be an integer" % name)
                return None
            return data
        return None

    def xml_debug_show(self, line, column):
        """Print the offending XML line (expat's 1-based line/column)
        with up to one line of context on each side."""
        try:
            with open(self.input_file, "r") as f:
                xml_lines = f.read().split("\n")
        except IOError:
            sys.stderr.write("Unable to open file '%s'\n" % self.input_file)
            raise
        print("input xml fail at line %d, column %d" % (line, column))
        # BUG FIX: expat line numbers are 1-based, so the failing line is
        # xml_lines[line - 1]; the original printed the line after it and
        # could IndexError at end of file. Clamp the context window.
        start = max(line - 2, 0)
        end = min(line + 1, len(xml_lines))
        for text in xml_lines[start:end]:
            print(text)

    def parse(self):
        """Parse the XML table and fill self.ar_ver_list.

        Returns 0 on success, 1 on any error (unreadable file, malformed
        XML, invalid/duplicate FW_AR_VER).
        """
        try:
            # Probe readability up front for a clearer error message.
            f = open(self.input_file, "r")
            f.close()
        except Exception:
            sys.stderr.write("Unable to open file '%s'\n" % self.input_file)
            return 1
        try:
            xmldoc = minidom.parse(self.input_file)
            ar_entry_list = xmldoc.getElementsByTagName('fw_ar_entry')

            for i in range(0, len(ar_entry_list)):
                ar_entry = ar_entry_list[i]
                # Skip entries without a numeric USED marker.
                # NOTE(review): USED == "0" is a truthy string and is NOT
                # skipped -- confirm this is intended.
                data = self.get_int_by_name_from_ar_entry(ar_entry, i, "USED", False)
                if not data:
                    continue

                data = self.get_int_by_name_from_ar_entry(ar_entry, i, "FW_AR_VER")
                if not data:
                    return 1
                data = data.strip()
                if self.check_and_set_ar_ver_list(int(data)) is False:
                    print("XML parse fail in fw_ar_entry[%d].FW_AR_VER:" % i)
                    print(" 'FW_AR_VER' value duplicate or exceed range")
                    return 1
            print("Get %d record in fw_ar_table" % len(self.ar_ver_list))
        except Exception:
            sys.stderr.write("Unable to parse file '%s'\n" % self.input_file)
            crash_info = traceback.format_exc()
            m = re.search("ExpatError: mismatched tag: line (.+), column (.+)", crash_info)
            if m:
                line = int(m.group(1))
                column = int(m.group(2))
                self.xml_debug_show(line, column)
                print(m.group(0))
            else:
                print(crash_info)
            return 1
        return 0
| + |
| + |
def main(argc, argv):
    """Command-line entry point.

    Usage: ar-tool [bl_ar_table|fw_ar_table]
                   [create_ar_ver|create_ar_conf] <input_file> <output_file>

    Returns 0 on success, 1 on any error.
    """
    if argc != 5:
        sys.stdout.write("ar-tool [bl_ar_table|fw_ar_table] [create_ar_ver|create_ar_conf] $(input_file) $(output_file)\n")
        return 1
    if argv[1] == "bl_ar_table":
        ar_table = bl_ar_table_t(argv[3])
    elif argv[1] == "fw_ar_table":
        ar_table = fw_ar_table_t(argv[3])
    else:
        # BUG FIX: an unrecognized table type used to silently fall
        # through to fw_ar_table_t; reject it explicitly instead.
        print("Unknown option '%s'" % argv[1])
        return 1
    if ar_table.parse() != 0:
        return 1
    if argv[2] == "create_ar_ver":
        code = ar_table.generate_ar_ver_code()
    elif argv[2] == "create_ar_conf":
        code = ar_table.generate_ar_conf_code()
    else:
        # BUG FIX: original printed argv[1] (the table type) here even
        # though the unrecognized value is argv[2]; also fixed "Unknow".
        print("Unknown option '%s'" % argv[2])
        return 1
    print("(%s) --> (%s)" % (argv[3], argv[4]))
    with open(argv[4], "w") as f:
        f.write(code)
    return 0


if __name__ == '__main__':
    sys.exit(main(len(sys.argv), sys.argv))