From 167753679f5f067044c99746717fa9dfd10ecdc8 Mon Sep 17 00:00:00 2001
From: "Chen, Gang G"
Date: Mon, 29 May 2023 18:27:06 +0800
Subject: [PATCH] build: Support build with Slim Bootloader

1. Add support for building with Slim Bootloader (SBL)
2. Add scripts that package the ELF into an SBL container image

Tracked-On: OAM-110589
Signed-off-by: Chen, Gang G
---
 CommonUtility.py                      | 456 +++++++++++++
 GenContainer.py                       | 884 ++++++++++++++++++++++++++
 SingleSign.py                         | 301 +++++++++
 core/abl_executable.mk                |  55 ++
 core/definitions.mk                   |  49 +-
 core/elf_ia32_abl.lds                 |  87 +++
 core/elf_x86_64_abl.lds               |  87 +++
 testkeys/OS1_TestKey_Priv_RSA2048.pem |  27 +
 8 files changed, 1944 insertions(+), 2 deletions(-)
 create mode 100644 CommonUtility.py
 create mode 100755 GenContainer.py
 create mode 100644 SingleSign.py
 create mode 100755 core/abl_executable.mk
 create mode 100644 core/elf_ia32_abl.lds
 create mode 100644 core/elf_x86_64_abl.lds
 create mode 100644 testkeys/OS1_TestKey_Priv_RSA2048.pem

diff --git a/CommonUtility.py b/CommonUtility.py
new file mode 100644
index 0000000..d1cce6a
--- /dev/null
+++ b/CommonUtility.py
@@ -0,0 +1,456 @@
+#!/usr/bin/env python
+## @ CommonUtility.py
+# Common utility script
+#
+# Copyright (c) 2016 - 2020, Intel Corporation. All rights reserved.
+# SPDX-License-Identifier: BSD-2-Clause-Patent +# +## + +## +# Import Modules +# +import os +import sys +import re +import shutil +import subprocess +import struct +import hashlib +import string +from ctypes import * +from functools import reduce +from importlib.machinery import SourceFileLoader +from SingleSign import * + + +# Key types defined should match with cryptolib.h +PUB_KEY_TYPE = { + # key_type : key_val + "RSA" : 1, + "ECC" : 2, + "DSA" : 3, + } + +# Signing type schemes defined should match with cryptolib.h +SIGN_TYPE_SCHEME = { + # sign_type : key_val + "RSA_PKCS1" : 1, + "RSA_PSS" : 2, + "ECC" : 3, + "DSA" : 4, + } + +# Hash values defined should match with cryptolib.h +HASH_TYPE_VALUE = { + # Hash_string : Hash_Value + "SHA2_256" : 1, + "SHA2_384" : 2, + "SHA2_512" : 3, + "SM3_256" : 4, + } + +# Hash values defined should match with cryptolib.h +HASH_VAL_STRING = dict(map(reversed, HASH_TYPE_VALUE.items())) + +AUTH_TYPE_HASH_VALUE = { + # Auth_type : Hash_type + "SHA2_256" : 1, + "SHA2_384" : 2, + "SHA2_512" : 3, + "SM3_256" : 4, + "RSA2048SHA256" : 1, + "RSA3072SHA384" : 2, + } + +HASH_DIGEST_SIZE = { + # Hash_string : Hash_Size + "SHA2_256" : 32, + "SHA2_384" : 48, + "SHA2_512" : 64, + "SM3_256" : 32, + } + +class PUB_KEY_HDR (Structure): + _pack_ = 1 + _fields_ = [ + ('Identifier', ARRAY(c_char, 4)), #signature ('P', 'U', 'B', 'K') + ('KeySize', c_uint16), #Length of Public Key + ('KeyType', c_uint8), #RSA or ECC + ('Reserved', ARRAY(c_uint8, 1)), + ('KeyData', ARRAY(c_uint8, 0)), #Pubic key data with KeySize bytes for RSA_KEY() format + ] + + def __init__(self): + self.Identifier = b'PUBK' + +class SIGNATURE_HDR (Structure): + _pack_ = 1 + _fields_ = [ + ('Identifier', ARRAY(c_char, 4)), #signature Identifier('S', 'I', 'G', 'N') + ('SigSize', c_uint16), #Length of signature 2K and 3K in bytes + ('SigType', c_uint8), #PKCSv1.5 or RSA-PSS or ECC + ('HashAlg', c_uint8), #Hash Alg for signingh SHA256, 384 + ('Signature', ARRAY(c_uint8, 0)), #Signature length defined by SigSize bytes + ] + + def __init__(self): + self.Identifier = b'SIGN' + +class LZ_HEADER(Structure): + _pack_ = 1 + _fields_ = [ + ('signature', ARRAY(c_char, 4)), + ('compressed_len', c_uint32), + ('length', c_uint32), + ('version', c_uint16), + ('svn', c_uint8), + ('attribute', c_uint8) + ] + _compress_alg = { + b'LZDM' : 'Dummy', + b'LZ4 ' : 'Lz4', + b'LZMA' : 'Lzma', + } + +def print_bytes (data, indent=0, offset=0, show_ascii = False): + bytes_per_line = 16 + printable = ' ' + string.ascii_letters + string.digits + string.punctuation + str_fmt = '{:s}{:04x}: {:%ds} {:s}' % (bytes_per_line * 3) + bytes_per_line + data_array = bytearray(data) + for idx in range(0, len(data_array), bytes_per_line): + hex_str = ' '.join('%02X' % val for val in data_array[idx:idx + bytes_per_line]) + asc_str = ''.join('%c' % (val if (chr(val) in printable) else '.') + for val in data_array[idx:idx + bytes_per_line]) + print (str_fmt.format(indent * ' ', offset + idx, hex_str, ' ' + asc_str if show_ascii else '')) + +def get_bits_from_bytes (bytes, start, length): + if length == 0: + return 0 + byte_start = (start) // 8 + byte_end = (start + length - 1) // 8 + bit_start = start & 7 + mask = (1 << length) - 1 + val = bytes_to_value (bytes[byte_start:byte_end + 1]) + val = (val >> bit_start) & mask + return val + +def set_bits_to_bytes (bytes, start, length, bvalue): + if length == 0: + return + byte_start = (start) // 8 + byte_end = (start + length - 1) // 8 + bit_start = start & 7 + mask = (1 << length) - 1 + val = 
bytes_to_value (bytes[byte_start:byte_end + 1]) + val &= ~(mask << bit_start) + val |= ((bvalue & mask) << bit_start) + bytes[byte_start:byte_end+1] = value_to_bytearray (val, byte_end + 1 - byte_start) + +def value_to_bytes (value, length): + return value.to_bytes(length, 'little') + +def bytes_to_value (bytes): + return int.from_bytes (bytes, 'little') + +def value_to_bytearray (value, length): + return bytearray(value_to_bytes(value, length)) + +def value_to_bytearray (value, length): + return bytearray(value_to_bytes(value, length)) + +def get_aligned_value (value, alignment = 4): + if alignment != (1 << (alignment.bit_length() - 1)): + raise Exception ('Alignment (0x%x) should to be power of 2 !' % alignment) + value = (value + (alignment - 1)) & ~(alignment - 1) + return value + +def get_padding_length (data_len, alignment = 4): + new_data_len = get_aligned_value (data_len, alignment) + return new_data_len - data_len + +def get_file_data (file, mode = 'rb'): + return open(file, mode).read() + +def gen_file_from_object (file, object): + open (file, 'wb').write(object) + +def gen_file_with_size (file, size): + open (file, 'wb').write(b'\xFF' * size); + +def check_files_exist (base_name_list, dir = '', ext = ''): + for each in base_name_list: + if not os.path.exists (os.path.join (dir, each + ext)): + return False + return True + +def load_source (name, filepath): + mod = SourceFileLoader (name, filepath).load_module() + return mod + +def get_openssl_path (): + if os.name == 'nt': + if 'OPENSSL_PATH' not in os.environ: + openssl_dir = "C:\\Openssl\\bin\\" + if os.path.exists (openssl_dir): + os.environ['OPENSSL_PATH'] = openssl_dir + else: + os.environ['OPENSSL_PATH'] = "C:\\Openssl\\" + if 'OPENSSL_CONF' not in os.environ: + openssl_cfg = "C:\\Openssl\\openssl.cfg" + if os.path.exists(openssl_cfg): + os.environ['OPENSSL_CONF'] = openssl_cfg + openssl = os.path.join(os.environ.get ('OPENSSL_PATH', ''), 'openssl.exe') + else: + # Get openssl path for Linux cases + openssl = shutil.which('openssl') + + return openssl + +def run_process (arg_list, print_cmd = False, capture_out = False): + sys.stdout.flush() + if os.name == 'nt' and os.path.splitext(arg_list[0])[1] == '' and \ + os.path.exists (arg_list[0] + '.exe'): + arg_list[0] += '.exe' + if print_cmd: + print (' '.join(arg_list)) + + exc = None + result = 0 + output = '' + try: + if capture_out: + output = subprocess.check_output(arg_list).decode() + else: + result = subprocess.call (arg_list) + except Exception as ex: + result = 1 + exc = ex + + if result: + if not print_cmd: + print ('Error in running process:\n %s' % ' '.join(arg_list)) + if exc is None: + sys.exit(1) + else: + raise exc + + return output + +# Adjust hash type algorithm based on Public key file +def adjust_hash_type (pub_key_file): + key_type = get_key_type (pub_key_file) + if key_type == 'RSA2048': + hash_type = 'SHA2_256' + elif key_type == 'RSA3072': + hash_type = 'SHA2_384' + else: + hash_type = None + + return hash_type + +def rsa_sign_file (priv_key, pub_key, hash_type, sign_scheme, in_file, out_file, inc_dat = False, inc_key = False): + + bins = bytearray() + if inc_dat: + bins.extend(get_file_data(in_file)) + + single_sign_file(priv_key, hash_type, sign_scheme, in_file, out_file) + + out_data = get_file_data(out_file) + + sign = SIGNATURE_HDR() + sign.SigSize = len(out_data) + sign.SigType = SIGN_TYPE_SCHEME[sign_scheme] + sign.HashAlg = HASH_TYPE_VALUE[hash_type] + + bins.extend(bytearray(sign) + out_data) + if inc_key: + key = gen_pub_key (priv_key, 
pub_key) + bins.extend(key) + + if len(bins) != len(out_data): + gen_file_from_object (out_file, bins) + +def get_key_type (in_key): + + # Check in_key is file or key Id + if not os.path.exists(in_key): + key = bytearray(gen_pub_key (in_key)) + else: + # Check for public key in binary format. + key = bytearray(get_file_data(in_key)) + + pub_key_hdr = PUB_KEY_HDR.from_buffer(key) + if pub_key_hdr.Identifier != b'PUBK': + pub_key = gen_pub_key (in_key) + pub_key_hdr = PUB_KEY_HDR.from_buffer(pub_key) + + key_type = next((key for key, value in PUB_KEY_TYPE.items() if value == pub_key_hdr.KeyType)) + return '%s%d' % (key_type, (pub_key_hdr.KeySize - 4) * 8) + + +def get_auth_hash_type (key_type, sign_scheme): + if key_type == "RSA2048" and sign_scheme == "RSA_PKCS1": + hash_type = 'SHA2_256' + auth_type = 'RSA2048_PKCS1_SHA2_256' + elif key_type == "RSA3072" and sign_scheme == "RSA_PKCS1": + hash_type = 'SHA2_384' + auth_type = 'RSA3072_PKCS1_SHA2_384' + elif key_type == "RSA2048" and sign_scheme == "RSA_PSS": + hash_type = 'SHA2_256' + auth_type = 'RSA2048_PSS_SHA2_256' + elif key_type == "RSA3072" and sign_scheme == "RSA_PSS": + hash_type = 'SHA2_384' + auth_type = 'RSA3072_PSS_SHA2_384' + else: + hash_type = '' + auth_type = '' + return auth_type, hash_type + +def gen_pub_key (in_key, pub_key = None): + + keydata = single_sign_gen_pub_key (in_key, pub_key) + + publickey = PUB_KEY_HDR() + publickey.KeySize = len(keydata) + publickey.KeyType = PUB_KEY_TYPE['RSA'] + + key = bytearray(publickey) + keydata + + if pub_key: + gen_file_from_object (pub_key, key) + + return key + +def decompress (in_file, out_file, tool_dir = ''): + if not os.path.isfile(in_file): + raise Exception ("Invalid input file '%s' !" % in_file) + + # Remove the Lz Header + fi = open(in_file,'rb') + di = bytearray(fi.read()) + fi.close() + + lz_hdr = LZ_HEADER.from_buffer (di) + offset = sizeof (lz_hdr) + if lz_hdr.signature == b"LZDM" or lz_hdr.compressed_len == 0: + fo = open(out_file,'wb') + fo.write(di[offset:offset + lz_hdr.compressed_len]) + fo.close() + return + + temp = os.path.splitext(out_file)[0] + '.tmp' + if lz_hdr.signature == b"LZMA": + alg = "Lzma" + elif lz_hdr.signature == b"LZ4 ": + alg = "Lz4" + else: + raise Exception ("Unsupported compression '%s' !" % lz_hdr.signature) + + fo = open(temp, 'wb') + fo.write(di[offset:offset + lz_hdr.compressed_len]) + fo.close() + + compress_tool = "%sCompress" % alg + if alg == "Lz4": + try: + cmdline = [ + os.path.join (tool_dir, compress_tool), + "-d", + "-o", out_file, + temp] + run_process (cmdline, False, True) + except: + print("Could not find/use CompressLz4 tool, trying with python lz4...") + try: + import lz4.block + if lz4.VERSION != '3.1.1': + print("Recommended lz4 module version is '3.1.1', '%s' is currently installed." % lz4.VERSION) + except ImportError: + print("Could not import lz4, use 'python -m pip install lz4==3.1.1' to install it.") + exit(1) + decompress_data = lz4.block.decompress(get_file_data(temp)) + with open(out_file, "wb") as lz4bin: + lz4bin.write(decompress_data) + else: + cmdline = [ + os.path.join (tool_dir, compress_tool), + "-d", + "-o", out_file, + temp] + run_process (cmdline, False, True) + os.remove(temp) + +def compress (in_file, alg, svn=0, out_path = '', tool_dir = ''): + if not os.path.isfile(in_file): + raise Exception ("Invalid input file '%s' !" 
% in_file) + + basename, ext = os.path.splitext(os.path.basename (in_file)) + if out_path: + if os.path.isdir (out_path): + out_file = os.path.join(out_path, basename + '.lz') + else: + out_file = os.path.join(out_path) + else: + out_file = os.path.splitext(in_file)[0] + '.lz' + + if alg == "Lzma": + sig = "LZMA" + elif alg == "Tiano": + sig = "LZUF" + elif alg == "Lz4": + sig = "LZ4 " + elif alg == "Dummy": + sig = "LZDM" + else: + raise Exception ("Unsupported compression '%s' !" % alg) + + in_len = os.path.getsize(in_file) + if in_len > 0: + compress_tool = "%sCompress" % alg + if sig == "LZDM": + shutil.copy(in_file, out_file) + compress_data = get_file_data(out_file) + elif sig == "LZ4 ": + try: + cmdline = [ + os.path.join (tool_dir, compress_tool), + "-e", + "-o", out_file, + in_file] + run_process (cmdline, False, True) + compress_data = get_file_data(out_file) + except: + print("Could not find/use CompressLz4 tool, trying with python lz4...") + try: + import lz4.block + if lz4.VERSION != '3.1.1': + print("Recommended lz4 module version is '3.1.1', '%s' is currently installed." % lz4.VERSION) + except ImportError: + print("Could not import lz4, use 'python -m pip install lz4==3.1.1' to install it.") + exit(1) + compress_data = lz4.block.compress(get_file_data(in_file), mode='high_compression') + elif sig == "LZMA": + cmdline = [ + os.path.join (tool_dir, compress_tool), + "-e", + "-o", out_file, + in_file] + run_process (cmdline, False, True) + compress_data = get_file_data(out_file) + else: + compress_data = bytearray() + + lz_hdr = LZ_HEADER () + lz_hdr.signature = sig.encode() + lz_hdr.svn = svn + lz_hdr.compressed_len = len(compress_data) + lz_hdr.length = os.path.getsize(in_file) + data = bytearray () + data.extend (lz_hdr) + data.extend (compress_data) + gen_file_from_object (out_file, data) + + return out_file diff --git a/GenContainer.py b/GenContainer.py new file mode 100755 index 0000000..98c9f1a --- /dev/null +++ b/GenContainer.py @@ -0,0 +1,884 @@ +#!/usr/bin/env python +## @ GenContainer.py +# Tools to operate on a container image +# +# Copyright (c) 2019 - 2022, Intel Corporation. All rights reserved.
+# SPDX-License-Identifier: BSD-2-Clause-Patent +# +## +import sys +import argparse +import re +sys.dont_write_bytecode = True +from ctypes import * +from CommonUtility import * + + + +class COMPONENT_ENTRY (Structure): + _pack_ = 1 + _fields_ = [ + ('name', ARRAY(c_char, 4)), # SBL pod entry name + ('offset', c_uint32), # Component offset in byte from the payload (data) ('size', c_uint32), # Region/Component size in byte + ('size', c_uint32), # Region/Component size in byte + ('attribute', c_uint8), # Attribute: BIT7 Reserved component entry + ('alignment', c_uint8), # This image need to be loaded to memory in (1 << Alignment) address + ('auth_type', c_uint8), # Refer AUTH_TYPE_VALUE: 0 - "NONE"; 1- "SHA2_256"; 2- "SHA2_384"; 3- "RSA2048_PKCS1_SHA2_256"; 4 - RSA3072_PKCS1_SHA2_384; + # 5 - RSA2048_PSS_SHA2_256; 6 - RSA3072_PSS_SHA2_384 + ('hash_size', c_uint8) # Hash data size, it could be image hash or public key hash + ] + + _attr = { + 'RESERVED' : 0x80 + } + + def __new__(cls, buf = None): + if buf is None: + return Structure.__new__(cls) + else: + return cls.from_buffer_copy(buf) + + def __init__(self, buf = None): + if buf is None: + self.hash_data = bytearray() + else: + off = sizeof(COMPONENT_ENTRY) + self.hash_data = bytearray(buf[off : off + self.hash_size]) + self.data = bytearray() + self.auth_data = bytearray() + + +class CONTAINER_HDR (Structure): + _pack_ = 1 + _fields_ = [ + ('signature', ARRAY(c_char, 4)), # Identifies structure + ('version', c_uint8), # Header version + ('svn', c_uint8), # Security version number + ('data_offset', c_uint16), # Offset of payload (data) from header in byte + ('data_size', c_uint32), # Size of payload (data) in byte + ('auth_type', c_uint8), # Refer AUTH_TYPE_VALUE: 0 - "NONE"; 1- "SHA2_256"; 2- "SHA2_384"; 3- "RSA2048_PKCS1_SHA2_256"; 4 - RSA3072_PKCS1_SHA2_384; + # 5 - RSA2048_PSS_SHA2_256; 6 - RSA3072_PSS_SHA2_384 + ('image_type', c_uint8), # 0: Normal + ('flags', c_uint8), # BIT0: monolithic signing + ('entry_count', c_uint8), # Number of entry in the header + ] + + _flags = { + 'MONO_SIGNING' : 0x01 + } + + _image_type = { + 'NORMAL' : 0x00, # Used for boot images in FV, regular ELF, PE32, etc. formats + 'CLASSIC' : 0xF3, # Used for booting Linux with bzImage, cmdline, initrd, etc. 
+ 'MULTIBOOT' : 0xF4, # Multiboot compliant ELF images + } + + def __new__(cls, buf = None): + if buf is None: + return Structure.__new__(cls) + else: + return cls.from_buffer_copy(buf) + + def __init__(self, buf = None): + self.priv_key = '' + self.alignment = 0x1000 + self.auth_data = bytearray() + self.comp_entry = [] + + if buf is not None: + # construct CONTAINER_HDR from existing buffer + offset = sizeof(self) + alignment = None + for i in range(self.entry_count): + component = COMPONENT_ENTRY(buf[offset:]) + if alignment is None: + alignment = 1 << component.alignment + offset += (sizeof(component) + component.hash_size) + comp_offset = component.offset + self.data_offset + lz_hdr = LZ_HEADER.from_buffer(bytearray(buf[comp_offset:comp_offset + sizeof(LZ_HEADER)])) + auth_offset = comp_offset + lz_hdr.compressed_len + sizeof(lz_hdr) + component.data = bytearray (buf[comp_offset:auth_offset]) + auth_offset = get_aligned_value (auth_offset, 4) + auth_size = CONTAINER.get_auth_size (component.auth_type, True) + component.auth_data = bytearray (buf[auth_offset:auth_offset + auth_size]) + self.comp_entry.append (component) + auth_size = CONTAINER.get_auth_size (self.auth_type, True) + auth_offset = get_aligned_value (offset, 4) + self.auth_data = bytearray (buf[auth_offset:auth_offset + auth_size]) + if alignment is not None: + self.alignment = alignment + +class CONTAINER (): + _struct_display_indent = 18 + _auth_type_value = { + "NONE" : 0, + "SHA2_256" : 1, + "SHA2_384" : 2, + "RSA2048_PKCS1_SHA2_256" : 3, + "RSA3072_PKCS1_SHA2_384" : 4, + "RSA2048_PSS_SHA2_256" : 5, + "RSA3072_PSS_SHA2_384" : 6, + } + + _auth_to_hashalg_str = { + "NONE" : "NONE", + "SHA2_256" : "SHA2_256", + "SHA2_384" : "SHA2_384", + "RSA2048_PKCS1_SHA2_256" : "SHA2_256", + "RSA3072_PKCS1_SHA2_384" : "SHA2_384", + "RSA2048_PSS_SHA2_256" : "SHA2_256", + "RSA3072_PSS_SHA2_384" : "SHA2_384", + } + + + _auth_to_signscheme_str = { + "NONE" : "", + "SHA2_256" : "", + "SHA2_384" : "", + "RSA2048_PKCS1_SHA2_256" : "RSA_PKCS1", + "RSA3072_PKCS1_SHA2_384" : "RSA_PKCS1", + "RSA2048_PSS_SHA2_256" : "RSA_PSS", + "RSA3072_PSS_SHA2_384" : "RSA_PSS", + } + + def __init__(self, buf = None): + self.out_dir = '.' + self.input_dir = '.' + self.key_dir = '.' + self.tool_dir = '.' + if buf is None: + self.header = CONTAINER_HDR () + else: + self.header = CONTAINER_HDR (buf) + # Check if image type is valid + image_type_str = CONTAINER.get_image_type_str(self.header.image_type) + + def init_header (self, signature, alignment, image_type = 'NORMAL'): + self.header.signature = signature + self.header.version = 1 + self.header.alignment = alignment + self.header.flags = 0 + if image_type not in CONTAINER_HDR._image_type.keys(): + raise Exception ("Invalid image type '%s' specified !" % image_type) + self.header.image_type = CONTAINER_HDR._image_type[image_type] + + @staticmethod + def get_image_type_str (image_type_val): + try: + image_type_str = next((key for key, value in CONTAINER_HDR._image_type.items() if value == image_type_val)) + except StopIteration: + raise Exception ("Unknown image type value 0x%x in container header !" % image_type_val) + return image_type_str + + @staticmethod + def get_auth_type_val (auth_type_str): + return CONTAINER._auth_type_value[auth_type_str] + + @staticmethod + def get_auth_type_str (auth_type_val): + try: + auth_type_str = next(k for k, v in CONTAINER._auth_type_value.items() if v == auth_type_val) + except StopIteration: + raise Exception ("Unknown auth type value 0x%x !" 
% auth_type_val) + return auth_type_str + + @staticmethod + def get_auth_size (auth_type, signed = False): + # calculate the length for the required authentication info + if type(auth_type) is type(1): + auth_type_str = CONTAINER.get_auth_type_str (auth_type) + else: + auth_type_str = auth_type + if auth_type_str == 'NONE': + auth_len = 0 + elif auth_type_str.startswith ('RSA'): + auth_len = int(auth_type_str[3:7]) >> 3 + if signed: + auth_len = auth_len * 2 + sizeof(PUB_KEY_HDR) + sizeof(SIGNATURE_HDR) + 4 + elif auth_type_str.startswith ('SHA2_'): + auth_len = int(auth_type_str[5:]) >> 3 + if signed: + auth_len = 0 + else: + raise Exception ("Unsupported authentication type '%s' !" % auth_type) + return auth_len + + @staticmethod + def decode_field (name, val): + # decode auth type into readable string + extra = '' + if name in ['CONTAINER_HDR.auth_type', 'COMPONENT_ENTRY.auth_type']: + auth_type = next(k for k, v in CONTAINER._auth_type_value.items() if v == val) + extra = '%d : %s' % (val, auth_type) + return extra + + @staticmethod + def hex_str (data, name = ''): + # convert bytearray to hex string + dlen = len(data) + if dlen == 0: + hex_str = '' + else: + if dlen <= 16: + hex_str = ' '.join(['%02x' % x for x in data]) + else: + hex_str = ' '.join(['%02x' % x for x in data[:8]]) + \ + ' .... ' + ' '.join(['%02x' % x for x in data[-8:]]) + hex_str = ' %s %s [%s]' % (name, ' ' * (CONTAINER._struct_display_indent - len(name) + 1), hex_str) + if len(data) > 0: + hex_str = hex_str + ' (len=0x%x)' % len(data) + return hex_str + + @staticmethod + def output_struct (obj, indent = 0, plen = 0): + # print out a struct info + body = '' if indent else (' ' * indent + '<%s>:\n') % obj.__class__.__name__ + if plen == 0: + plen = sizeof(obj) + pstr = (' ' * (indent + 1) + '{0:<%d} = {1}\n') % CONTAINER._struct_display_indent + for field in obj._fields_: + key = field[0] + val = getattr(obj, key) + rep = '' + if type(val) is str: + rep = "0x%X ('%s')" % (bytes_to_value(bytearray(val)), val) + elif type(val) in [int]: + rep = CONTAINER.decode_field ('%s.%s' % (obj.__class__.__name__, key), val) + if not rep: + rep = '0x%X' % (val) + else: + rep = str(val) + plen -= sizeof(field[1]) + body += pstr.format(key, rep) + if plen <= 0: + break + return body.strip() + + @staticmethod + def get_pub_key_hash (key, hash_type): + # calculate publish key hash + dh = bytearray (key)[sizeof(PUB_KEY_HDR):] + if hash_type == 'SHA2_256': + return bytearray(hashlib.sha256(dh).digest()) + elif hash_type == 'SHA2_384': + return bytearray(hashlib.sha384(dh).digest()) + else: + raise Exception ("Unsupported hash type in get_pub_key_hash!") + + @staticmethod + def calculate_auth_data (file, auth_type, priv_key, out_dir): + # calculate auth info for a given file + hash_data = bytearray() + auth_data = bytearray() + basename = os.path.basename (file) + if auth_type in ['NONE']: + pass + elif auth_type in ["SHA2_256"]: + data = get_file_data (file) + hash_data.extend (hashlib.sha256(data).digest()) + elif auth_type in ["SHA2_384"]: + data = get_file_data (file) + hash_data.extend (hashlib.sha384(data).digest()) + elif auth_type in ['RSA2048_PKCS1_SHA2_256', 'RSA3072_PKCS1_SHA2_384', 'RSA2048_PSS_SHA2_256', 'RSA3072_PSS_SHA2_384' ]: + auth_type = adjust_auth_type (auth_type, priv_key) + pub_key = os.path.join(out_dir, basename + '.pub') + di = gen_pub_key (priv_key, pub_key) + key_hash = CONTAINER.get_pub_key_hash (di, CONTAINER._auth_to_hashalg_str[auth_type]) + hash_data.extend (key_hash) + out_file = os.path.join(out_dir, 
basename + '.sig') + rsa_sign_file (priv_key, pub_key, CONTAINER._auth_to_hashalg_str[auth_type], CONTAINER._auth_to_signscheme_str[auth_type], file, out_file, False, True) + auth_data.extend (get_file_data(out_file)) + else: + raise Exception ("Unsupport AuthType '%s' !" % auth_type) + return hash_data, auth_data + + + def set_dir_path(self, out_dir, inp_dir, key_dir, tool_dir): + self.out_dir = out_dir + self.inp_dir = inp_dir + self.key_dir = key_dir + self.tool_dir = tool_dir + + def set_header_flags (self, flags, overwrite = False): + if overwrite: + self.header.flags = flags + else: + self.header.flags |= flags + + def set_header_svn_info (self, svn): + self.header.svn = svn + + def set_header_auth_info (self, auth_type_str = None, priv_key = None): + if priv_key is not None: + self.header.priv_key = priv_key + + if auth_type_str is not None: + self.header.auth_type = CONTAINER.get_auth_type_val (auth_type_str) + auth_size = CONTAINER.get_auth_size (self.header.auth_type, True) + self.header.auth_data = b'\xff' * auth_size + + def get_header_size (self): + length = sizeof (self.header) + for comp in self.header.comp_entry: + length += comp.hash_size + length += sizeof(COMPONENT_ENTRY) * len(self.header.comp_entry) + length += len(self.header.auth_data) + return length + + def get_auth_data (self, comp_file, auth_type_str): + # calculate auth info for a give component file with specified auth type + auth_size = CONTAINER.get_auth_size (auth_type_str, True) + file_data = bytearray(get_file_data (comp_file)) + auth_data = None + hash_data = bytearray() + + if len(file_data) < sizeof (LZ_HEADER): + return file_data, hash_data, auth_data + + lz_header = LZ_HEADER.from_buffer(file_data) + data = bytearray() + if lz_header.signature in LZ_HEADER._compress_alg: + offset = sizeof(lz_header) + get_aligned_value (lz_header.compressed_len) + if len(file_data) == auth_size + offset: + auth_data = file_data[offset:offset+auth_size] + data = file_data[:sizeof(lz_header) + lz_header.compressed_len] + if auth_type_str in ["SHA2_256"]: + hash_data.extend (hashlib.sha256(data).digest()) + if auth_type_str in ["SHA2_384"]: + hash_data.extend (hashlib.sha384(data).digest()) + elif auth_type_str in ['RSA2048', 'RSA3072']: + offset += ((CONTAINER.get_auth_size (auth_type_str))) + key_hash = self.get_pub_key_hash (file_data[offset:]) + hash_data.extend (key_hash) + else: + raise Exception ("Unsupport AuthType '%s' !" 
% auth_type) + return data, hash_data, auth_data + + def adjust_header (self): + # finalize the container + header = self.header + header.entry_count = len(header.comp_entry) + alignment = header.alignment - 1 + header.data_offset = (self.get_header_size() + alignment) & ~alignment + if header.entry_count > 0: + length = header.comp_entry[-1].offset + header.comp_entry[-1].size + header.data_size = (length + alignment) & ~alignment + else: + header.data_size = 0 + auth_type = self.get_auth_type_str (header.auth_type) + basename = header.signature.decode() + hdr_file = os.path.join(self.out_dir, basename + '.hdr') + hdr_data = bytearray (header) + for component in header.comp_entry: + hdr_data.extend (component) + hdr_data.extend (component.hash_data) + gen_file_from_object (hdr_file, hdr_data) + hash_data, auth_data = CONTAINER.calculate_auth_data (hdr_file, auth_type, header.priv_key, self.out_dir) + if len(auth_data) != len(header.auth_data): + print (len(auth_data) , len(header.auth_data)) + raise Exception ("Unexpected authentication data length for container header !") + header.auth_data = auth_data + + def get_data (self): + # Prepare data buffer + header = self.header + data = bytearray(header) + for component in header.comp_entry: + data.extend (component) + data.extend (component.hash_data) + padding = b'\xff' * get_padding_length (len(data)) + data.extend(padding + header.auth_data) + for component in header.comp_entry: + offset = component.offset + header.data_offset + data.extend (b'\xff' * (offset - len(data))) + comp_data = bytearray(component.data) + padding = b'\xff' * get_padding_length (len(comp_data)) + comp_data.extend (padding + component.auth_data) + if len(comp_data) > component.size: + raise Exception ("Component '%s' needs space 0x%X, but region size is 0x%X !" 
% (component.name.decode(), len(comp_data), component.size)) + data.extend (comp_data) + offset = header.data_offset + header.data_size + data.extend (b'\xff' * (offset - len(data))) + return data + + def locate_component (self, comp_name): + component = None + for each in self.header.comp_entry: + if each.name.decode() == comp_name.upper(): + component = each + break; + return component + + def dump (self): + print ('%s' % self.output_struct (self.header)) + print (self.hex_str (self.header.auth_data, 'auth_data')) + for component in self.header.comp_entry: + print ('%s' % self.output_struct (component)) + print (self.hex_str (component.hash_data, 'hash_data')) + print (self.hex_str (component.auth_data, 'auth_data')) + print (self.hex_str (component.data, 'data') + ' %s' % str(component.data[:4].decode())) + + def create (self, layout): + + # for monolithic signing, need to add a reserved _SG_ entry to hold the auth info + mono_sig = '_SG_' + is_mono_signing = True if layout[-1][0] == mono_sig else False + + # get the first entry in layout as CONTAINER_HDR + container_sig, container_file, image_type, auth_type, key_file, alignment, region_size, svn = layout[0] + + if alignment == 0: + alignment = 0x1000 + + if auth_type == '': + auth_type = 'NONE' + + if image_type == '': + image_type = 'NORMAL' + + if container_file == '': + container_file = container_sig + '.bin' + key_path = os.path.join(self.key_dir, key_file) + if os.path.isfile (key_path): + auth_type = adjust_auth_type (auth_type, key_path) + + # build header + self.init_header (container_sig.encode(), alignment, image_type) + self.set_header_auth_info (auth_type, key_path) + self.set_header_svn_info (svn) + + name_set = set() + is_last_entry = False + for name, file, compress_alg, auth_type, key_file, alignment, region_size, svn in layout[1:]: + if is_last_entry: + raise Exception ("'%s' must be the last entry in layout for monolithic signing!" % mono_sig) + if compress_alg == '': + compress_alg = 'Dummy' + if auth_type == '': + auth_type = 'NONE' + + # build a component entry + component = COMPONENT_ENTRY () + component.name = name.encode() + if alignment == 0: + component.alignment = self.header.alignment.bit_length() - 1 + else: + component.alignment = alignment.bit_length() - 1 + component.attribute = 0 + component.auth_type = self.get_auth_type_val (auth_type) + key_file = os.path.join (self.key_dir, key_file) + if file: + if os.path.isabs(file): + in_file = file + else: + for tst in [self.inp_dir, self.out_dir]: + in_file = os.path.join(tst, file) + if os.path.isfile(in_file): + break + if not os.path.isfile(in_file): + raise Exception ("Component file path '%s' is invalid !" 
% file) + else: + in_file = os.path.join(self.out_dir, component.name.decode() + '.bin') + gen_file_with_size (in_file, 0) + if component.name == mono_sig.encode(): + component.attribute = COMPONENT_ENTRY._attr['RESERVED'] + compress_alg = 'Dummy' + is_last_entry = True + + # compress the component + lz_file = compress (in_file, compress_alg, svn, self.out_dir, self.tool_dir) + component.data = bytearray(get_file_data (lz_file)) + + # calculate the component auth info + component.hash_data, component.auth_data = CONTAINER.calculate_auth_data (lz_file, auth_type, key_file, self.out_dir) + component.hash_size = len(component.hash_data) + if region_size == 0: + # arrange the region size automatically + region_size = len(component.data) + region_size = get_aligned_value (region_size, 4) + len(component.auth_data) + if is_mono_signing: + region_size = get_aligned_value (region_size, self.header.alignment) + else: + region_size = get_aligned_value (region_size, (1 << component.alignment)) + component.size = region_size + name_set.add (component.name) + self.header.comp_entry.append (component) + + if len(name_set) != len(self.header.comp_entry): + raise Exception ("Found duplicated component names in a container !") + + # calculate the component offset based on alignment requirement + base_offset = None + offset = self.get_header_size () + for component in self.header.comp_entry: + alignment = (1 << component.alignment) - 1 + next_offset = (offset + alignment) & ~alignment + if is_mono_signing and (next_offset - offset >= sizeof(LZ_HEADER)): + offset = next_offset - sizeof(LZ_HEADER) + else: + offset = next_offset + if base_offset is None: + base_offset = offset + component.offset = offset - base_offset + offset += component.size + + if is_mono_signing: + # for monolithic signing, set proper flags and update header + self.set_header_flags (CONTAINER_HDR._flags['MONO_SIGNING']) + self.adjust_header () + + # update auth info for last _SG_ entry + data = self.get_data ()[self.header.data_offset:] + pods_comp = self.header.comp_entry[-1] + pods_data = data[:pods_comp.offset] + gen_file_from_object (in_file, pods_data) + pods_comp.hash_data, pods_comp.auth_data = CONTAINER.calculate_auth_data (in_file, auth_type, key_file, self.out_dir) + + self.adjust_header () + data = self.get_data () + + out_file = os.path.join(self.out_dir, container_file) + gen_file_from_object (out_file, data) + + return out_file + + def replace (self, comp_name, comp_file, comp_alg, key_file, svn, new_name): + if self.header.flags & CONTAINER_HDR._flags['MONO_SIGNING']: + raise Exception ("Counld not replace component for monolithically signed container!") + + component = self.locate_component (comp_name) + if not component: + raise Exception ("Counld not locate component '%s' in container !" 
% comp_name) + if comp_alg == '': + # reuse the original compression alg + lz_header = LZ_HEADER.from_buffer(component.data) + comp_alg = LZ_HEADER._compress_alg[lz_header.signature] + else: + comp_alg = comp_alg[0].upper() + comp_alg[1:] + + # verify the new component hash does match the hash stored in the container header + auth_type_str = self.get_auth_type_str (component.auth_type) + data, hash_data, auth_data = self.get_auth_data (comp_file, auth_type_str) + if auth_data is None: + lz_file = compress (comp_file, comp_alg, svn, self.out_dir, self.tool_dir) + if auth_type_str.startswith ('RSA') and key_file == '': + raise Exception ("Signing key needs to be specified !") + hash_data, auth_data = CONTAINER.calculate_auth_data (lz_file, auth_type_str, key_file, self.out_dir) + data = get_file_data (lz_file) + component.data = bytearray(data) + component.auth_data = bytearray(auth_data) + if component.hash_data != bytearray(hash_data): + raise Exception ('Compoent hash does not match the one stored in container header !') + + # create the final output file + data = self.get_data () + if new_name == '': + new_name = self.header.signature + '.bin' + out_file = os.path.join(self.out_dir, new_name) + gen_file_from_object (out_file, data) + + return out_file + + def extract (self, name = '', file_path = ''): + if name == '': + # extract all components inside a container + # so creat a layout file first + if file_path == '': + file_name = self.header.signature + '.bin' + else: + file_name = os.path.splitext(os.path.basename (file_path))[0] + '.bin' + + # create header entry + auth_type_str = self.get_auth_type_str (self.header.auth_type) + match = re.match('RSA(\d+)_', auth_type_str) + if match: + key_file = 'KEY_ID_CONTAINER_RSA%s' % match.group(1) + else: + key_file = '' + alignment = self.header.alignment + image_type_str = CONTAINER.get_image_type_str(self.header.image_type) + header = ['%s' % self.header.signature.decode(), file_name, image_type_str, auth_type_str, key_file] + layout = [(' Name', ' ImageFile', ' CompAlg', ' AuthType', ' KeyFile', ' Alignment', ' Size', 'Svn')] + layout.append(tuple(["'%s'" % x for x in header] + ['0x%x' % alignment, '0', '0x%x' % self.header.svn])) + # create component entry + for component in self.header.comp_entry: + auth_type_str = self.get_auth_type_str (component.auth_type) + match = re.match('RSA(\d+)_', auth_type_str) + if match: + key_file = 'KEY_ID_CONTAINER_COMP_RSA%s' % match.group(1) + else: + key_file = '' + lz_header = LZ_HEADER.from_buffer(component.data) + alg = LZ_HEADER._compress_alg[lz_header.signature] + svn = lz_header.svn + if component.attribute & COMPONENT_ENTRY._attr['RESERVED']: + comp_file = '' + else: + comp_file = component.name.decode() + '.bin' + comp = [component.name.decode(), comp_file, alg, auth_type_str, key_file] + layout.append(tuple(["'%s'" % x for x in comp] + ['0x%x' % (1 << component.alignment), '0x%x' % component.size, '0x%x' % svn])) + + # write layout file + layout_file = os.path.join(self.out_dir, self.header.signature.decode() + '.txt') + fo = open (layout_file, 'w') + fo.write ('# Container Layout File\n#\n') + for idx, each in enumerate(layout): + line = ' %-6s, %-16s, %-10s, %-24s, %-32s, %-10s, %-10s, %-10s' % each + if idx == 0: + line = '# %s\n' % line + else: + line = ' (%s),\n' % line + fo.write (line) + if idx == 0: + line = '# %s\n' % ('=' * 136) + fo.write (line) + fo.close() + + for component in self.header.comp_entry: + if component.attribute & COMPONENT_ENTRY._attr['RESERVED']: + continue + # 
creat individual component region and image binary + if (component.name.decode() == name) or (name == ''): + basename = os.path.join(self.out_dir, '%s' % component.name.decode()) + sig_file = basename + '.rgn' + sig_data = component.data + b'\xff' * get_padding_length (len(component.data)) + component.auth_data + gen_file_from_object (sig_file, sig_data) + + bin_file = basename + '.bin' + lz_header = LZ_HEADER.from_buffer(component.data) + signature = lz_header.signature + if signature in [b'LZDM']: + offset = sizeof(lz_header) + data = component.data[offset : offset + lz_header.compressed_len] + gen_file_from_object (bin_file, data) + elif signature in [b'LZMA', b'LZ4 ']: + decompress (sig_file, bin_file, self.tool_dir) + else: + raise Exception ("Unknown LZ format!") + +def gen_container_bin (container_list, out_dir, inp_dir, key_dir = '.', tool_dir = ''): + for each in container_list: + container = CONTAINER () + container.set_dir_path (out_dir, inp_dir, key_dir, tool_dir) + out_file = container.create (each) + print ("Container '%s' was created successfully at: \n %s" % (container.header.signature.decode(), out_file)) + +def adjust_auth_type (auth_type_str, key_path): + if os.path.exists(key_path): + sign_key_type = get_key_type(key_path) + if auth_type_str != '': + sign_scheme = CONTAINER._auth_to_signscheme_str[auth_type_str] + else: + # Set to default signing scheme if auth type is generated. + sign_scheme = 'RSA_PSS' + auth_type, hash_type = get_auth_hash_type (sign_key_type, sign_scheme) + if auth_type_str and (auth_type != auth_type_str): + print ("Override auth type to '%s' in order to match the private key type !" % auth_type) + auth_type_str = auth_type + + return auth_type_str + +def gen_layout (comp_list, img_type, auth_type_str, svn, out_file, key_dir, key_file): + hash_type = CONTAINER._auth_to_hashalg_str[auth_type_str] if auth_type_str else '' + auth_type = auth_type_str + key_path = os.path.join(key_dir, key_file) + auth_type = adjust_auth_type (auth_type, key_path) + if auth_type == '': + raise Exception ("'auth' parameter is expected !") + + # prepare the layout from individual components from '-cl' + if img_type not in CONTAINER_HDR._image_type.keys(): + raise Exception ("Invalid Container Type '%s' !" % img_type) + layout = "('BOOT', '%s', '%s', '%s' , '%s', 0x10, 0, %s),\n" % (out_file, img_type, auth_type, key_file, svn) + end_layout = "('_SG_', '', 'Dummy', '%s', '', 0, 0, %s)," % (hash_type, svn) + for idx, each in enumerate(comp_list): + parts = each.split(':') + comp_name = parts[0] + if len(comp_name) != 4: + raise Exception ("Invalid component string format '%s' !" 
% each) + + if (len(parts)) > 2: + comp_file = ':'.join(parts[1:2]) + com_svn = ':'.join(parts[2:]) + else: + comp_file = ':'.join(parts[1:]) + com_svn = 0 # set to default svn + + if comp_name == 'INRD': + align = 0x1000 + else: + align = 0 + layout += "('%s', '%s', 'Dummy', 'NONE', '', %s, 0, %s),\n" % (comp_name, comp_file, align, com_svn) + layout += end_layout + return layout + +def create_container (args): + layout = "" + # if '-l', get the layout content directly + # if '-cl' prepare the layout + + #extract key dir and file + key_path = os.path.abspath(args.key_path) + if os.path.isdir(key_path): + key_dir = key_path + key_file = '' + else: + key_dir = os.path.dirname(key_path) + key_file = os.path.basename(key_path) + + #extract out dir and file + out_path = os.path.abspath(args.out_path) + if os.path.isdir(out_path): + out_dir = out_path + out_file = '' + else: + out_dir = os.path.dirname(out_path) + out_file = os.path.basename(out_path) + + if args.layout: + # Using layout file + layout = get_file_data(args.layout, 'r') + else: + # Using component list + if not key_file: + raise Exception ("key_path expects a key file path !") + layout = gen_layout (args.comp_list, args.img_type, args.auth, args.svn, out_file, key_dir, key_file) + container_list = eval ('[[%s]]' % layout.replace('\\', '/')) + + comp_dir = os.path.abspath(args.comp_dir) + if not os.path.isdir(comp_dir): + raise Exception ("'comp_dir' expects a directory path !") + tool_dir = os.path.abspath(args.tool_dir) + if not os.path.isdir(tool_dir): + raise Exception ("'tool_dir' expects a directory path !") + + if out_file: + # override the output file name + hdr_entry = list (container_list[0][0]) + hdr_entry[1] = out_file + container_list[0][0] = tuple(hdr_entry) + + if args.layout and args.auth: + # override auth + hdr_entry = list (container_list[0][0]) + hdr_entry[3] = args.auth + container_list[0][0] = tuple(hdr_entry) + + gen_container_bin (container_list, out_dir, comp_dir, key_dir, tool_dir) + +def extract_container (args): + tool_dir = args.tool_dir if args.tool_dir else '.' + data = get_file_data (args.image) + container = CONTAINER (data) + container.set_dir_path (args.out_dir, '.', '.', tool_dir) + container.extract (args.comp_name, args.image) + print ("Components were extraced successfully at:\n %s" % args.out_dir) + +def replace_component (args): + tool_dir = args.tool_dir if args.tool_dir else '.' 
+ data = get_file_data (args.image) + container = CONTAINER (data) + out_path = os.path.abspath(args.out_image) + out_dir = os.path.dirname(out_path) + out_file = os.path.basename(out_path) + container.set_dir_path (out_dir, '.', '.', tool_dir) + file = container.replace (args.comp_name, args.comp_file, args.compress, args.key_file, args.svn, out_file) + print ("Component '%s' was replaced successfully at:\n %s" % (args.comp_name, file)) + +def sign_component (args): + compress_alg = args.compress + compress_alg = compress_alg[0].upper() + compress_alg[1:] + + #extract out dir and file + sign_file = os.path.abspath(args.out_file) + out_dir = os.path.dirname(sign_file) + + lz_file = compress (args.comp_file, compress_alg, args.svn, out_dir, args.tool_dir) + data = bytearray(get_file_data (lz_file)) + hash_data, auth_data = CONTAINER.calculate_auth_data (lz_file, args.auth, args.key_file, out_dir) + + data.extend (b'\xff' * get_padding_length(len(data))) + data.extend (auth_data) + gen_file_from_object (sign_file, data) + print ("Component file was signed successfully at:\n %s" % sign_file) + +def display_container (args): + data = get_file_data (args.image) + container = CONTAINER (data) + container.dump () + +def main(): + parser = argparse.ArgumentParser() + sub_parser = parser.add_subparsers(help='command') + + # Command for display + cmd_display = sub_parser.add_parser('view', help='display a container image') + cmd_display.add_argument('-i', dest='image', type=str, required=True, help='Container input image') + cmd_display.set_defaults(func=display_container) + + # Command for create + cmd_display = sub_parser.add_parser('create', help='create a container image') + group = cmd_display.add_mutually_exclusive_group (required=True) + # '-l' or '-cl', one of them is mandatory + group.add_argument('-l', dest='layout', type=str, help='Container layout input file if no -cl') + group.add_argument('-cl', dest='comp_list',nargs='+', help='List of each component files, following XXXX:FileName format') + cmd_display.add_argument('-t', dest='img_type', type=str, default='CLASSIC', help='Container Image Type : [NORMAL, CLASSIC, MULTIBOOT]') + cmd_display.add_argument('-o', dest='out_path', type=str, default='.', help='Container output directory/file') + cmd_display.add_argument('-k', dest='key_path', type=str, default='', help='Input key directory/file. 
Use key directoy path when container layout -l option is used \ + Use Key Id or key file path when component files with -cl option is specified') + cmd_display.add_argument('-a', dest='auth', choices=['SHA2_256', 'SHA2_384', 'RSA2048_PKCS1_SHA2_256', + 'RSA3072_PKCS1_SHA2_384', 'RSA2048_PSS_SHA2_256', 'RSA3072_PSS_SHA2_384', 'NONE'], default='', help='authentication algorithm') + cmd_display.add_argument('-cd', dest='comp_dir', type=str, default='', help='Componet image input directory') + cmd_display.add_argument('-td', dest='tool_dir', type=str, default='', help='Compression tool directory') + cmd_display.add_argument('-s', dest='svn', type=int, default=0, help='Security version number for Container header') + cmd_display.set_defaults(func=create_container) + + # Command for extract + cmd_display = sub_parser.add_parser('extract', help='extract a component image') + cmd_display.add_argument('-i', dest='image', type=str, required=True, help='Container input image path') + cmd_display.add_argument('-n', dest='comp_name', type=str, default='', help='Component name to extract') + cmd_display.add_argument('-od', dest='out_dir', type=str, default='.', help='Output directory') + cmd_display.add_argument('-td', dest='tool_dir', type=str, default='', help='Compression tool directory') + cmd_display.set_defaults(func=extract_container) + + # Command for replace + cmd_display = sub_parser.add_parser('replace', help='replace a component image') + cmd_display.add_argument('-i', dest='image', type=str, required=True, help='Container input image path') + cmd_display.add_argument('-o', dest='out_image', type=str, default='', help='Container new output image path') + cmd_display.add_argument('-n', dest='comp_name', type=str, required=True, help='Component name to replace') + cmd_display.add_argument('-f', dest='comp_file', type=str, required=True, help='Component input file path') + cmd_display.add_argument('-c', dest='compress', choices=['lz4', 'lzma', 'dummy'], default='dummy', help='compression algorithm') + cmd_display.add_argument('-k', dest='key_file', type=str, default='', help='Key Id or Private key file path to sign component') + cmd_display.add_argument('-td', dest='tool_dir', type=str, default='', help='Compression tool directory') + cmd_display.add_argument('-s', dest='svn', type=int, default=0, help='Security version number for Component') + cmd_display.set_defaults(func=replace_component) + + # Command for sign + cmd_display = sub_parser.add_parser('sign', help='compress and sign a component image') + cmd_display.add_argument('-f', dest='comp_file', type=str, required=True, help='Component input file path') + cmd_display.add_argument('-o', dest='out_file', type=str, default='', help='Signed output image path') + cmd_display.add_argument('-c', dest='compress', choices=['lz4', 'lzma', 'dummy'], default='dummy', help='compression algorithm') + cmd_display.add_argument('-a', dest='auth', choices=['SHA2_256', 'SHA2_384', 'RSA2048_PKCS1_SHA2_256', + 'RSA3072_PKCS1_SHA2_384', 'RSA2048_PSS_SHA2_256', 'RSA3072_PSS_SHA2_384', 'NONE'], default='NONE', help='authentication algorithm') + cmd_display.add_argument('-k', dest='key_file', type=str, default='', help='Key Id or Private key file path to sign component') + cmd_display.add_argument('-td', dest='tool_dir', type=str, default='', help='Compression tool directory') + cmd_display.add_argument('-s', dest='svn', type=int, default=0, help='Security version number for Component') + cmd_display.set_defaults(func=sign_component) + + # Parse arguments and 
run sub-command + args = parser.parse_args() + try: + func = args.func + except AttributeError: + parser.error("too few arguments") + + # Additional check + if args.func == sign_component: + if args.auth.startswith('RSA') and args.key_file == '': + parser.error("the following arguments are required: -k") + + func(args) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/SingleSign.py b/SingleSign.py new file mode 100644 index 0000000..5baacbd --- /dev/null +++ b/SingleSign.py @@ -0,0 +1,301 @@ +#!/usr/bin/env python +## @ SingleSign.py +# Single signing script +# +# Copyright (c) 2020, Intel Corporation. All rights reserved.
+# SPDX-License-Identifier: BSD-2-Clause-Patent +# +## + +## +# Import Modules +# +import os +import sys +import re +import shutil +import subprocess +import struct +import hashlib +import string + +SIGNING_KEY = { + # Key Id | Key File Name start | + # ================================================================= + # KEY_ID_MASTER is used for signing Slimboot Key Hash Manifest container (KEYH Component) + "KEY_ID_MASTER_RSA2048" : "MasterTestKey_Priv_RSA2048.pem", + "KEY_ID_MASTER_RSA3072" : "MasterTestKey_Priv_RSA3072.pem", + + # KEY_ID_CFGDATA is used for signing external Config data blob) + "KEY_ID_CFGDATA_RSA2048" : "ConfigTestKey_Priv_RSA2048.pem", + "KEY_ID_CFGDATA_RSA3072" : "ConfigTestKey_Priv_RSA3072.pem", + + # KEY_ID_FIRMWAREUPDATE is used for signing capsule firmware update image) + "KEY_ID_FIRMWAREUPDATE_RSA2048" : "FirmwareUpdateTestKey_Priv_RSA2048.pem", + "KEY_ID_FIRMWAREUPDATE_RSA3072" : "FirmwareUpdateTestKey_Priv_RSA3072.pem", + + # KEY_ID_CONTAINER is used for signing container header with mono signature + "KEY_ID_CONTAINER_RSA2048" : "ContainerTestKey_Priv_RSA2048.pem", + "KEY_ID_CONTAINER_RSA3072" : "ContainerTestKey_Priv_RSA3072.pem", + + # CONTAINER_COMP1_KEY_ID is used for signing container components + "KEY_ID_CONTAINER_COMP_RSA2048" : "ContainerCompTestKey_Priv_RSA2048.pem", + "KEY_ID_CONTAINER_COMP_RSA3072" : "ContainerCompTestKey_Priv_RSA3072.pem", + + # KEY_ID_OS1_PUBLIC, KEY_ID_OS2_PUBLIC is used for referencing Boot OS public keys + "KEY_ID_OS1_PUBLIC_RSA2048" : "OS1_TestKey_Pub_RSA2048.pem", + "KEY_ID_OS1_PUBLIC_RSA3072" : "OS1_TestKey_Pub_RSA3072.pem", + + "KEY_ID_OS2_PUBLIC_RSA2048" : "OS2_TestKey_Pub_RSA2048.pem", + "KEY_ID_OS2_PUBLIC_RSA3072" : "OS2_TestKey_Pub_RSA3072.pem", + + } + +MESSAGE_SBL_KEY_DIR = ( + "!!! PRE-REQUISITE: Path to SBL_KEY_DIR has to be set with SBL KEYS DIRECTORY !!! \n" + "!!! Generate keys using GenerateKeys.py available in BootloaderCorePkg/Tools directory !!! \n" + "!!! Run $python BootloaderCorePkg/Tools/GenerateKeys.py -k $PATH_TO_SBL_KEY_DIR !!!\n" + "!!! Set SBL_KEY_DIR environ with path to SBL KEYS DIR !!!\n" + "!!! Windows $set SBL_KEY_DIR=$PATH_TO_SBL_KEY_DIR !!!\n" + "!!! 
Linux $export SBL_KEY_DIR=$PATH_TO_SBL_KEY_DIR !!!\n" + ) + +def get_openssl_path (): + if os.name == 'nt': + if 'OPENSSL_PATH' not in os.environ: + openssl_dir = "C:\\Openssl\\bin\\" + if os.path.exists (openssl_dir): + os.environ['OPENSSL_PATH'] = openssl_dir + else: + os.environ['OPENSSL_PATH'] = "C:\\Openssl\\" + if 'OPENSSL_CONF' not in os.environ: + openssl_cfg = "C:\\Openssl\\openssl.cfg" + if os.path.exists(openssl_cfg): + os.environ['OPENSSL_CONF'] = openssl_cfg + openssl = os.path.join(os.environ.get ('OPENSSL_PATH', ''), 'openssl.exe') + else: + # Get openssl path for Linux cases + openssl = shutil.which('openssl') + + return openssl + +def run_process (arg_list, print_cmd = False, capture_out = False): + sys.stdout.flush() + if print_cmd: + print (' '.join(arg_list)) + + exc = None + result = 0 + output = '' + try: + if capture_out: + output = subprocess.check_output(arg_list).decode() + else: + result = subprocess.call (arg_list) + except Exception as ex: + result = 1 + exc = ex + + if result: + if not print_cmd: + print ('Error in running process:\n %s' % ' '.join(arg_list)) + if exc is None: + sys.exit(1) + else: + raise exc + + return output + +def check_file_pem_format (priv_key): + # Check for file .pem format + key_name = os.path.basename(priv_key) + if os.path.splitext(key_name)[1] == ".pem": + return True + else: + return False + +def get_key_id (priv_key): + # Extract base name if path is provided. + key_name = os.path.basename(priv_key) + # Check for KEY_ID in key naming. + if key_name.startswith('KEY_ID'): + return key_name + else: + return None + +def get_sbl_key_dir (): + # Check Key store setting SBL_KEY_DIR path + if 'SBL_KEY_DIR' not in os.environ: + raise Exception ("ERROR: SBL_KEY_DIR is not defined. Set SBL_KEY_DIR with SBL Keys directory!!\n" + + MESSAGE_SBL_KEY_DIR) + + sbl_key_dir = os.environ.get('SBL_KEY_DIR') + if not os.path.exists(sbl_key_dir): + raise Exception (("ERROR:SBL_KEY_DIR set %s is not valid. Set the correct SBL_KEY_DIR path !!\n" + + MESSAGE_SBL_KEY_DIR) % sbl_key_dir) + else: + return sbl_key_dir + +def get_key_from_store (in_key): + + #Check in_key is path to key + if os.path.exists(in_key): + return in_key + + # Get Slimboot key dir path + sbl_key_dir = get_sbl_key_dir() + + # Extract if in_key is key_id + priv_key = get_key_id (in_key) + if priv_key is not None: + if (priv_key in SIGNING_KEY): + # Generate key file name from key id + priv_key_file = SIGNING_KEY[priv_key] + else: + raise Exception('KEY_ID %s is not found in supported KEY IDs!!' % priv_key) + elif check_file_pem_format(in_key) == True: + # check if file name is provided in pem format + priv_key_file = in_key + else: + priv_key_file = None + raise Exception('key provided %s is not valid!' % in_key) + + # Create a file path + # Join Key Dir and priv_key_file + try: + priv_key = os.path.join (sbl_key_dir, priv_key_file) + except: + raise Exception('priv_key is not found %s!' % priv_key) + + # Check for priv_key construted based on KEY ID exists in specified path + if not os.path.isfile(priv_key): + raise Exception (("!!! ERROR: Key file corresponding to '%s' do not exist in Sbl key directory at '%s' !!! 
\n" + MESSAGE_SBL_KEY_DIR) % (in_key, sbl_key_dir)) + + return priv_key + +# +# Sign an file using openssl +# +# priv_key [Input] Key Id or Path to Private key +# hash_type [Input] Signing hash +# sign_scheme[Input] Sign/padding scheme +# in_file [Input] Input file to be signed +# out_file [Input/Output] Signed data file +# + +def single_sign_file (priv_key, hash_type, sign_scheme, in_file, out_file): + + _hash_type_string = { + "SHA2_256" : 'sha256', + "SHA2_384" : 'sha384', + "SHA2_512" : 'sha512', + } + + _hash_digest_Size = { + # Hash_string : Hash_Size + "SHA2_256" : 32, + "SHA2_384" : 48, + "SHA2_512" : 64, + "SM3_256" : 32, + } + + _sign_scheme_string = { + "RSA_PKCS1" : 'pkcs1', + "RSA_PSS" : 'pss', + } + + priv_key = get_key_from_store(priv_key) + + # Temporary files to store hash generated + hash_file_tmp = out_file+'.hash.tmp' + hash_file = out_file+'.hash' + + # Generate hash using openssl dgst in hex format + cmdargs = [get_openssl_path(), 'dgst', '-'+'%s' % _hash_type_string[hash_type], '-out', '%s' % hash_file_tmp, '%s' % in_file] + run_process (cmdargs) + + # Extract hash form dgst command output and convert to ascii + with open(hash_file_tmp, 'r') as fin: + hashdata = fin.read() + fin.close() + + try: + hashdata = hashdata.rsplit('=', 1)[1].strip() + except: + raise Exception('Hash Data not found for signing!') + + if len(hashdata) != (_hash_digest_Size[hash_type] * 2): + raise Exception('Hash Data size do match with for hash type!') + + hashdata_bytes = bytearray.fromhex(hashdata) + open (hash_file, 'wb').write(hashdata_bytes) + + print ("Key used for Singing %s !!" % priv_key) + + # sign using Openssl pkeyutl + cmdargs = [get_openssl_path(), 'pkeyutl', '-sign', '-in', '%s' % hash_file, '-inkey', '%s' % priv_key, + '-out', '%s' % out_file, '-pkeyopt', 'digest:%s' % _hash_type_string[hash_type], + '-pkeyopt', 'rsa_padding_mode:%s' % _sign_scheme_string[sign_scheme]] + + run_process (cmdargs) + + return + +# +# Extract public key using openssl +# +# in_key [Input] Private key or public key in pem format +# pub_key_file [Input/Output] Public Key to a file +# +# return keydata (mod, exp) in bin format +# + +def single_sign_gen_pub_key (in_key, pub_key_file = None): + + in_key = get_key_from_store(in_key) + + # Expect key to be in PEM format + is_prv_key = False + cmdline = [get_openssl_path(), 'rsa', '-pubout', '-text', '-noout', '-in', '%s' % in_key] + # Check if it is public key or private key + text = open(in_key, 'r').read() + if '-BEGIN RSA PRIVATE KEY-' in text or '-BEGIN PRIVATE KEY-' in text: + is_prv_key = True + elif '-BEGIN PUBLIC KEY-' in text: + cmdline.extend (['-pubin']) + else: + raise Exception('Unknown key format "%s" !' 
% in_key) + + if pub_key_file: + cmdline.extend (['-out', '%s' % pub_key_file]) + capture = False + else: + capture = True + + output = run_process (cmdline, capture_out = capture) + if not capture: + output = text = open(pub_key_file, 'r').read() + data = output.replace('\r', '') + data = data.replace('\n', '') + data = data.replace(' ', '') + + # Extract the modulus + if is_prv_key: + match = re.search('modulus(.*)publicExponent:\s+(\d+)\s+', data) + else: + match = re.search('Modulus(?:.*?):(.*)Exponent:\s+(\d+)\s+', data) + if not match: + raise Exception('Public key not found!') + modulus = match.group(1).replace(':', '') + exponent = int(match.group(2)) + + mod = bytearray.fromhex(modulus) + # Remove the '00' from the front if the MSB is 1 + if mod[0] == 0 and (mod[1] & 0x80): + mod = mod[1:] + exp = bytearray.fromhex('{:08x}'.format(exponent)) + + keydata = mod + exp + + return keydata + diff --git a/core/abl_executable.mk b/core/abl_executable.mk new file mode 100755 index 0000000..3fa5f97 --- /dev/null +++ b/core/abl_executable.mk @@ -0,0 +1,55 @@ +ifeq ($(strip $(LOCAL_MODULE_CLASS)),) +LOCAL_MODULE_CLASS := ABL +endif + +ifeq ($(strip $(LOCAL_MODULE_SUFFIX)),) +LOCAL_MODULE_SUFFIX := .abl +endif + +ifeq ($(strip $(LOCAL_MODULE_PATH)),) +LOCAL_MODULE_PATH := $(PRODUCT_OUT)/abl +endif + +LOCAL_CC := $(IAFW_CC) +LOCAL_CLANG := true +LOCAL_SANITIZE := never +LOCAL_NO_DEFAULT_COMPILER_FLAGS := true +LOCAL_CFLAGS += $(TARGET_IAFW_GLOBAL_CFLAGS) +LOCAL_ASFLAGS += $(TARGET_IAFW_ASFLAGS) +LOCAL_LDFLAGS := $(TARGET_IAFW_GLOBAL_LDFLAGS) -static \ + -T $(TARGET_ABL_LDS) $(LOCAL_LDFLAGS) +# If kernel enforce superpages the .text section gets aligned at +# offset 0x200000 which break multiboot compliance. +LOCAL_LDFLAGS += -z max-page-size=0x1000 +LOCAL_ABL_LDFALGS := $(LOCAL_LDFLAGS) +LOCAL_OBJCOPY_FLAGS := $(TARGET_IAFW_GLOBAL_OBJCOPY_FLAGS) $(LOCAL_OBJCOPY_FLAGS) + +skip_build_from_source := +ifdef LOCAL_PREBUILT_MODULE_FILE +ifeq (,$(call if-build-from-source,$(LOCAL_MODULE),$(LOCAL_PATH))) +include $(BUILD_SYSTEM)/prebuilt_internal.mk +skip_build_from_source := true +endif +endif + +ifndef skip_build_from_source + +ifdef LOCAL_IS_HOST_MODULE +$(error This file should not be used to build host binaries. 
diff --git a/core/abl_executable.mk b/core/abl_executable.mk
new file mode 100755
index 0000000..3fa5f97
--- /dev/null
+++ b/core/abl_executable.mk
@@ -0,0 +1,55 @@
+ifeq ($(strip $(LOCAL_MODULE_CLASS)),)
+LOCAL_MODULE_CLASS := ABL
+endif
+
+ifeq ($(strip $(LOCAL_MODULE_SUFFIX)),)
+LOCAL_MODULE_SUFFIX := .abl
+endif
+
+ifeq ($(strip $(LOCAL_MODULE_PATH)),)
+LOCAL_MODULE_PATH := $(PRODUCT_OUT)/abl
+endif
+
+LOCAL_CC := $(IAFW_CC)
+LOCAL_CLANG := true
+LOCAL_SANITIZE := never
+LOCAL_NO_DEFAULT_COMPILER_FLAGS := true
+LOCAL_CFLAGS += $(TARGET_IAFW_GLOBAL_CFLAGS)
+LOCAL_ASFLAGS += $(TARGET_IAFW_ASFLAGS)
+LOCAL_LDFLAGS := $(TARGET_IAFW_GLOBAL_LDFLAGS) -static \
+	-T $(TARGET_ABL_LDS) $(LOCAL_LDFLAGS)
+# If the kernel enforces superpages, the .text section gets aligned at
+# offset 0x200000, which breaks multiboot compliance.
+LOCAL_LDFLAGS += -z max-page-size=0x1000
+LOCAL_ABL_LDFLAGS := $(LOCAL_LDFLAGS)
+LOCAL_OBJCOPY_FLAGS := $(TARGET_IAFW_GLOBAL_OBJCOPY_FLAGS) $(LOCAL_OBJCOPY_FLAGS)
+
+skip_build_from_source :=
+ifdef LOCAL_PREBUILT_MODULE_FILE
+ifeq (,$(call if-build-from-source,$(LOCAL_MODULE),$(LOCAL_PATH)))
+include $(BUILD_SYSTEM)/prebuilt_internal.mk
+skip_build_from_source := true
+endif
+endif
+
+ifndef skip_build_from_source
+
+ifdef LOCAL_IS_HOST_MODULE
+$(error This file should not be used to build host binaries. Included by (or near) $(lastword $(filter-out config/%,$(MAKEFILE_LIST))))
+endif
+
+WITHOUT_LIBCOMPILER_RT := true
+include $(BUILD_SYSTEM)/binary.mk
+WITHOUT_LIBCOMPILER_RT :=
+
+LIBPAYLOAD_CRT0_LIB := $(call intermediates-dir-for,STATIC_LIBRARIES,$(LIBPAYLOAD_CRT0))/$(LIBPAYLOAD_CRT0).a
+all_objects += $(LIBPAYLOAD_CRT0_LIB)
+
+$(LOCAL_BUILT_MODULE): PRIVATE_OBJCOPY_FLAGS := $(LOCAL_OBJCOPY_FLAGS)
+
+#$(LOCAL_BUILT_MODULE): $(all_objects) $(all_libraries) $(ABLIMAGE) $(ABLSIGN)
+$(LOCAL_BUILT_MODULE): $(all_objects) $(all_libraries)
+	$(call transform-o-to-sbl-executable,$(LOCAL_ABL_LDFLAGS))
+
+endif # skip_build_from_source
+
diff --git a/core/definitions.mk b/core/definitions.mk
index 5a4a9f8..c31fe1d 100755
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -5,6 +5,7 @@ IAFW_BUILD_SYSTEM := $(INTEL_PATH_BUILD)/core
 BUILD_EFI_STATIC_LIBRARY := $(IAFW_BUILD_SYSTEM)/iafw_static_library.mk
 BUILD_IAFW_STATIC_LIBRARY := $(IAFW_BUILD_SYSTEM)/iafw_static_library.mk
 BUILD_EFI_EXECUTABLE := $(IAFW_BUILD_SYSTEM)/efi_executable.mk
+BUILD_ABL_EXECUTABLE := $(IAFW_BUILD_SYSTEM)/abl_executable.mk
 
 # Override default definition
 CLEAR_VARS := $(IAFW_BUILD_SYSTEM)/clear_vars.mk
@@ -15,19 +16,36 @@ GENERATE_VERITY_KEY := $(HOST_OUT_EXECUTABLES)/generate_verity_key$(HOST_EXECUTABLE_SUFFIX)
 OPENSSL := openssl
 SBSIGN := sbsign
 MKDOSFS := mkdosfs
-MKEXT2IMG := $(HOST_OUT_EXECUTABLES)/mkext2img
-DUMPEXT2IMG := $(HOST_OUT_EXECUTABLES)/dumpext2img
+#MKEXT2IMG := $(HOST_OUT_EXECUTABLES)/mkext2img
+#DUMPEXT2IMG := $(HOST_OUT_EXECUTABLES)/dumpext2img
 MCOPY := mcopy
 SESL := sign-efi-sig-list$(HOST_EXECUTABLE_SUFFIX)
 CTESL := cert-to-efi-sig-list$(HOST_EXECUTABLE_SUFFIX)
 IASL := $(INTEL_PATH_BUILD)/acpi-tools/linux64/bin/iasl
 
+# Symbol archives generated for the ABL bootloader binaries
+KF4ABL_SYMBOLS_ZIP := $(PRODUCT_OUT)/kf4abl_symbols.zip
+FB4ABL_SYMBOLS_ZIP := $(PRODUCT_OUT)/fb4abl_symbols.zip
+
 # Extra host tools we need built to use our *_from_target_files
 # or sign_target_files_* scripts
 INTEL_OTATOOLS := \
     $(GENERATE_VERITY_KEY) \
     $(AVBTOOL)
 
+ifeq ($(KERNELFLINGER_SUPPORT_NON_EFI_BOOT),true)
+# Non-UEFI platform
+# $(MKEXT2IMG) and $(DUMPEXT2IMG) are no longer built,
+# so they are not listed here.
+INTEL_OTATOOLS += \
+    $(FASTBOOT) \
+    $(IASL)
+endif
+
+ifeq ($(BOARD_USE_ABL),true)
+INTEL_OTATOOLS += abl_toolchain
+endif
+
 otatools: $(INTEL_OTATOOLS)
 
 # FIXME: may be unsafe to omit -no-sse
@@ -73,6 +91,7 @@ GNU_EFI_CRT0 := crt0-efi-$(TARGET_IAFW_ARCH_NAME)
 LIBPAYLOAD_CRT0 := crt0-libpayload-$(TARGET_IAFW_ARCH_NAME)
 
 TARGET_EFI_LDS := $(IAFW_BUILD_SYSTEM)/elf_$(TARGET_IAFW_ARCH_NAME)_efi.lds
+TARGET_ABL_LDS := $(IAFW_BUILD_SYSTEM)/elf_$(TARGET_IAFW_ARCH_NAME)_abl.lds
 TARGET_IAFW_GLOBAL_OBJCOPY_FLAGS := \
 	-j .text -j .sdata -j .data \
 	-j .dynamic -j .dynsym -j .rel \
@@ -125,6 +144,32 @@ $(hide) $(IAFW_OBJCOPY) $(PRIVATE_OBJCOPY_FLAGS) \
 $(hide) $(SBSIGN) --key $1 --cert $2 --output $@ $(@:.efi=.efiunsigned)
 endef
 
+define transform-o-to-sbl-executable
+@echo "target ABL Executable: $(PRIVATE_MODULE) ($@)"
+$(hide) mkdir -p $(dir $@)
+$(hide) $(IAFW_LD) $1 \
+	--defsym=CONFIG_LP_BASE_ADDRESS=$(LIBPAYLOAD_BASE_ADDRESS) \
+	--defsym=CONFIG_LP_HEAP_SIZE=$(LIBPAYLOAD_HEAP_SIZE) \
+	--defsym=CONFIG_LP_STACK_SIZE=$(LIBPAYLOAD_STACK_SIZE) \
+	--whole-archive $(call module-built-files,$(LIBPAYLOAD_CRT0)) --no-whole-archive \
+	$(PRIVATE_ALL_OBJECTS) --start-group $(PRIVATE_ALL_STATIC_LIBRARIES) --end-group $(IAFW_LIBCLANG) \
+	-Map $(@:.abl=.map) -o $(@:.abl=.sym.elf)
+$(hide) $(IAFW_STRIP) --strip-all $(@:.abl=.sym.elf) -o $(@:.abl=.elf)
+
+$(hide) cp $(@:.abl=.elf) $@
+
+
+python3 $(INTEL_PATH_BUILD)/GenContainer.py
create -t NORMAL -cl ELF1:$@ -k $(INTEL_PATH_BUILD)/testkeys/OS1_TestKey_Priv_RSA2048.pem -o $(PRODUCT_OUT)/sbl_os + + +$(hide) if [ "$(PRIVATE_MODULE:debug=)" = fb4abl-user ]; then \ + zip -juy $(FB4ABL_SYMBOLS_ZIP) $(@:.abl=.map) $(@:.abl=.sym.elf); \ + zip -juy $(FB4ABL_SYMBOLS_ZIP) $@; \ +elif [ "$(PRIVATE_MODULE:debug=)" = kf4abl-user ]; then \ + zip -juy $(KF4ABL_SYMBOLS_ZIP) $(@:.abl=.map) $(@:.abl=.sym.elf); \ +fi +endef + # Hook up the prebuilts generation mechanism include $(INTEL_PATH_COMMON)/external/external.mk diff --git a/core/elf_ia32_abl.lds b/core/elf_ia32_abl.lds new file mode 100644 index 0000000..bcb2165 --- /dev/null +++ b/core/elf_ia32_abl.lds @@ -0,0 +1,87 @@ +/* + * This file is part of the libpayload project. + * + * Copyright (C) 2008 Advanced Micro Devices, Inc. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * 3. The name of the author may not be used to endorse or promote products + * derived from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND + * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS + * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT + * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY + * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + * SUCH DAMAGE. + */ + +OUTPUT_FORMAT(elf32-i386) +OUTPUT_ARCH(i386) + +ENTRY(_entry) + +SECTIONS +{ + . = CONFIG_LP_BASE_ADDRESS; + + . = ALIGN(16); + _start = .; + + .text : { + *(.text._entry) + *(.text) + *(.text.*) + } + + .rodata : { + *(.rodata) + *(.rodata.*) + } + + .data : { + *(.data) + *(.data.*) + } + + _edata = .; + + .bss : { + *(.sbss) + *(.sbss.*) + *(.bss) + *(.bss.*) + *(COMMON) + + /* Stack and heap */ + + . = ALIGN(16); + _heap = .; + . += CONFIG_LP_HEAP_SIZE; + . = ALIGN(16); + _eheap = .; + + _estack = .; + . += CONFIG_LP_STACK_SIZE; + . = ALIGN(16); + _stack = .; + } + + _end = .; + + /DISCARD/ : { + *(.comment) + *(.note*) + } +} diff --git a/core/elf_x86_64_abl.lds b/core/elf_x86_64_abl.lds new file mode 100644 index 0000000..1eb9e25 --- /dev/null +++ b/core/elf_x86_64_abl.lds @@ -0,0 +1,87 @@ +/* + * This file is part of the libpayload project. + * + * Copyright (C) 2008 Advanced Micro Devices, Inc. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. 
Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * 3. The name of the author may not be used to endorse or promote products + * derived from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND + * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS + * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT + * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY + * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + * SUCH DAMAGE. + */ + +OUTPUT_FORMAT("elf64-x86-64", "elf64-x86-64", "elf64-x86-64") +OUTPUT_ARCH(i386:x86-64) + +ENTRY(_entry) + +SECTIONS +{ + . = CONFIG_LP_BASE_ADDRESS; + + . = ALIGN(16); + _start = .; + + .text : { + *(.text._entry) + *(.text) + *(.text.*) + } + + .rodata : { + *(.rodata) + *(.rodata.*) + } + + .data : { + *(.data) + *(.data.*) + } + + _edata = .; + + .bss : { + *(.sbss) + *(.sbss.*) + *(.bss) + *(.bss.*) + *(COMMON) + + /* Stack and heap */ + + . = ALIGN(16); + _heap = .; + . += CONFIG_LP_HEAP_SIZE; + . = ALIGN(16); + _eheap = .; + + _estack = .; + . += CONFIG_LP_STACK_SIZE; + . = ALIGN(32); + _stack = .; + } + + _end = .; + + /DISCARD/ : { + *(.comment) + *(.note*) + } +} diff --git a/testkeys/OS1_TestKey_Priv_RSA2048.pem b/testkeys/OS1_TestKey_Priv_RSA2048.pem new file mode 100644 index 0000000..3aab5f1 --- /dev/null +++ b/testkeys/OS1_TestKey_Priv_RSA2048.pem @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEpgIBAAKCAQEAzLiDFGCKzj/TfsxpIxtQ1f2YI/5/7tmyPj23Zy/7bbnqrsfR +CQyBbDElte1R9cCAYhK9aN5JTuparnsh3eFIhq9DeuyFtQtjo54t3AQ6cUVx+wyP +8D8gFjC0TlIhin0xW/xwmMptXdxjxv44mEfI1ioIvGHSMNtic0jA9WRimlL+qIJ/ +P4fWisU24x5yoPDjGM5C3h5LWvcqGi1HGtyH0I6l74NcPahJl/dVV0di1s6eURXT +gCSwJx2zL/H4Xf/qy5uh3fGKod4ay8MwaHOlVHc2CxnkoBjoj9mEUhNasVOiOmjY +W7yxi9WDdSlDoZDrCyiGHtKmLnTBuTLJlU6pcQIDAQABAoIBAQCEm/CsvmyrdUS2 +mgpwpz0RoJdwmWadfX6sOqYWvUoMpaWTWfPZ/LPJNXzL/9Jbcjq3TJRM3dB2we/D +nhct9sRYGieH9LYXtCzyy3/BSSviO629hUGnyfwq45moMiNv5fCXOUpmbpmxdxVa +zUozuiwqLkhCXsscwr9fFf2H92K3u2md8xSeqEqgcz5eDWRryAB12jndQICJvpvb +mS9yvhfZnZS3s66Pqfcf4MEUeKkxLgs7F0gTPxKSD5wwurjWd9p48b10YphH9B3S +vjKIvrbwd4rRQdDIeHspJ+evhhXU/MIjsx3DwFJgCOOIghwqriEAd/BNhurS0qXS +YrlhhekBAoGBAPfNk7E3lkRPC7laz24Co9effrPOSHfdAO6fRjLqu/5Cg6MSxT6K +d/JC6hlv8kuFRr2hVC8GzaopmNLzgTq5/CEFDADdnirJ9kJDj+toicQL3+ntN2sT +usk1I2Ym+RnuJFw5himq4/t7MP1BOlfRd6MhEuupWSvFcvn1IUu9qv7JAoGBANN+ +HBCq+XN/iuUgOfgj+p7kdz18j2GHeIa5tVirQeGsBGl3gLzVH9l0sxxeoEqdDzIv +j3Nxstero1PfMZsRTfSomd2zsRshYH17FAAjue28MJGbBcCGEHpg5GCmeP5Wemqw +dRDreRTPwroOLGcFILZTiYzSTFhQ8vdHW4upkmFpAoGBAOMPUPx6lcsjrvd5eUYx +TxXdhIiXRRnnvNnpwbYYFohFRD9hjLDgykWHAOCsDhUbGxXZRL0Oe9cm5GyvSUOZ +dah/6l7EsxA8HNTc+XvYPxwPUW4Uucn4uxdxBG2u2mPjLRgAcamud9ZQLDRy11Xu +jrRAMwkTFK0h8FePQKIZOhzZAoGBAJqxY1XNuSgB4lY1fDFEH8yYz57pvPZZYI8q +xrpUiZvHdnKuNjvop6Mm+NuHWzc4KiVLJGIMiAO4R37RgjcqMRsDN4gyJxjxEOK3 +Y6m2eAir3dqiyVJIvUTGGkWXfmiKWIAxEW+OzMGsNYJhW5j+5y51a+bU64Ktswmr 
+6An/vlIBAoGBAO44DdQRrqmndVjXu/+yMLYmRxG7CCzUyxgxUeaR2fco1Qss/2Me +fgk3T85tnFM4rq3gvSL4oyjUgMZJ8vtnWl+6IppSru9zzmEZ+aqT+GsHWsK6Tkcv +SvZswG++MVPfOLAveappYkaSLhbbBi3ARfM7G90RG0SK3JLQNXVAX2Q1 +-----END RSA PRIVATE KEY-----
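# Editorial example (not part of the patch): the transform-o-to-sbl-executable
# rule above packs the stripped ELF into a Slim Bootloader container with
# GenContainer.py, signed with the test key above. A sketch of reproducing
# that step outside the Android build; the GenContainer.py, ELF, and output
# paths are placeholders.
import subprocess

subprocess.check_call([
    'python3', 'GenContainer.py', 'create',
    '-t', 'NORMAL',                                   # container type used by the make rule
    '-cl', 'ELF1:out/abl/kf4abl.elf',                 # component list entry (placeholder ELF path)
    '-k', 'testkeys/OS1_TestKey_Priv_RSA2048.pem',    # signing key added by this patch
    '-o', 'out/sbl_os',                               # container image consumed by SBL
])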