Mirror of https://github.com/platomav/BIOSUtilities.git (synced 2025-05-13 06:34:42 -04:00)

Commit f2be701423 (parent 46172a218b)

Added Dell PFS Update Extractor v6.0_a1
Adjusted dependencies

12 changed files with 1242 additions and 211 deletions
AMI_PFAT_Extract.py

@@ -7,7 +7,7 @@ AMI BIOS Guard Extractor
 Copyright (C) 2018-2022 Plato Mavropoulos
 """

-title = 'AMI BIOS Guard Extractor v4.0_a1'
+title = 'AMI BIOS Guard Extractor v4.0_a2'

 import os
 import re
@@ -18,13 +18,12 @@ import ctypes
 # Stop __pycache__ generation
 sys.dont_write_bytecode = True

-from common.patterns import PAT_AMI_PFAT
 from common.externals import get_bgs_tool
 from common.num_ops import get_ordinal
-from common.text_ops import padder
-from common.path_ops import argparse_init, process_input_files, safe_name
+from common.path_ops import argparse_init, safe_name
+from common.patterns import PAT_AMI_PFAT
 from common.struct_ops import get_struct, char, uint8_t, uint16_t, uint32_t
-from common.system import nice_exc_handler, check_sys_py, check_sys_os, show_title, print_input
+from common.system import script_init, script_title, printer

 class AmiBiosGuardHeader(ctypes.LittleEndianStructure):
 _pack_ = 1
@@ -36,13 +35,11 @@ class AmiBiosGuardHeader(ctypes.LittleEndianStructure):
 # 0x11
 ]

-def struct_print(self, padding):
-p = padder(padding)
-
-print(p + 'Size :', '0x%X' % self.Size)
-print(p + 'Checksum:', '0x%0.4X' % self.Checksum)
-print(p + 'Tag :', self.Tag.decode('utf-8'))
-print(p + 'Flags :', '0x%0.2X' % self.Flags)
+def struct_print(self, p):
+printer(['Size :', '0x%X' % self.Size], p, False)
+printer(['Checksum:', '0x%0.4X' % self.Checksum], p, False)
+printer(['Tag :', self.Tag.decode('utf-8')], p, False)
+printer(['Flags :', '0x%0.2X' % self.Flags], p, False)

 class IntelBiosGuardHeader(ctypes.LittleEndianStructure):
 _pack_ = 1
|
@ -77,25 +74,23 @@ class IntelBiosGuardHeader(ctypes.LittleEndianStructure):
|
||||||
|
|
||||||
return attr.b.SFAM, attr.b.ProtectEC, attr.b.GFXMitDis, attr.b.FTU, attr.b.Reserved
|
return attr.b.SFAM, attr.b.ProtectEC, attr.b.GFXMitDis, attr.b.FTU, attr.b.Reserved
|
||||||
|
|
||||||
def struct_print(self, padding):
|
def struct_print(self, p):
|
||||||
p = padder(padding)
|
|
||||||
|
|
||||||
no_yes = ['No','Yes']
|
no_yes = ['No','Yes']
|
||||||
f1,f2,f3,f4,f5 = self.get_flags()
|
f1,f2,f3,f4,f5 = self.get_flags()
|
||||||
|
|
||||||
print(p + 'BIOS Guard Version :', '%d.%d' % (self.BGVerMajor, self.BGVerMinor))
|
printer(['BIOS Guard Version :', '%d.%d' % (self.BGVerMajor, self.BGVerMinor)], p, False)
|
||||||
print(p + 'Platform Identity :', self.get_platform_id())
|
printer(['Platform Identity :', self.get_platform_id()], p, False)
|
||||||
print(p + 'Signed Flash Address Map :', no_yes[f1])
|
printer(['Signed Flash Address Map :', no_yes[f1]], p, False)
|
||||||
print(p + 'Protected EC OpCodes :', no_yes[f2])
|
printer(['Protected EC OpCodes :', no_yes[f2]], p, False)
|
||||||
print(p + 'Graphics Security Disable :', no_yes[f3])
|
printer(['Graphics Security Disable :', no_yes[f3]], p, False)
|
||||||
print(p + 'Fault Tolerant Update :', no_yes[f4])
|
printer(['Fault Tolerant Update :', no_yes[f4]], p, False)
|
||||||
print(p + 'Attributes Reserved :', '0x%X' % f5)
|
printer(['Attributes Reserved :', '0x%X' % f5], p, False)
|
||||||
print(p + 'Script Version :', '%d.%d' % (self.ScriptVerMajor, self.ScriptVerMinor))
|
printer(['Script Version :', '%d.%d' % (self.ScriptVerMajor, self.ScriptVerMinor)], p, False)
|
||||||
print(p + 'Script Size :', '0x%X' % self.ScriptSize)
|
printer(['Script Size :', '0x%X' % self.ScriptSize], p, False)
|
||||||
print(p + 'Data Size :', '0x%X' % self.DataSize)
|
printer(['Data Size :', '0x%X' % self.DataSize], p, False)
|
||||||
print(p + 'BIOS Security Version Number:', '0x%X' % self.BIOSSVN)
|
printer(['BIOS Security Version Number:', '0x%X' % self.BIOSSVN], p, False)
|
||||||
print(p + 'EC Security Version Number :', '0x%X' % self.ECSVN)
|
printer(['EC Security Version Number :', '0x%X' % self.ECSVN], p, False)
|
||||||
print(p + 'Vendor Information :', '0x%X' % self.VendorInfo)
|
printer(['Vendor Information :', '0x%X' % self.VendorInfo], p, False)
|
||||||
|
|
||||||
class IntelBiosGuardHeaderAttributes(ctypes.LittleEndianStructure):
|
class IntelBiosGuardHeaderAttributes(ctypes.LittleEndianStructure):
|
||||||
_fields_ = [
|
_fields_ = [
|
||||||
|
@ -123,17 +118,15 @@ class IntelBiosGuardSignature2k(ctypes.LittleEndianStructure):
|
||||||
# 0x20C
|
# 0x20C
|
||||||
]
|
]
|
||||||
|
|
||||||
def struct_print(self, padding):
|
def struct_print(self, p):
|
||||||
p = padder(padding)
|
|
||||||
|
|
||||||
Modulus = '%0.*X' % (0x100 * 2, int.from_bytes(self.Modulus, 'little'))
|
Modulus = '%0.*X' % (0x100 * 2, int.from_bytes(self.Modulus, 'little'))
|
||||||
Signature = '%0.*X' % (0x100 * 2, int.from_bytes(self.Signature, 'little'))
|
Signature = '%0.*X' % (0x100 * 2, int.from_bytes(self.Signature, 'little'))
|
||||||
|
|
||||||
print(p + 'Unknown 0:', '0x%X' % self.Unknown0)
|
printer(['Unknown 0:', '0x%X' % self.Unknown0], p, False)
|
||||||
print(p + 'Unknown 1:', '0x%X' % self.Unknown1)
|
printer(['Unknown 1:', '0x%X' % self.Unknown1], p, False)
|
||||||
print(p + 'Modulus :', '%s [...]' % Modulus[:32])
|
printer(['Modulus :', '%s [...]' % Modulus[:32]], p, False)
|
||||||
print(p + 'Exponent :', '0x%X' % self.Exponent)
|
printer(['Exponent :', '0x%X' % self.Exponent], p, False)
|
||||||
print(p + 'Signature:', '%s [...]' % Signature[:32])
|
printer(['Signature:', '%s [...]' % Signature[:32]], p, False)
|
||||||
|
|
||||||
def get_ami_pfat(input_buffer):
|
def get_ami_pfat(input_buffer):
|
||||||
match = PAT_AMI_PFAT.search(input_buffer)
|
match = PAT_AMI_PFAT.search(input_buffer)
|
||||||
@@ -149,33 +142,33 @@ def parse_bg_script(script_data, padding):
 is_opcode_div = len(script_data) % 8 == 0

 if not is_opcode_div:
-print('%sError: Script not divisible by OpCode length!' % padder(padding))
+printer('Error: Script not divisible by OpCode length!', padding, False)

 return 1

 is_begin_end = script_data[:8] + script_data[-8:] == b'\x01' + b'\x00' * 7 + b'\xFF' + b'\x00' * 7

 if not is_begin_end:
-print('%sError: Script lacks Begin and/or End OpCodes!' % padder(padding))
+printer('Error: Script lacks Begin and/or End OpCodes!', padding, False)

 return 2

 BigScript = get_bgs_tool()

 if not BigScript:
-print('%sError: BIOS Guard Script Tool dependency missing!' % padder(padding))
+printer('Error: BIOS Guard Script Tool dependency missing!', padding, False)

 return 3

 script = BigScript(code_bytes=script_data).to_string().replace('\t',' ').split('\n')

 for opcode in script:
-if opcode.endswith(('begin','end')): spacing = padder(padding)
-elif opcode.endswith(':'): spacing = padder(padding + 4)
-else: spacing = padder(padding + 12)
+if opcode.endswith(('begin','end')): spacing = padding
+elif opcode.endswith(':'): spacing = padding + 4
+else: spacing = padding + 12

 operands = [operand for operand in opcode.split(' ') if operand]
-print(spacing + ('{:<12s}' + '{:<11s}' * (len(operands) - 1)).format(*operands))
+printer(('{:<12s}' + '{:<11s}' * (len(operands) - 1)).format(*operands), spacing, False)

 return 0
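Aside: the opcode line above is built with '{:<12s}' plus one '{:<11s}' per extra operand, so the mnemonic is left-aligned to 12 characters and every following operand to 11; the new code then hands the padding count to printer() instead of prepending padder() spaces. A quick illustration with made-up operands (not taken from a real BIOS Guard script):

# Illustration only: hypothetical operands, not from a real BIOS Guard script
operands = ['write', 'buffer', '0x10']

# Mnemonic left-aligned to 12 characters, each further operand to 11
line = ('{:<12s}' + '{:<11s}' * (len(operands) - 1)).format(*operands)

print(line)  # columns stay aligned while each operand is shorter than its field width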
|
@ -188,7 +181,7 @@ def parse_pfat_hdr(buffer, padding):
|
||||||
hdr_data = buffer[PFAT_AMI_HDR_LEN:hdr_size]
|
hdr_data = buffer[PFAT_AMI_HDR_LEN:hdr_size]
|
||||||
hdr_text = hdr_data.decode('utf-8').splitlines()
|
hdr_text = hdr_data.decode('utf-8').splitlines()
|
||||||
|
|
||||||
print('\n%sAMI BIOS Guard Header:\n' % padder(padding))
|
printer('AMI BIOS Guard Header:\n', padding)
|
||||||
|
|
||||||
pfat_hdr.struct_print(padding + 4)
|
pfat_hdr.struct_print(padding + 4)
|
||||||
|
|
||||||
|
@ -198,7 +191,7 @@ def parse_pfat_hdr(buffer, padding):
|
||||||
|
|
||||||
hdr_tag,*hdr_indexes = hdr_title.split('II')
|
hdr_tag,*hdr_indexes = hdr_title.split('II')
|
||||||
|
|
||||||
print('\n%s%s\n' % (padder(padding + 4), hdr_tag))
|
printer(hdr_tag + '\n', padding + 4)
|
||||||
|
|
||||||
bgt_indexes = [int(h, 16) for h in re.findall(r'.{1,4}', hdr_indexes[0])] if hdr_indexes else []
|
bgt_indexes = [int(h, 16) for h in re.findall(r'.{1,4}', hdr_indexes[0])] if hdr_indexes else []
|
||||||
|
|
||||||
|
@ -218,7 +211,7 @@ def parse_pfat_hdr(buffer, padding):
|
||||||
|
|
||||||
block_all += [(desc, name, order, param, flags, index, i, count) for i in range(count)]
|
block_all += [(desc, name, order, param, flags, index, i, count) for i in range(count)]
|
||||||
|
|
||||||
_ = [print(padder(padding + 8) + block[0]) for block in block_all if block[6] == 0]
|
_ = [printer(block[0], padding + 8, False) for block in block_all if block[6] == 0]
|
||||||
|
|
||||||
return block_all, hdr_size, files_count
|
return block_all, hdr_size, files_count
|
||||||
|
|
||||||
|
@ -240,7 +233,7 @@ def parse_pfat_file(buffer, output_path, padding):
|
||||||
file_desc,file_name,_,_,_,file_index,block_index,block_count = block
|
file_desc,file_name,_,_,_,file_index,block_index,block_count = block
|
||||||
|
|
||||||
if block_index == 0:
|
if block_index == 0:
|
||||||
print('\n%s%s' % (padder(padding + 4), file_desc))
|
printer(file_desc, padding + 4)
|
||||||
|
|
||||||
file_path = os.path.join(extract_path, get_file_name(file_index + 1, file_name))
|
file_path = os.path.join(extract_path, get_file_name(file_index + 1, file_name))
|
||||||
|
|
||||||
|
@ -250,7 +243,7 @@ def parse_pfat_file(buffer, output_path, padding):
|
||||||
|
|
||||||
bg_hdr = get_struct(buffer, block_off, IntelBiosGuardHeader)
|
bg_hdr = get_struct(buffer, block_off, IntelBiosGuardHeader)
|
||||||
|
|
||||||
print('\n%sIntel BIOS Guard %s Header:\n' % (padder(padding + 8), block_status))
|
printer('Intel BIOS Guard %s Header:\n' % block_status, padding + 8)
|
||||||
|
|
||||||
bg_hdr.struct_print(padding + 12)
|
bg_hdr.struct_print(padding + 12)
|
||||||
|
|
||||||
|
@ -274,13 +267,13 @@ def parse_pfat_file(buffer, output_path, padding):
|
||||||
if len(bg_sig_bin) == PFAT_BLK_S2K_LEN:
|
if len(bg_sig_bin) == PFAT_BLK_S2K_LEN:
|
||||||
bg_sig = get_struct(bg_sig_bin, 0x0, IntelBiosGuardSignature2k)
|
bg_sig = get_struct(bg_sig_bin, 0x0, IntelBiosGuardSignature2k)
|
||||||
|
|
||||||
print('\n%sIntel BIOS Guard %s Signature:\n' % (padder(padding + 8), block_status))
|
printer('Intel BIOS Guard %s Signature:\n' % block_status, padding + 8)
|
||||||
|
|
||||||
bg_sig.struct_print(padding + 12)
|
bg_sig.struct_print(padding + 12)
|
||||||
|
|
||||||
block_off = bg_sig_end # Adjust next block to start at data + signature end
|
block_off = bg_sig_end # Adjust next block to start at data + signature end
|
||||||
|
|
||||||
print('\n%sIntel BIOS Guard %s Script:\n' % (padder(padding + 8), block_status))
|
printer('Intel BIOS Guard %s Script:\n' % block_status, padding + 8)
|
||||||
|
|
||||||
_ = parse_bg_script(bg_script_bin, padding + 12)
|
_ = parse_bg_script(bg_script_bin, padding + 12)
|
||||||
|
|
||||||
@@ -310,38 +303,26 @@ PFAT_BLK_S2K_LEN = ctypes.sizeof(IntelBiosGuardSignature2k)

 if __name__ == '__main__':
 # Show script title
-show_title(title)
+script_title(title)

 # Set argparse Arguments
 argparser = argparse_init()
 arguments = argparser.parse_args()

-# Pretty Python exception handler (must be after argparse)
-sys.excepthook = nice_exc_handler
-
-# Check Python Version (must be after argparse)
-check_sys_py()
-
-# Check OS Platform (must be after argparse)
-check_sys_os()
-
-# Process input files and generate output path
-input_files,output_path = process_input_files(arguments, sys.argv)
-
-# Initial output padding count
-padding = 4
+# Initialize script (must be after argparse)
+input_files,output_path,padding = script_init(arguments, 4)

 for input_file in input_files:
 input_name = os.path.basename(input_file)

-print('\n*** %s' % input_name)
+printer(['***', input_name], padding - 4)

 with open(input_file, 'rb') as in_file: input_buffer = in_file.read()

 pfat_match,pfat_buffer = get_ami_pfat(input_buffer)

 if not pfat_match:
-print('\n%sError: This is not an AMI BIOS Guard (PFAT) image!' % padder(padding))
+printer('Error: This is not an AMI BIOS Guard (PFAT) image!', padding)

 continue # Next input file

@@ -349,4 +330,4 @@ if __name__ == '__main__':

 parse_pfat_file(pfat_buffer, extract_path, padding)

-print_input('\nDone!')
+printer('Done!', pause=True)
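Note on the refactor above: print()/padder()/print_input() calls are replaced throughout by a single printer() helper from common/system.py, which is not part of this diff. Inferred purely from the call sites shown in this commit (a string or list message, a left-padding count, a new-line flag and a pause flag), a minimal stand-in could look like the sketch below; the actual upstream implementation may differ.

# Hypothetical stand-in for common.system.printer, inferred from its call sites
# in this commit; not the upstream implementation.
def printer(message=None, padding=0, new_line=True, pause=False):
    if message is None:
        message = ''
    elif isinstance(message, (list, tuple)):
        message = ' '.join(str(item) for item in message)  # e.g. ['***', input_name]
    
    text = ('\n' if new_line else '') + ' ' * padding + str(message)
    
    if pause:
        input(text)  # wait for Enter, like the old print_input()
    else:
        print(text)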
AMI_UCP_Extract.py

@@ -7,7 +7,7 @@ AMI UCP BIOS Extractor
 Copyright (C) 2021-2022 Plato Mavropoulos
 """

-title = 'AMI UCP BIOS Extractor v2.0_a1'
+title = 'AMI UCP BIOS Extractor v2.0_a2'

 import os
 import sys

@@ -19,14 +19,14 @@ import contextlib
 # Stop __pycache__ generation
 sys.dont_write_bytecode = True

-from common.patterns import PAT_AMI_UCP, PAT_INTEL_ENG
-from common.checksums import checksum16
-from common.text_ops import padder
 from common.a7z_comp import a7z_decompress, is_7z_supported
+from common.checksums import get_chk_16
 from common.efi_comp import efi_decompress, is_efi_compressed
-from common.path_ops import argparse_init, process_input_files, safe_name
+from common.path_ops import argparse_init, safe_name
+from common.patterns import PAT_AMI_UCP, PAT_INTEL_ENG
 from common.struct_ops import get_struct, char, uint8_t, uint16_t, uint32_t
-from common.system import nice_exc_handler, check_sys_py, check_sys_os, show_title, print_input
+from common.system import script_init, script_title, printer

 from AMI_PFAT_Extract import get_ami_pfat, parse_pfat_file

 class UafHeader(ctypes.LittleEndianStructure):
|
@ -41,15 +41,13 @@ class UafHeader(ctypes.LittleEndianStructure):
|
||||||
# 0x10
|
# 0x10
|
||||||
]
|
]
|
||||||
|
|
||||||
def struct_print(self, padding):
|
def struct_print(self, p):
|
||||||
p = padder(padding)
|
printer(['Tag :', self.ModuleTag.decode('utf-8')], p, False)
|
||||||
|
printer(['Size :', '0x%X' % self.ModuleSize], p, False)
|
||||||
print(p + 'Tag :', self.ModuleTag.decode('utf-8'))
|
printer(['Checksum :', '0x%0.4X' % self.Checksum], p, False)
|
||||||
print(p + 'Size :', '0x%X' % self.ModuleSize)
|
printer(['Unknown 0 :', '0x%0.2X' % self.Unknown0], p, False)
|
||||||
print(p + 'Checksum :', '0x%0.4X' % self.Checksum)
|
printer(['Unknown 1 :', '0x%0.2X' % self.Unknown1], p, False)
|
||||||
print(p + 'Unknown 0 :', '0x%0.2X' % self.Unknown0)
|
printer(['Reserved :', '0x%0.8X' % self.Reserved], p, False)
|
||||||
print(p + 'Unknown 1 :', '0x%0.2X' % self.Unknown1)
|
|
||||||
print(p + 'Reserved :', '0x%0.8X' % self.Reserved)
|
|
||||||
|
|
||||||
class UafModule(ctypes.LittleEndianStructure):
|
class UafModule(ctypes.LittleEndianStructure):
|
||||||
_pack_ = 1
|
_pack_ = 1
|
||||||
|
@ -59,12 +57,10 @@ class UafModule(ctypes.LittleEndianStructure):
|
||||||
# 0x08
|
# 0x08
|
||||||
]
|
]
|
||||||
|
|
||||||
def struct_print(self, padding, filename):
|
def struct_print(self, p, filename):
|
||||||
p = padder(padding)
|
printer(['Compress Size:', '0x%X' % self.CompressSize], p, False)
|
||||||
|
printer(['Original Size:', '0x%X' % self.OriginalSize], p, False)
|
||||||
print(p + 'Compress Size:', '0x%X' % self.CompressSize)
|
printer(['File Name :', filename], p, False)
|
||||||
print(p + 'Original Size:', '0x%X' % self.OriginalSize)
|
|
||||||
print(p + 'File Name :', filename)
|
|
||||||
|
|
||||||
class UiiHeader(ctypes.LittleEndianStructure):
|
class UiiHeader(ctypes.LittleEndianStructure):
|
||||||
_pack_ = 1
|
_pack_ = 1
|
||||||
|
@ -88,26 +84,24 @@ class UiiHeader(ctypes.LittleEndianStructure):
|
||||||
PTP = {1: 'Executable', 2: 'Library', 3: 'Driver'}
|
PTP = {1: 'Executable', 2: 'Library', 3: 'Driver'}
|
||||||
PMD = {1: 'API', 2: 'Console', 3: 'GUI', 4: 'Console/GUI'}
|
PMD = {1: 'API', 2: 'Console', 3: 'GUI', 4: 'Console/GUI'}
|
||||||
|
|
||||||
def struct_print(self, padding, description):
|
def struct_print(self, p, description):
|
||||||
p = padder(padding)
|
|
||||||
|
|
||||||
SupportBIOS = self.SBI.get(self.SupportBIOS, 'Unknown (%d)' % self.SupportBIOS)
|
SupportBIOS = self.SBI.get(self.SupportBIOS, 'Unknown (%d)' % self.SupportBIOS)
|
||||||
SupportOS = self.SOS.get(self.SupportOS, 'Unknown (%d)' % self.SupportOS)
|
SupportOS = self.SOS.get(self.SupportOS, 'Unknown (%d)' % self.SupportOS)
|
||||||
DataBusWidth = self.DBW.get(self.DataBusWidth, 'Unknown (%d)' % self.DataBusWidth)
|
DataBusWidth = self.DBW.get(self.DataBusWidth, 'Unknown (%d)' % self.DataBusWidth)
|
||||||
ProgramType = self.PTP.get(self.ProgramType, 'Unknown (%d)' % self.ProgramType)
|
ProgramType = self.PTP.get(self.ProgramType, 'Unknown (%d)' % self.ProgramType)
|
||||||
ProgramMode = self.PMD.get(self.ProgramMode, 'Unknown (%d)' % self.ProgramMode)
|
ProgramMode = self.PMD.get(self.ProgramMode, 'Unknown (%d)' % self.ProgramMode)
|
||||||
|
|
||||||
print(p + 'UII Size :', '0x%X' % self.UIISize)
|
printer(['UII Size :', '0x%X' % self.UIISize], p, False)
|
||||||
print(p + 'Checksum :', '0x%0.4X' % self.Checksum)
|
printer(['Checksum :', '0x%0.4X' % self.Checksum], p, False)
|
||||||
print(p + 'Tool Version :', '0x%0.8X' % self.UtilityVersion)
|
printer(['Tool Version :', '0x%0.8X' % self.UtilityVersion], p, False)
|
||||||
print(p + 'Info Size :', '0x%X' % self.InfoSize)
|
printer(['Info Size :', '0x%X' % self.InfoSize], p, False)
|
||||||
print(p + 'Supported BIOS:', SupportBIOS)
|
printer(['Supported BIOS:', SupportBIOS], p, False)
|
||||||
print(p + 'Supported OS :', SupportOS)
|
printer(['Supported OS :', SupportOS], p, False)
|
||||||
print(p + 'Data Bus Width:', DataBusWidth)
|
printer(['Data Bus Width:', DataBusWidth], p, False)
|
||||||
print(p + 'Program Type :', ProgramType)
|
printer(['Program Type :', ProgramType], p, False)
|
||||||
print(p + 'Program Mode :', ProgramMode)
|
printer(['Program Mode :', ProgramMode], p, False)
|
||||||
print(p + 'SourceSafe Tag:', '%0.2d' % self.SourceSafeRel)
|
printer(['SourceSafe Tag:', '%0.2d' % self.SourceSafeRel], p, False)
|
||||||
print(p + 'Description :', description)
|
printer(['Description :', description], p, False)
|
||||||
|
|
||||||
class DisHeader(ctypes.LittleEndianStructure):
|
class DisHeader(ctypes.LittleEndianStructure):
|
||||||
_pack_ = 1
|
_pack_ = 1
|
||||||
|
@ -118,12 +112,10 @@ class DisHeader(ctypes.LittleEndianStructure):
|
||||||
# 0x10
|
# 0x10
|
||||||
]
|
]
|
||||||
|
|
||||||
def struct_print(self, padding):
|
def struct_print(self, p):
|
||||||
p = padder(padding)
|
printer(['Password Size:', '0x%X' % self.PasswordSize], p, False)
|
||||||
|
printer(['Entry Count :', self.EntryCount], p, False)
|
||||||
print(p + 'Password Size:', '0x%X' % self.PasswordSize)
|
printer(['Password :', self.Password.decode('utf-8')], p, False)
|
||||||
print(p + 'Entry Count :', self.EntryCount)
|
|
||||||
print(p + 'Password :', self.Password.decode('utf-8'))
|
|
||||||
|
|
||||||
class DisModule(ctypes.LittleEndianStructure):
|
class DisModule(ctypes.LittleEndianStructure):
|
||||||
_pack_ = 1
|
_pack_ = 1
|
||||||
@@ -138,23 +130,21 @@ class DisModule(ctypes.LittleEndianStructure):
 ENDIS = {0: 'Disabled', 1: 'Enabled'}
 SHOWN = {0: 'Hidden', 1: 'Shown', 2: 'Shown Only'}

-def struct_print(self, padding):
-p = padder(padding)
-
+def struct_print(self, p):
 EnabledDisabled = self.ENDIS.get(self.EnabledDisabled, 'Unknown (%d)' % self.EnabledDisabled)
 ShownHidden = self.SHOWN.get(self.ShownHidden, 'Unknown (%d)' % self.ShownHidden)

-print(p + 'State :', EnabledDisabled)
-print(p + 'Display :', ShownHidden)
-print(p + 'Command :', self.Command.decode('utf-8').strip())
-print(p + 'Description:', self.Description.decode('utf-8').strip())
+printer(['State :', EnabledDisabled], p, False)
+printer(['Display :', ShownHidden], p, False)
+printer(['Command :', self.Command.decode('utf-8').strip()], p, False)
+printer(['Description:', self.Description.decode('utf-8').strip()], p, False)

 # Validate @UAF Module Checksum-16
 def chk16_validate(data, tag, padd=0):
-if checksum16(data) != 0:
-print_input('\n%sError: Invalid UCP Module %s Checksum!' % (padder(padd), tag))
+if get_chk_16(data) != 0:
+printer('Error: Invalid UCP Module %s Checksum!' % tag, padd, pause=True)
 else:
-print('\n%sChecksum of UCP Module %s is valid!' % (padder(padd), tag))
+printer('Checksum of UCP Module %s is valid!' % tag, padd)

 # Get all input file AMI UCP patterns
 def get_ami_ucp(buffer):
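chk16_validate() above now treats a module as intact when get_chk_16() over the whole buffer comes out to zero. common/checksums.py itself is not included in this commit; one plausible reading of that helper, given only as an assumption, is a 16-bit additive checksum over little-endian words:

# Assumed behaviour of common.checksums.get_chk_16 (module not shown in this
# diff): sum the buffer as little-endian 16-bit words modulo 0x10000, so a
# correctly stored checksum field drives the total to zero.
def get_chk_16(data):
    chk16 = 0
    
    for idx in range(0, len(data), 2):
        chk16 += int.from_bytes(data[idx:idx + 2], 'little')
    
    return chk16 & 0xFFFF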
|
@ -197,10 +187,10 @@ def get_uaf_mod(buffer, uaf_off=0x0):
|
||||||
return uaf_all
|
return uaf_all
|
||||||
|
|
||||||
# Parse & Extract AMI UCP structures
|
# Parse & Extract AMI UCP structures
|
||||||
def ucp_extract(buffer, output_path, padding=0, is_chk16=False):
|
def ucp_extract(buffer, output_path, padding=0, is_checksum=False):
|
||||||
nal_dict = {} # Initialize @NAL Dictionary per UCP
|
nal_dict = {} # Initialize @NAL Dictionary per UCP
|
||||||
|
|
||||||
print('\n%sUtility Configuration Program' % padder(padding))
|
printer('Utility Configuration Program', padding)
|
||||||
|
|
||||||
extract_path = os.path.join(output_path + '_extracted', '')
|
extract_path = os.path.join(output_path + '_extracted', '')
|
||||||
|
|
||||||
|
@ -210,7 +200,7 @@ def ucp_extract(buffer, output_path, padding=0, is_chk16=False):
|
||||||
|
|
||||||
uaf_hdr = get_struct(buffer, 0, UafHeader) # Parse @UAF Header Structure
|
uaf_hdr = get_struct(buffer, 0, UafHeader) # Parse @UAF Header Structure
|
||||||
|
|
||||||
print('\n%sUtility Auxiliary File > @UAF:\n' % padder(padding + 4))
|
printer('Utility Auxiliary File > @UAF:\n', padding + 4)
|
||||||
|
|
||||||
uaf_hdr.struct_print(padding + 8)
|
uaf_hdr.struct_print(padding + 8)
|
||||||
|
|
||||||
|
@ -220,15 +210,15 @@ def ucp_extract(buffer, output_path, padding=0, is_chk16=False):
|
||||||
|
|
||||||
uaf_mod.struct_print(padding + 8, UAF_TAG_DICT['@UAF'][0]) # Print @UAF Module EFI Info
|
uaf_mod.struct_print(padding + 8, UAF_TAG_DICT['@UAF'][0]) # Print @UAF Module EFI Info
|
||||||
|
|
||||||
if is_chk16: chk16_validate(buffer, '@UAF', padding + 8)
|
if is_checksum: chk16_validate(buffer, '@UAF', padding + 8)
|
||||||
|
|
||||||
uaf_all = get_uaf_mod(buffer, UAF_HDR_LEN)
|
uaf_all = get_uaf_mod(buffer, UAF_HDR_LEN)
|
||||||
|
|
||||||
for mod_info in uaf_all:
|
for mod_info in uaf_all:
|
||||||
nal_dict = uaf_extract(buffer, extract_path, mod_info, padding + 8, is_chk16, nal_dict)
|
nal_dict = uaf_extract(buffer, extract_path, mod_info, padding + 8, is_checksum, nal_dict)
|
||||||
|
|
||||||
# Parse & Extract AMI UCP > @UAF Module/Section
|
# Parse & Extract AMI UCP > @UAF Module/Section
|
||||||
def uaf_extract(buffer, extract_path, mod_info, padding=0, is_chk16=False, nal_dict=None):
|
def uaf_extract(buffer, extract_path, mod_info, padding=0, is_checksum=False, nal_dict=None):
|
||||||
if nal_dict is None: nal_dict = {}
|
if nal_dict is None: nal_dict = {}
|
||||||
|
|
||||||
uaf_tag,uaf_off,uaf_hdr = mod_info
|
uaf_tag,uaf_off,uaf_hdr = mod_info
|
||||||
|
@ -239,7 +229,7 @@ def uaf_extract(buffer, extract_path, mod_info, padding=0, is_chk16=False, nal_d
|
||||||
|
|
||||||
uaf_data_raw = uaf_data_mod[UAF_MOD_LEN:] # @UAF Module Raw Data
|
uaf_data_raw = uaf_data_mod[UAF_MOD_LEN:] # @UAF Module Raw Data
|
||||||
|
|
||||||
print('\n%sUtility Auxiliary File > %s:\n' % (padder(padding), uaf_tag))
|
printer('Utility Auxiliary File > %s:\n' % uaf_tag, padding)
|
||||||
|
|
||||||
uaf_hdr.struct_print(padding + 4) # Print @UAF Module Info
|
uaf_hdr.struct_print(padding + 4) # Print @UAF Module Info
|
||||||
|
|
||||||
|
@ -264,12 +254,12 @@ def uaf_extract(buffer, extract_path, mod_info, padding=0, is_chk16=False, nal_d
|
||||||
|
|
||||||
# Check if unknown @UAF Module Tag is present in NAL but not in built-in dictionary
|
# Check if unknown @UAF Module Tag is present in NAL but not in built-in dictionary
|
||||||
if uaf_tag in nal_dict and uaf_tag not in UAF_TAG_DICT and not uaf_tag.startswith(('@ROM','@R0','@S0','@DR','@DS')):
|
if uaf_tag in nal_dict and uaf_tag not in UAF_TAG_DICT and not uaf_tag.startswith(('@ROM','@R0','@S0','@DR','@DS')):
|
||||||
print_input('\n%sNote: Detected new AMI UCP Module %s (%s) in NAL!' % (padder(padding), uaf_tag, nal_dict[uaf_tag]))
|
printer('Note: Detected new AMI UCP Module %s (%s) in NAL!' % (uaf_tag, nal_dict[uaf_tag]), padding, pause=True)
|
||||||
|
|
||||||
# Generate @UAF Module File name, depending on whether decompression will be required
|
# Generate @UAF Module File name, depending on whether decompression will be required
|
||||||
uaf_fname = os.path.join(extract_path, safe_name(uaf_name + ('.temp' if is_comp else uaf_fext)))
|
uaf_fname = os.path.join(extract_path, safe_name(uaf_name + ('.temp' if is_comp else uaf_fext)))
|
||||||
|
|
||||||
if is_chk16: chk16_validate(uaf_data_all, uaf_tag, padding + 4)
|
if is_checksum: chk16_validate(uaf_data_all, uaf_tag, padding + 4)
|
||||||
|
|
||||||
# Parse Utility Identification Information @UAF Module (@UII)
|
# Parse Utility Identification Information @UAF Module (@UII)
|
||||||
if uaf_tag == '@UII':
|
if uaf_tag == '@UII':
|
||||||
|
@ -280,11 +270,11 @@ def uaf_extract(buffer, extract_path, mod_info, padding=0, is_chk16=False, nal_d
|
||||||
# Get @UII Module Info/Description text field
|
# Get @UII Module Info/Description text field
|
||||||
info_desc = info_data.decode('utf-8','ignore').strip('\x00 ')
|
info_desc = info_data.decode('utf-8','ignore').strip('\x00 ')
|
||||||
|
|
||||||
print('\n%sUtility Identification Information:\n' % padder(padding + 4))
|
printer('Utility Identification Information:\n', padding + 4)
|
||||||
|
|
||||||
info_hdr.struct_print(padding + 8, info_desc) # Print @UII Module Info
|
info_hdr.struct_print(padding + 8, info_desc) # Print @UII Module Info
|
||||||
|
|
||||||
if is_chk16: chk16_validate(uaf_data_raw, '@UII > Info', padding + 8)
|
if is_checksum: chk16_validate(uaf_data_raw, '@UII > Info', padding + 8)
|
||||||
|
|
||||||
# Store/Save @UII Module Info in file
|
# Store/Save @UII Module Info in file
|
||||||
with open(uaf_fname[:-4] + '.txt', 'a', encoding='utf-8') as uii_out:
|
with open(uaf_fname[:-4] + '.txt', 'a', encoding='utf-8') as uii_out:
|
||||||
|
@ -319,14 +309,14 @@ def uaf_extract(buffer, extract_path, mod_info, padding=0, is_chk16=False, nal_d
|
||||||
|
|
||||||
# Process and Print known text only @UAF Modules (after EFI/Tiano Decompression)
|
# Process and Print known text only @UAF Modules (after EFI/Tiano Decompression)
|
||||||
if uaf_tag in UAF_TAG_DICT and UAF_TAG_DICT[uaf_tag][2] == 'Text':
|
if uaf_tag in UAF_TAG_DICT and UAF_TAG_DICT[uaf_tag][2] == 'Text':
|
||||||
print('\n%s%s:' % (padder(padding + 4), UAF_TAG_DICT[uaf_tag][1]))
|
printer(UAF_TAG_DICT[uaf_tag][1] + ':', padding + 4)
|
||||||
print('\n%s%s' % (padder(padding + 8), uaf_data_raw.decode('utf-8','ignore')))
|
printer(uaf_data_raw.decode('utf-8','ignore'), padding + 8)
|
||||||
|
|
||||||
# Parse Default Command Status @UAF Module (@DIS)
|
# Parse Default Command Status @UAF Module (@DIS)
|
||||||
if len(uaf_data_raw) and uaf_tag == '@DIS':
|
if len(uaf_data_raw) and uaf_tag == '@DIS':
|
||||||
dis_hdr = get_struct(uaf_data_raw, 0x0, DisHeader) # Parse @DIS Module Raw Header Structure
|
dis_hdr = get_struct(uaf_data_raw, 0x0, DisHeader) # Parse @DIS Module Raw Header Structure
|
||||||
|
|
||||||
print('\n%sDefault Command Status Header:\n' % padder(padding + 4))
|
printer('Default Command Status Header:\n', padding + 4)
|
||||||
|
|
||||||
dis_hdr.struct_print(padding + 8) # Print @DIS Module Raw Header Info
|
dis_hdr.struct_print(padding + 8) # Print @DIS Module Raw Header Info
|
||||||
|
|
||||||
|
@ -341,14 +331,14 @@ def uaf_extract(buffer, extract_path, mod_info, padding=0, is_chk16=False, nal_d
|
||||||
for mod_idx in range(dis_hdr.EntryCount):
|
for mod_idx in range(dis_hdr.EntryCount):
|
||||||
dis_mod = get_struct(dis_data, mod_idx * DIS_MOD_LEN, DisModule) # Parse @DIS Module Raw Entry Structure
|
dis_mod = get_struct(dis_data, mod_idx * DIS_MOD_LEN, DisModule) # Parse @DIS Module Raw Entry Structure
|
||||||
|
|
||||||
print('\n%sDefault Command Status Entry %0.2d/%0.2d:\n' % (padder(padding + 8), mod_idx + 1, dis_hdr.EntryCount))
|
printer('Default Command Status Entry %0.2d/%0.2d:\n' % (mod_idx + 1, dis_hdr.EntryCount), padding + 8)
|
||||||
|
|
||||||
dis_mod.struct_print(padding + 12) # Print @DIS Module Raw Entry Info
|
dis_mod.struct_print(padding + 12) # Print @DIS Module Raw Entry Info
|
||||||
|
|
||||||
# Store/Save @DIS Module Entry Info in file
|
# Store/Save @DIS Module Entry Info in file
|
||||||
with open(uaf_fname[:-3] + 'txt', 'a', encoding='utf-8') as dis:
|
with open(uaf_fname[:-3] + 'txt', 'a', encoding='utf-8') as dis:
|
||||||
with contextlib.redirect_stdout(dis):
|
with contextlib.redirect_stdout(dis):
|
||||||
print()
|
printer()
|
||||||
dis_mod.struct_print(4) # Store @DIS Module Entry Info
|
dis_mod.struct_print(4) # Store @DIS Module Entry Info
|
||||||
|
|
||||||
os.remove(uaf_fname) # Delete @DIS Module binary, info exported as text
|
os.remove(uaf_fname) # Delete @DIS Module binary, info exported as text
|
||||||
|
@ -357,13 +347,13 @@ def uaf_extract(buffer, extract_path, mod_info, padding=0, is_chk16=False, nal_d
|
||||||
if len(uaf_data_raw) >= 5 and (uaf_tag,uaf_data_raw[0],uaf_data_raw[4]) == ('@NAL',0x40,0x3A):
|
if len(uaf_data_raw) >= 5 and (uaf_tag,uaf_data_raw[0],uaf_data_raw[4]) == ('@NAL',0x40,0x3A):
|
||||||
nal_info = uaf_data_raw.decode('utf-8','ignore').replace('\r','').strip().split('\n')
|
nal_info = uaf_data_raw.decode('utf-8','ignore').replace('\r','').strip().split('\n')
|
||||||
|
|
||||||
print('\n%s@UAF Module Name List:\n' % padder(padding + 4))
|
printer('@UAF Module Name List:\n', padding + 4)
|
||||||
|
|
||||||
# Parse all @NAL Module Entries
|
# Parse all @NAL Module Entries
|
||||||
for info in nal_info:
|
for info in nal_info:
|
||||||
info_tag,info_val = info.split(':',1)
|
info_tag,info_val = info.split(':',1)
|
||||||
|
|
||||||
print('%s%s : %s' % (padder(padding + 8), info_tag, info_val)) # Print @NAL Module Tag-Path Info
|
printer(info_tag + ' : ' + info_val, padding + 8, False) # Print @NAL Module Tag-Path Info
|
||||||
|
|
||||||
nal_dict[info_tag] = os.path.basename(info_val) # Assign a file name (w/o path) to each Tag
|
nal_dict[info_tag] = os.path.basename(info_val) # Assign a file name (w/o path) to each Tag
|
||||||
|
|
||||||
|
@ -371,7 +361,7 @@ def uaf_extract(buffer, extract_path, mod_info, padding=0, is_chk16=False, nal_d
|
||||||
if uaf_tag == '@INS' and is_7z_supported(uaf_fname):
|
if uaf_tag == '@INS' and is_7z_supported(uaf_fname):
|
||||||
ins_dir = os.path.join(extract_path, safe_name(uaf_tag + '_nested-SFX')) # Generate extraction directory
|
ins_dir = os.path.join(extract_path, safe_name(uaf_tag + '_nested-SFX')) # Generate extraction directory
|
||||||
|
|
||||||
print('\n%sInsyde BIOS 7z SFX Archive:' % padder(padding + 4))
|
printer('Insyde BIOS 7z SFX Archive:', padding + 4)
|
||||||
|
|
||||||
if a7z_decompress(uaf_fname, ins_dir, '7z SFX', padding + 8) == 0:
|
if a7z_decompress(uaf_fname, ins_dir, '7z SFX', padding + 8) == 0:
|
||||||
os.remove(uaf_fname) # Successful extraction, delete @INS Module file/archive
|
os.remove(uaf_fname) # Successful extraction, delete @INS Module file/archive
|
||||||
|
@ -388,8 +378,8 @@ def uaf_extract(buffer, extract_path, mod_info, padding=0, is_chk16=False, nal_d
|
||||||
|
|
||||||
# Detect Intel Engine firmware image and show ME Analyzer advice
|
# Detect Intel Engine firmware image and show ME Analyzer advice
|
||||||
if uaf_tag.startswith('@ME') and PAT_INTEL_ENG.search(uaf_data_raw):
|
if uaf_tag.startswith('@ME') and PAT_INTEL_ENG.search(uaf_data_raw):
|
||||||
print('\n%sIntel Management Engine (ME) Firmware:\n' % padder(padding + 4))
|
printer('Intel Management Engine (ME) Firmware:\n', padding + 4)
|
||||||
print('%sUse "ME Analyzer" from https://github.com/platomav/MEAnalyzer' % padder(padding + 8))
|
printer('Use "ME Analyzer" from https://github.com/platomav/MEAnalyzer', padding + 8, False)
|
||||||
|
|
||||||
# Get best Nested AMI UCP Pattern match based on @UAF Size
|
# Get best Nested AMI UCP Pattern match based on @UAF Size
|
||||||
nested_uaf_off,nested_uaf_bin = get_ami_ucp(uaf_data_raw)
|
nested_uaf_off,nested_uaf_bin = get_ami_ucp(uaf_data_raw)
|
||||||
|
@ -398,7 +388,7 @@ def uaf_extract(buffer, extract_path, mod_info, padding=0, is_chk16=False, nal_d
|
||||||
if nested_uaf_off:
|
if nested_uaf_off:
|
||||||
uaf_dir = os.path.join(extract_path, safe_name(uaf_tag + '_nested-UCP')) # Generate extraction directory
|
uaf_dir = os.path.join(extract_path, safe_name(uaf_tag + '_nested-UCP')) # Generate extraction directory
|
||||||
|
|
||||||
ucp_extract(nested_uaf_bin, uaf_dir, padding + 4, is_chk16) # Call recursively
|
ucp_extract(nested_uaf_bin, uaf_dir, padding + 4, is_checksum) # Call recursively
|
||||||
|
|
||||||
os.remove(uaf_fname) # Delete raw nested AMI UCP Structure after successful recursion/extraction
|
os.remove(uaf_fname) # Delete raw nested AMI UCP Structure after successful recursion/extraction
|
||||||
|
|
||||||
@@ -468,32 +458,22 @@ UAF_TAG_DICT = {

 if __name__ == '__main__':
 # Show script title
-show_title(title)
+script_title(title)

 # Set argparse Arguments
 argparser = argparse_init()
 argparser.add_argument('-c', '--checksum', help='verify AMI UCP Checksums (slow)', action='store_true')
 arguments = argparser.parse_args()

-# Pretty Python exception handler (must be after argparse)
-sys.excepthook = nice_exc_handler
-
-# Check Python Version (must be after argparse)
-check_sys_py()
-
-# Check OS Platform (must be after argparse)
-check_sys_os()
-
-# Process input files and generate output path
-input_files,output_path = process_input_files(arguments, sys.argv)
-
-# Initial output padding count
-padding = 4
+is_checksum = arguments.checksum # Set Checksum verification optional argument
+
+# Initialize script (must be after argparse)
+input_files,output_path,padding = script_init(arguments, 4)

 for input_file in input_files:
 input_name = os.path.basename(input_file)

-print('\n*** %s' % input_name)
+printer(['***', input_name], padding - 4)

 with open(input_file, 'rb') as in_file: input_buffer = in_file.read()

@@ -501,14 +481,14 @@ if __name__ == '__main__':
 main_uaf_off,main_uaf_bin = get_ami_ucp(input_buffer)

 if not main_uaf_off:
-print('\n%sError: This is not an AMI UCP BIOS executable!' % padder(padding))
+printer('Error: This is not an AMI UCP BIOS executable!', padding)

 continue # Next input file

 extract_path = os.path.join(output_path, input_name)

-ucp_extract(main_uaf_bin, extract_path, padding, arguments.checksum)
+ucp_extract(main_uaf_bin, extract_path, padding, is_checksum)

-print('\n%sExtracted AMI UCP BIOS executable!' % padder(padding))
+printer('Extracted AMI UCP BIOS executable!', padding)

-print_input('\nDone!')
+printer('Done!', pause=True)
Dell_PFS_Extract.py (new file, 976 lines)

@@ -0,0 +1,976 @@
#!/usr/bin/env python3
#coding=utf-8

"""
Dell PFS Extract
Dell PFS Update Extractor
Copyright (C) 2018-2022 Plato Mavropoulos
"""

title = 'Dell PFS Update Extractor v6.0_a1'

import os
import io
import sys
import lzma
import zlib
import shutil
import ctypes
import contextlib

# Skip __pycache__ generation
sys.dont_write_bytecode = True

from common.checksums import get_chk_8_xor
from common.path_ops import argparse_init, safe_name
from common.patterns import PAT_DELL_HDR, PAT_DELL_FTR, PAT_DELL_PKG
from common.struct_ops import get_struct, char, uint8_t, uint16_t, uint32_t, uint64_t
from common.system import script_init, script_title, printer

from AMI_PFAT_Extract import IntelBiosGuardHeader, IntelBiosGuardSignature2k, parse_bg_script

# Dell PFS Header Structure
|
||||||
|
class DellPfsHeader(ctypes.LittleEndianStructure):
|
||||||
|
_pack_ = 1
|
||||||
|
_fields_ = [
|
||||||
|
('Tag', char*8), # 0x00
|
||||||
|
('HeaderVersion', uint32_t), # 0x08
|
||||||
|
('PayloadSize', uint32_t), # 0x0C
|
||||||
|
# 0x10
|
||||||
|
]
|
||||||
|
|
||||||
|
def struct_print(self, p):
|
||||||
|
printer(['Header Tag :', self.Tag.decode('utf-8')], p, False)
|
||||||
|
printer(['Header Version:', self.HeaderVersion], p, False)
|
||||||
|
printer(['Payload Size :', '0x%X' % self.PayloadSize], p, False)
|
||||||
|
|
||||||
|
# Dell PFS Footer Structure
|
||||||
|
class DellPfsFooter(ctypes.LittleEndianStructure):
|
||||||
|
_pack_ = 1
|
||||||
|
_fields_ = [
|
||||||
|
('PayloadSize', uint32_t), # 0x00
|
||||||
|
('Checksum', uint32_t), # 0x04 ~CRC32 w/ Vector 0
|
||||||
|
('Tag', char*8), # 0x08
|
||||||
|
# 0x10
|
||||||
|
]
|
||||||
|
|
||||||
|
def struct_print(self, p):
|
||||||
|
printer(['Payload Size :', '0x%X' % self.PayloadSize], p, False)
|
||||||
|
printer(['Payload Checksum:', '0x%0.8X' % self.Checksum], p, False)
|
||||||
|
printer(['Footer Tag :', self.Tag.decode('utf-8')], p, False)
|
||||||
|
|
||||||
|
# Dell PFS Entry Base Structure
|
||||||
|
class DellPfsEntryBase(ctypes.LittleEndianStructure):
|
||||||
|
_pack_ = 1
|
||||||
|
_fields_ = [
|
||||||
|
('GUID', uint32_t*4), # 0x00 Little Endian
|
||||||
|
('HeaderVersion', uint32_t), # 0x10 1 or 2
|
||||||
|
('VersionType', uint8_t*4), # 0x14
|
||||||
|
('Version', uint16_t*4), # 0x18
|
||||||
|
('Reserved', uint64_t), # 0x20
|
||||||
|
('DataSize', uint32_t), # 0x28
|
||||||
|
('DataSigSize', uint32_t), # 0x2C
|
||||||
|
('DataMetSize', uint32_t), # 0x30
|
||||||
|
('DataMetSigSize', uint32_t), # 0x34
|
||||||
|
# 0x38 (parent class, base)
|
||||||
|
]
|
||||||
|
|
||||||
|
def struct_print(self, p):
|
||||||
|
GUID = '%0.*X' % (0x10 * 2, int.from_bytes(self.GUID, 'little'))
|
||||||
|
Unknown = '%0.*X' % (len(self.Unknown) * 8, int.from_bytes(self.Unknown, 'little'))
|
||||||
|
Version = get_entry_ver(self.Version, self.VersionType, padding - 4)
|
||||||
|
|
||||||
|
printer(['Entry GUID :', GUID], p, False)
|
||||||
|
printer(['Entry Version :', self.HeaderVersion], p, False)
|
||||||
|
printer(['Payload Version :', Version], p, False)
|
||||||
|
printer(['Reserved :', '0x%X' % self.Reserved], p, False)
|
||||||
|
printer(['Payload Data Size :', '0x%X' % self.DataSize], p, False)
|
||||||
|
printer(['Payload Signature Size :', '0x%X' % self.DataSigSize], p, False)
|
||||||
|
printer(['Metadata Data Size :', '0x%X' % self.DataMetSize], p, False)
|
||||||
|
printer(['Metadata Signature Size:', '0x%X' % self.DataMetSigSize], p, False)
|
||||||
|
printer(['Unknown :', '0x%s' % Unknown], p, False)
|
||||||
|
|
||||||
|
# Dell PFS Entry Revision 1 Structure
|
||||||
|
class DellPfsEntryR1(DellPfsEntryBase):
|
||||||
|
_pack_ = 1
|
||||||
|
_fields_ = [
|
||||||
|
('Unknown', uint32_t*4), # 0x38
|
||||||
|
# 0x48 (child class, R1)
|
||||||
|
]
|
||||||
|
|
||||||
|
# Dell PFS Entry Revision 2 Structure
|
||||||
|
class DellPfsEntryR2(DellPfsEntryBase):
|
||||||
|
_pack_ = 1
|
||||||
|
_fields_ = [
|
||||||
|
('Unknown', uint32_t*8), # 0x38
|
||||||
|
# 0x58 (child class, R2)
|
||||||
|
]
|
||||||
|
|
||||||
|
# Dell PFS Information Header Structure
|
||||||
|
class DellPfsInfo(ctypes.LittleEndianStructure):
|
||||||
|
_pack_ = 1
|
||||||
|
_fields_ = [
|
||||||
|
('HeaderVersion', uint32_t), # 0x00
|
||||||
|
('GUID', uint32_t*4), # 0x04 Little Endian
|
||||||
|
# 0x14
|
||||||
|
]
|
||||||
|
|
||||||
|
def struct_print(self, p):
|
||||||
|
GUID = '%0.*X' % (0x10 * 2, int.from_bytes(self.GUID, 'little'))
|
||||||
|
|
||||||
|
printer(['Info Version:', self.HeaderVersion], p, False)
|
||||||
|
printer(['Entry GUID :', GUID], p, False)
|
||||||
|
|
||||||
|
# Dell PFS FileName Header Structure
|
||||||
|
class DellPfsName(ctypes.LittleEndianStructure):
|
||||||
|
_pack_ = 1
|
||||||
|
_fields_ = [
|
||||||
|
('Version', uint16_t*4), # 0x00
|
||||||
|
('VersionType', uint8_t*4), # 0x08
|
||||||
|
('CharacterCount', uint16_t), # 0x0C UTF-16 2-byte Characters
|
||||||
|
# 0x0E
|
||||||
|
]
|
||||||
|
|
||||||
|
def struct_print(self, p):
|
||||||
|
Version = get_entry_ver(self.Version, self.VersionType, padding - 4)
|
||||||
|
|
||||||
|
printer(['Payload Version:', Version], p, False)
|
||||||
|
printer(['Character Count:', self.CharacterCount], p, False)
|
||||||
|
|
||||||
|
# Dell PFS Metadata Header Structure
|
||||||
|
class DellPfsMetadata(ctypes.LittleEndianStructure):
|
||||||
|
_pack_ = 1
|
||||||
|
_fields_ = [
|
||||||
|
('ModelIDs', char*501), # 0x000
|
||||||
|
('FileName', char*100), # 0x1F5
|
||||||
|
('FileVersion', char*33), # 0x259
|
||||||
|
('Date', char*33), # 0x27A
|
||||||
|
('Brand', char*80), # 0x29B
|
||||||
|
('ModelFile', char*80), # 0x2EB
|
||||||
|
('ModelName', char*100), # 0x33B
|
||||||
|
('ModelVersion', char*33), # 0x39F
|
||||||
|
# 0x3C0
|
||||||
|
]
|
||||||
|
|
||||||
|
def struct_print(self, p):
|
||||||
|
printer(['Model IDs :', self.ModelIDs.decode('utf-8').strip(',END')], p, False)
|
||||||
|
printer(['File Name :', self.FileName.decode('utf-8')], p, False)
|
||||||
|
printer(['File Version :', self.FileVersion.decode('utf-8')], p, False)
|
||||||
|
printer(['Date :', self.Date.decode('utf-8')], p, False)
|
||||||
|
printer(['Brand :', self.Brand.decode('utf-8')], p, False)
|
||||||
|
printer(['Model File :', self.ModelFile.decode('utf-8')], p, False)
|
||||||
|
printer(['Model Name :', self.ModelName.decode('utf-8')], p, False)
|
||||||
|
printer(['Model Version:', self.ModelVersion.decode('utf-8')], p, False)
|
||||||
|
|
||||||
|
# Dell PFS BIOS Guard Metadata Structure
|
||||||
|
class DellPfsPfatMetadata(ctypes.LittleEndianStructure):
|
||||||
|
_pack_ = 1
|
||||||
|
_fields_ = [
|
||||||
|
('OffsetTop', uint32_t), # 0x00
|
||||||
|
('Unknown0', uint32_t), # 0x04
|
||||||
|
('OffsetBase', uint32_t), # 0x08
|
||||||
|
('BlockSize', uint32_t), # 0x0C
|
||||||
|
('Unknown1', uint32_t), # 0x10
|
||||||
|
('Unknown2', uint32_t), # 0x14
|
||||||
|
('Unknown3', uint8_t), # 0x18
|
||||||
|
# 0x19
|
||||||
|
]
|
||||||
|
|
||||||
|
def struct_print(self, p):
|
||||||
|
printer(['Offset Top :', '0x%X' % self.OffsetTop], p, False)
|
||||||
|
printer(['Unknown 0 :', '0x%X' % self.Unknown0], p, False)
|
||||||
|
printer(['Offset Base:', '0x%X' % self.OffsetBase], p, False)
|
||||||
|
printer(['Block Size :', '0x%X' % self.BlockSize], p, False)
|
||||||
|
printer(['Unknown 1 :', '0x%X' % self.Unknown1], p, False)
|
||||||
|
printer(['Unknown 2 :', '0x%X' % self.Unknown2], p, False)
|
||||||
|
printer(['Unknown 3 :', '0x%X' % self.Unknown3], p, False)

# The Dell ThinOS PKG update images usually contain multiple sections.
# Each section starts with a 0x30 header, which begins with pattern 72135500.
# The section length is found at 0x10-0x14 and its (optional) MD5 hash at 0x20-0x30.
# Section data can be raw or LZMA2 (7zXZ) compressed. The latter contains the PFS update image.
def is_dell_pfs_pkg(in_buffer):
    return PAT_DELL_PKG.search(in_buffer)

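The comment block above documents the ThinOS PKG layout (0x30-byte section header starting with pattern 72135500, section length at 0x10-0x14, optional MD5 at 0x20-0x30, payload raw or XZ/LZMA2). Only the pattern check is visible in this excerpt, so the walker below is an illustrative sketch built from that comment alone; whether the length field covers the header and how an absent MD5 is encoded are assumptions, not upstream behaviour.

# Illustrative sketch based only on the layout comment above, not on the
# upstream ThinOS PKG parser. Assumes the length field counts the payload
# only and that an all-zero MD5 field means "no hash present".
import lzma
import hashlib

PKG_HDR_LEN = 0x30
PKG_HDR_MAGIC = bytes.fromhex('72135500')

def walk_thinos_pkg_sections(buffer):
    offset = buffer.find(PKG_HDR_MAGIC)
    
    while offset != -1:
        header = buffer[offset:offset + PKG_HDR_LEN]
        section_len = int.from_bytes(header[0x10:0x14], 'little')
        section_md5 = header[0x20:0x30]
        data = buffer[offset + PKG_HDR_LEN:offset + PKG_HDR_LEN + section_len]
        
        if section_md5 != b'\x00' * 0x10 and hashlib.md5(data).digest() != section_md5:
            print('Warning: MD5 mismatch for PKG section at 0x%X' % offset)
        
        if data.startswith(b'\xfd7zXZ\x00'):  # XZ container magic, i.e. an LZMA2 stream
            data = lzma.decompress(data)
        
        yield offset, data
        
        offset = buffer.find(PKG_HDR_MAGIC, offset + PKG_HDR_LEN + max(section_len, 1))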
# The Dell PFS update images usually contain multiple sections.
# Each section is zlib-compressed with header pattern ********++EEAA761BECBB20F1E651--789C,
# where ******** is the zlib stream size, ++ is the section type and -- the header Checksum XOR 8.
# The "Firmware" section has type AA and its files are stored in PFS format.
# The "Utility" section has type BB and its files are stored in PFS, BIN or 7z formats.
def is_dell_pfs_hdr(in_buffer):
    return list(PAT_DELL_HDR.finditer(in_buffer))

# Each section is followed by the footer pattern ********EEAAEE8F491BE8AE143790--,
# where ******** is the zlib stream size and ++ the footer Checksum XOR 8.
def is_dell_pfs_ftr(in_buffer):
    return PAT_DELL_FTR.search(in_buffer)

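Both the section header and footer described above carry a trailing "Checksum XOR 8" byte, which the parser further down verifies through get_chk_8_xor() from common/checksums.py (imported at the top of this file but not included in the commit). A minimal sketch of such an XOR-8 checksum, stated as an assumption about that helper:

# Assumed shape of common.checksums.get_chk_8_xor (not part of this diff):
# XOR of every byte, compared against the last byte of the 16-byte section
# header/footer (header_data[0xF] / footer_data[0xF] in the parser below).
def get_chk_8_xor(data):
    chk8 = 0
    
    for byte in data:
        chk8 ^= byte
    
    return chk8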
# Get PFS ZLIB Section Offsets
def get_section_offsets(buffer):
    pfs_zlib_init = is_dell_pfs_hdr(buffer)
    
    if not pfs_zlib_init: return [] # No PFS ZLIB detected
    
    pfs_zlib_list = [] # Initialize PFS ZLIB offset list
    
    # Remove duplicate/nested PFS ZLIB offsets
    for zlib_c in pfs_zlib_init:
        is_duplicate = False # Initialize duplicate/nested PFS ZLIB offset
        
        for zlib_o in pfs_zlib_init:
            zlib_o_size = int.from_bytes(buffer[zlib_o.start() - 0x5:zlib_o.start() - 0x1], 'little')
            
            # If current PFS ZLIB offset is within another PFS ZLIB range (start-end), set as duplicate
            if zlib_o.start() < zlib_c.start() < zlib_o.start() + zlib_o_size: is_duplicate = True
        
        if not is_duplicate: pfs_zlib_list.append(zlib_c.start())
    
    return pfs_zlib_list

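Usage note: get_section_offsets() only returns where each top-level ZLIB section pattern starts; pfs_section_parse() further down then reads the section type from the byte just before the pattern, the compressed size from the four bytes before the type byte, and the zlib stream itself from pattern start + 0xB. Condensed into a small sketch (names local to this example):

# Condensed from the pfs_section_parse() logic later in this file: locate each
# top-level section, read its type byte and header size field, then inflate it.
import zlib

def list_pfs_zlib_sections(buffer):
    for zlib_start in get_section_offsets(buffer):
        section_type = buffer[zlib_start - 0x1]  # 0xAA Firmware, 0xBB Utilities
        compressed_size = int.from_bytes(buffer[zlib_start - 0x5:zlib_start - 0x1], 'little')
        compressed_data = buffer[zlib_start + 0xB:zlib_start + 0xB + compressed_size]
        
        yield section_type, zlib.decompress(compressed_data)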
|
||||||
|
# Dell PFS ZLIB Section Parser
|
||||||
|
def pfs_section_parse(zlib_data, zlib_start, output_path, pfs_name, pfs_index, pfs_count, is_rec, padding, is_verbose=True, is_advanced=True):
|
||||||
|
is_zlib_error = False # Initialize PFS ZLIB-related error state
|
||||||
|
|
||||||
|
section_type = zlib_data[zlib_start - 0x1] # Byte before PFS ZLIB Section pattern is Section Type (e.g. AA, BB)
|
||||||
|
section_name = {0xAA:'Firmware', 0xBB:'Utilities'}.get(section_type, 'Unknown (%0.2X)' % section_type)
|
||||||
|
|
||||||
|
# Show extraction complete message for each main PFS ZLIB Section
|
||||||
|
printer('Extracting Dell PFS %d >%s > %s' % (pfs_index, pfs_name, section_name), padding)
|
||||||
|
|
||||||
|
# Set PFS ZLIB Section extraction sub-directory path
|
||||||
|
section_path = os.path.join(output_path, section_name)
|
||||||
|
|
||||||
|
# Delete existing extraction sub-directory (not in recursions)
|
||||||
|
if os.path.isdir(section_path) and not is_rec: shutil.rmtree(section_path)
|
||||||
|
|
||||||
|
# Create extraction sub-directory
|
||||||
|
if not os.path.isdir(section_path): os.makedirs(section_path)
|
||||||
|
|
||||||
|
# Store the compressed zlib stream start offset
|
||||||
|
compressed_start = zlib_start + 0xB
|
||||||
|
|
||||||
|
# Store the PFS ZLIB section header start offset
|
||||||
|
header_start = zlib_start - 0x5
|
||||||
|
|
||||||
|
# Store the PFS ZLIB section header contents (16 bytes)
|
||||||
|
header_data = zlib_data[header_start:compressed_start]
|
||||||
|
|
||||||
|
# Check if the PFS ZLIB section header Checksum XOR 8 is valid
|
||||||
|
if get_chk_8_xor(header_data[:0xF]) != header_data[0xF]:
|
||||||
|
printer('Error: Invalid Dell PFS ZLIB section Header Checksum!', padding)
|
||||||
|
is_zlib_error = True
|
||||||
|
|
||||||
|
# Store the compressed zlib stream size from the header contents
|
||||||
|
compressed_size_hdr = int.from_bytes(header_data[:0x4], 'little')
|
||||||
|
|
||||||
|
# Store the compressed zlib stream end offset
|
||||||
|
compressed_end = compressed_start + compressed_size_hdr
|
||||||
|
|
||||||
|
# Store the compressed zlib stream contents
|
||||||
|
compressed_data = zlib_data[compressed_start:compressed_end]
|
||||||
|
|
||||||
|
# Check if the compressed zlib stream is complete, based on header
|
||||||
|
if len(compressed_data) != compressed_size_hdr:
|
||||||
|
printer('Error: Incomplete Dell PFS ZLIB section data (Header)!', padding)
|
||||||
|
is_zlib_error = True
|
||||||
|
|
||||||
|
# Store the PFS ZLIB section footer contents (16 bytes)
|
||||||
|
footer_data = zlib_data[compressed_end:compressed_end + 0x10]
|
||||||
|
|
||||||
|
# Search input section for PFS ZLIB section footer
|
||||||
|
pfs_zlib_footer_match = is_dell_pfs_ftr(footer_data)
|
||||||
|
|
||||||
|
# Check if PFS ZLIB section footer was found in the section
|
||||||
|
if not pfs_zlib_footer_match:
|
||||||
|
printer('Error: This Dell PFS ZLIB section is corrupted!', padding)
|
||||||
|
is_zlib_error = True
|
||||||
|
|
||||||
|
# Check if the PFS ZLIB section footer Checksum XOR 8 is valid
|
||||||
|
if get_chk_8_xor(footer_data[:0xF]) != footer_data[0xF]:
|
||||||
|
printer('Error: Invalid Dell PFS ZLIB section Footer Checksum!', padding)
|
||||||
|
is_zlib_error = True
|
||||||
|
|
||||||
|
# Store the compressed zlib stream size from the footer contents
|
||||||
|
compressed_size_ftr = int.from_bytes(footer_data[:0x4], 'little')
|
||||||
|
|
||||||
|
# Check if the compressed zlib stream is complete, based on footer
|
||||||
|
if compressed_size_ftr != compressed_size_hdr:
|
||||||
|
printer('Error: Incomplete Dell PFS ZLIB section data (Footer)!', padding)
|
||||||
|
is_zlib_error = True
|
||||||
|
|
||||||
|
# Decompress PFS ZLIB section payload
|
||||||
|
try:
|
||||||
|
assert not is_zlib_error # ZLIB errors are critical
|
||||||
|
section_data = zlib.decompress(compressed_data) # ZLIB decompression
|
||||||
|
except:
|
||||||
|
section_data = zlib_data # Fallback to raw ZLIB data upon critical error
|
||||||
|
|
||||||
|
# Call the PFS Extract function on the decompressed PFS ZLIB Section
|
||||||
|
pfs_extract(section_data, pfs_index, pfs_name, pfs_count, section_path, padding, is_verbose, is_advanced)
|
||||||
|
|
||||||
|

# Parse & Extract Dell PFS Volume
def pfs_extract(buffer, pfs_index, pfs_name, pfs_count, output_path, pfs_padd, is_verbose=True, is_advanced=True):
	# Show PFS Volume indicator
	if is_verbose:
		printer('PFS Volume:', pfs_padd)
	
	# Get PFS Header Structure values
	pfs_hdr = get_struct(buffer, 0, DellPfsHeader)
	
	# Validate that a PFS Header was parsed
	if pfs_hdr.Tag != b'PFS.HDR.':
		printer('Error: PFS Header could not be found!', pfs_padd + 4)
		
		return # Critical error, abort
	
	# Show PFS Header Structure info
	if is_verbose:
		printer('PFS Header:\n', pfs_padd + 4)
		pfs_hdr.struct_print(pfs_padd + 8)
	
	# Validate that a known PFS Header Version was encountered
	chk_hdr_ver(pfs_hdr.HeaderVersion, 'PFS', pfs_padd + 8)
	
	# Get PFS Payload Data
	pfs_payload = buffer[PFS_HEAD_LEN:PFS_HEAD_LEN + pfs_hdr.PayloadSize]
	
	# Parse all PFS Payload Entries/Components
	entry_index = 1 # Index number of each PFS Entry
	entry_start = 0 # Increasing PFS Entry starting offset
	entries_all = [] # Storage for each PFS Entry details
	filename_info = [] # Buffer for FileName Information Entry Data
	signature_info = [] # Buffer for Signature Information Entry Data
	pfs_entry_struct,pfs_entry_size = get_pfs_entry(pfs_payload, entry_start) # Get PFS Entry Info
	while len(pfs_payload[entry_start:entry_start + pfs_entry_size]) == pfs_entry_size:
		# Analyze PFS Entry Structure and get relevant info
		_,entry_version,entry_guid,entry_data,entry_data_sig,entry_met,entry_met_sig,next_entry = \
		parse_pfs_entry(pfs_payload, entry_start, pfs_entry_size, pfs_entry_struct, 'PFS Entry', pfs_padd, is_verbose)
		
		entry_type = 'OTHER' # Adjusted later if PFS Entry is Zlib, PFAT, PFS Info, Model Info
		
		# Get PFS Information from the PFS Entry with GUID E0717CE3A9BB25824B9F0DC8FD041960 or B033CB16EC9B45A14055F80E4D583FD3
		if entry_guid in ['E0717CE3A9BB25824B9F0DC8FD041960','B033CB16EC9B45A14055F80E4D583FD3']:
			filename_info = entry_data
			entry_type = 'NAME_INFO'
		
		# Get Model Information from the PFS Entry with GUID 6F1D619A22A6CB924FD4DA68233AE3FB
		elif entry_guid == '6F1D619A22A6CB924FD4DA68233AE3FB':
			entry_type = 'MODEL_INFO'
		
		# Get Signature Information from the PFS Entry with GUID D086AFEE3ADBAEA94D5CED583C880BB7
		elif entry_guid == 'D086AFEE3ADBAEA94D5CED583C880BB7':
			signature_info = entry_data
			entry_type = 'SIG_INFO'
		
		# Get Nested PFS from the PFS Entry with GUID 900FAE60437F3AB14055F456AC9FDA84
		elif entry_guid == '900FAE60437F3AB14055F456AC9FDA84':
			entry_type = 'NESTED_PFS' # Nested PFS are usually zlib-compressed so it might change to 'ZLIB' later
		
		# Store all relevant PFS Entry details
		entries_all.append([entry_index, entry_guid, entry_version, entry_type, entry_data, entry_data_sig, entry_met, entry_met_sig])
		
		entry_index += 1 # Increase PFS Entry Index number for user-friendly output and name duplicates
		entry_start = next_entry # Next PFS Entry starts after PFS Entry Metadata Signature
	
	# Parse all PFS Information Entries/Descriptors
	info_start = 0 # Increasing PFS Information Entry starting offset
	info_all = [] # Storage for each PFS Information Entry details
	while len(filename_info[info_start:info_start + PFS_INFO_LEN]) == PFS_INFO_LEN:
		# Get PFS Information Header Structure info
		entry_info_hdr = get_struct(filename_info, info_start, DellPfsInfo)
		
		# Show PFS Information Header Structure info
		if is_verbose:
			printer('PFS Information Header:\n', pfs_padd + 4)
			entry_info_hdr.struct_print(pfs_padd + 8)
		
		# Validate that a known PFS Information Header Version was encountered
		if entry_info_hdr.HeaderVersion != 1:
			printer('Error: Unknown PFS Information Header Version %d!' % entry_info_hdr.HeaderVersion, pfs_padd + 8)
			break # Skip PFS Information Entries/Descriptors in case of unknown PFS Information Header Version
		
		# Get PFS Information Header GUID in Big Endian format to match each Info to the equivalent stored PFS Entry details
		entry_guid = '%0.*X' % (0x10 * 2, int.from_bytes(entry_info_hdr.GUID, 'little'))
		
		# Get PFS FileName Structure values
		entry_info_mod = get_struct(filename_info, info_start + PFS_INFO_LEN, DellPfsName)
		
		# Show PFS FileName Structure info
		if is_verbose:
			printer('PFS FileName Entry:\n', pfs_padd + 8)
			entry_info_mod.struct_print(pfs_padd + 12)
		
		# The PFS FileName Structure is not complete by itself. The size of the last field (Entry Name) is determined from
		# CharacterCount multiplied by 2 due to usage of UTF-16 2-byte Characters. Any Entry Name leading and/or trailing
		# space/null characters are stripped and common Windows reserved/illegal filename characters are replaced
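		# Illustrative example (hypothetical values): CharacterCount 4 gives a name_size of 8,
		# so a buffer of b'B\x00I\x00O\x00S\x00' decodes via UTF-16LE to the Entry Name 'BIOS'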
		name_start = info_start + PFS_INFO_LEN + PFS_NAME_LEN # PFS Entry's FileName start offset
		name_size = entry_info_mod.CharacterCount * 2 # PFS Entry's FileName buffer total size
		name_data = filename_info[name_start:name_start + name_size] # PFS Entry's FileName buffer
		entry_name = safe_name(name_data.decode('utf-16').strip()) # PFS Entry's FileName value
		
		# Show PFS FileName Name info (padding matches the one from PFS FileName Structure info)
		if is_verbose:
			printer('Payload Name : %s' % entry_name, pfs_padd + 12, False)
		
		# Get PFS FileName Version string via "Version" and "VersionType" fields
		# PFS FileName Version string must be preferred over PFS Entry's Version
		entry_version = get_entry_ver(entry_info_mod.Version, entry_info_mod.VersionType, pfs_padd + 12)
		
		# Store all relevant PFS FileName details
		info_all.append([entry_guid, entry_name, entry_version])
		
		# The next PFS Information Header starts after the calculated FileName size
		# Two space/null characters seem to always exist after each FileName value
		info_start += (PFS_INFO_LEN + PFS_NAME_LEN + name_size + 0x2)
	
	# Parse Nested PFS Metadata when its PFS Information Entry is missing
	for index in range(len(entries_all)):
		if entries_all[index][3] == 'NESTED_PFS' and not filename_info:
			entry_guid = entries_all[index][1] # Nested PFS Entry GUID in Big Endian format
			entry_metadata = entries_all[index][6] # Use Metadata as PFS Information Entry
			
			# When PFS Information Entry exists, Nested PFS Metadata contains only Model IDs
			# When it's missing, the Metadata structure is large and contains equivalent info
			if len(entry_metadata) >= PFS_META_LEN:
				# Get Nested PFS Metadata Structure values
				entry_info = get_struct(entry_metadata, 0, DellPfsMetadata)
				
				# Show Nested PFS Metadata Structure info
				if is_verbose:
					printer('PFS Metadata Information:\n', pfs_padd + 4)
					entry_info.struct_print(pfs_padd + 8)
				
				# As Nested PFS Entry Name, we'll use the actual PFS File Name
				# Replace common Windows reserved/illegal filename characters
				entry_name = safe_name(entry_info.FileName.decode('utf-8').strip('.exe'))
				
				# As Nested PFS Entry Version, we'll use the actual PFS File Version
				entry_version = entry_info.FileVersion.decode('utf-8')
				
				# Store all relevant Nested PFS Metadata/Information details
				info_all.append([entry_guid, entry_name, entry_version])
				
				# Re-set Nested PFS Entry Version from Metadata
				entries_all[index][2] = entry_version
	
	# Parse all PFS Signature Entries/Descriptors
	sign_start = 0 # Increasing PFS Signature Entry starting offset
	while len(signature_info[sign_start:sign_start + PFS_INFO_LEN]) == PFS_INFO_LEN:
		# Get PFS Information Header Structure info
		entry_info_hdr = get_struct(signature_info, sign_start, DellPfsInfo)
		
		# Show PFS Information Header Structure info
		if is_verbose:
			printer('PFS Information Header:\n', pfs_padd + 4)
			entry_info_hdr.struct_print(pfs_padd + 8)
		
		# Validate that a known PFS Information Header Version was encountered
		if entry_info_hdr.HeaderVersion != 1:
			printer('Error: Unknown PFS Information Header Version %d!' % entry_info_hdr.HeaderVersion, pfs_padd + 8)
			break # Skip PFS Signature Entries/Descriptors in case of unknown Header Version
		
		# PFS Signature Entries/Descriptors have DellPfsInfo + DellPfsEntryR* + Sign Size [0x2] + Sign Data [Sig Size]
		pfs_entry_struct, pfs_entry_size = get_pfs_entry(signature_info, sign_start + PFS_INFO_LEN) # Get PFS Entry Info
		
		# Get PFS Entry Header Structure info
		entry_hdr = get_struct(signature_info, sign_start + PFS_INFO_LEN, pfs_entry_struct)
		
		# Show PFS Information Header Structure info
		if is_verbose:
			printer('PFS Information Entry:\n', pfs_padd + 8)
			entry_hdr.struct_print(pfs_padd + 12)
		
		# Show PFS Signature Size & Data (after DellPfsEntryR*)
		sign_info_start = sign_start + PFS_INFO_LEN + pfs_entry_size
		sign_size = int.from_bytes(signature_info[sign_info_start:sign_info_start + 0x2], 'little')
		sign_data_raw = signature_info[sign_info_start + 0x2:sign_info_start + 0x2 + sign_size]
		sign_data_txt = '%0.*X' % (sign_size * 2, int.from_bytes(sign_data_raw, 'little'))
		
		if is_verbose:
			printer('Signature Information:\n', pfs_padd + 8)
			printer('Signature Size: 0x%X' % sign_size, pfs_padd + 12, False)
			printer('Signature Data: %s [...]' % sign_data_txt[:32], pfs_padd + 12, False)
		
		# The next PFS Signature Entry/Descriptor starts after the previous Signature Data
		sign_start += (PFS_INFO_LEN + pfs_entry_size + 0x2 + sign_size)

	# Parse each PFS Entry Data for special types (zlib or PFAT)
	for index in range(len(entries_all)):
		entry_data = entries_all[index][4] # Get PFS Entry Data
		entry_type = entries_all[index][3] # Get PFS Entry Type
		
		# Very small PFS Entry Data cannot be of special type
		if len(entry_data) < PFS_HEAD_LEN: continue
		
		# Check if PFS Entry contains zlib-compressed sub-PFS Volume
		pfs_zlib_offsets = get_section_offsets(entry_data)
		
		# Check if PFS Entry contains sub-PFS Volume with PFAT Payload
		is_pfat = False # Initial PFAT state for sub-PFS Entry
		_, pfat_entry_size = get_pfs_entry(entry_data, PFS_HEAD_LEN) # Get possible PFS PFAT Entry Size
		pfat_hdr_off = PFS_HEAD_LEN + pfat_entry_size # Possible PFAT Header starts after PFS Header & Entry
		pfat_entry_hdr = get_struct(entry_data, 0, DellPfsHeader) # Possible PFS PFAT Entry
		if len(entry_data) - pfat_hdr_off >= PFAT_HDR_LEN:
			pfat_hdr = get_struct(entry_data, pfat_hdr_off, IntelBiosGuardHeader)
			is_pfat = pfat_hdr.get_platform_id().upper().startswith('DELL')
		
		# Parse PFS Entry which contains sub-PFS Volume with PFAT Payload
		if pfat_entry_hdr.Tag == b'PFS.HDR.' and is_pfat:
			entry_type = 'PFAT' # Re-set PFS Entry Type from OTHER to PFAT, to use such info afterwards
			
			entry_data = parse_pfat_pfs(pfat_entry_hdr, entry_data, pfs_padd, is_verbose) # Parse sub-PFS PFAT Volume
		
		# Parse PFS Entry which contains zlib-compressed sub-PFS Volume
		elif pfs_zlib_offsets:
			entry_type = 'ZLIB' # Re-set PFS Entry Type from OTHER to ZLIB, to use such info afterwards
			pfs_count += 1 # Increase the count/index of parsed main PFS structures by one
			
			# Parse each sub-PFS ZLIB Section
			for offset in pfs_zlib_offsets:
				# Get the Name of the zlib-compressed full PFS structure via the already stored PFS Information
				# The zlib-compressed full PFS structure(s) are used to contain multiple FW (CombineBiosNameX)
				# When zlib-compressed full PFS structure(s) exist within the main/first full PFS structure,
				# its PFS Information should contain their names (CombineBiosNameX). Since the main/first
				# full PFS structure has count/index 1, the rest start at 2+ and thus, their PFS Information
				# names can be retrieved in order by subtracting 2 from the main/first PFS Information values
				sub_pfs_name = ' %s v%s' % (info_all[pfs_count - 2][1], info_all[pfs_count - 2][2]) if info_all else ' UNKNOWN'
				
				# Set the sub-PFS output path (create sub-folders for each sub-PFS and its ZLIB sections)
				sub_pfs_path = os.path.join(output_path, str(pfs_count) + sub_pfs_name)
				
				# Recursively call the PFS ZLIB Section Parser function for the sub-PFS Volume (pfs_index = pfs_count)
				pfs_section_parse(entry_data, offset, sub_pfs_path, sub_pfs_name, pfs_count, pfs_count, True, pfs_padd + 4, is_verbose, is_advanced)
		
		entries_all[index][4] = entry_data # Adjust PFS Entry Data after parsing PFAT (same ZLIB raw data, not stored afterwards)
		entries_all[index][3] = entry_type # Adjust PFS Entry Type from OTHER to PFAT or ZLIB (ZLIB is ignored at file extraction)

	# Name & Store each PFS Entry/Component Data, Data Signature, Metadata, Metadata Signature
	for entry_index in range(len(entries_all)):
		file_index = entries_all[entry_index][0]
		file_guid = entries_all[entry_index][1]
		file_version = entries_all[entry_index][2]
		file_type = entries_all[entry_index][3]
		file_data = entries_all[entry_index][4]
		file_data_sig = entries_all[entry_index][5]
		file_meta = entries_all[entry_index][6]
		file_meta_sig = entries_all[entry_index][7]
		
		# Give Names to special PFS Entries, not covered by PFS Information
		if file_type == 'MODEL_INFO':
			file_name = 'Model Information'
		elif file_type == 'NAME_INFO':
			file_name = 'Filename Information'
			if not is_advanced: continue # Don't store Filename Information in non-advanced user mode
		elif file_type == 'SIG_INFO':
			file_name = 'Signature Information'
			if not is_advanced: continue # Don't store Signature Information in non-advanced user mode
		else:
			file_name = ''
		
		# Most PFS Entry Names & Versions are found at PFS Information via their GUID
		# Version can be found at DellPfsEntryR* but prefer PFS Information when possible
		for info_index in range(len(info_all)):
			info_guid = info_all[info_index][0]
			info_name = info_all[info_index][1]
			info_version = info_all[info_index][2]
			
			# Give proper Name & Version info if Entry/Information GUIDs match
			if info_guid == file_guid:
				file_name = info_name
				file_version = info_version
				
				info_all[info_index][0] = 'USED' # PFS with zlib-compressed sub-PFS use the same GUID
				break # Break at 1st Name match to not rename again from next zlib-compressed sub-PFS with the same GUID
		
		# For both advanced & non-advanced users, the goal is to store final/usable files only
		# so empty or intermediate files such as sub-PFS, PFS w/ PFAT or zlib-PFS are skipped
		# Main/First PFS CombineBiosNameX Metadata files must be kept for accurate Model Information
		# All users should check these files in order to choose the correct CombineBiosNameX modules
		write_files = [] # Initialize list of output PFS Entry files to be written/extracted
		
		is_zlib = bool(file_type == 'ZLIB') # Determine if PFS Entry Data was zlib-compressed
		
		if file_data and not is_zlib: write_files.append([file_data, 'data']) # PFS Entry Data Payload
		if file_data_sig and is_advanced: write_files.append([file_data_sig, 'sign_data']) # PFS Entry Data Signature
		if file_meta and (is_zlib or is_advanced): write_files.append([file_meta, 'meta']) # PFS Entry Metadata Payload
		if file_meta_sig and is_advanced: write_files.append([file_meta_sig, 'sign_meta']) # PFS Entry Metadata Signature
		
		# Write/Extract PFS Entry files
		for file in write_files:
			full_name = '%d%s -- %d %s v%s' % (pfs_index, pfs_name, file_index, file_name, file_version) # Full PFS Entry Name
			pfs_file_write(file[0], file[1], file_type, full_name, output_path, pfs_padd, is_verbose, is_advanced)
	
	# Get PFS Footer Data after PFS Header Payload
	pfs_footer = buffer[PFS_HEAD_LEN + pfs_hdr.PayloadSize:PFS_HEAD_LEN + pfs_hdr.PayloadSize + PFS_FOOT_LEN]
	
	# Analyze PFS Footer Structure
	chk_pfs_ftr(pfs_footer, pfs_payload, pfs_hdr.PayloadSize, 'PFS', pfs_padd, is_verbose)

# Analyze Dell PFS Entry Structure
def parse_pfs_entry(entry_buffer, entry_start, entry_size, entry_struct, text, padding, is_verbose=True):
	# Get PFS Entry Structure values
	pfs_entry = get_struct(entry_buffer, entry_start, entry_struct)
	
	# Show PFS Entry Structure info
	if is_verbose:
		printer('PFS Entry:\n', padding + 4)
		pfs_entry.struct_print(padding + 8)
	
	# Validate that a known PFS Entry Header Version was encountered
	chk_hdr_ver(pfs_entry.HeaderVersion, text, padding + 8)
	
	# Validate that the PFS Entry Reserved field is empty
	if pfs_entry.Reserved != 0:
		printer('Error: Detected non-empty %s Reserved field!' % text, padding + 8)
	
	# Get PFS Entry Version string via "Version" and "VersionType" fields
	entry_version = get_entry_ver(pfs_entry.Version, pfs_entry.VersionType, padding + 8)
	
	# Get PFS Entry GUID in Big Endian format
	entry_guid = '%0.*X' % (0x10 * 2, int.from_bytes(pfs_entry.GUID, 'little'))
	
	# PFS Entry Data starts after the PFS Entry Structure
	entry_data_start = entry_start + entry_size
	entry_data_end = entry_data_start + pfs_entry.DataSize
	
	# PFS Entry Data Signature starts after PFS Entry Data
	entry_data_sig_start = entry_data_end
	entry_data_sig_end = entry_data_sig_start + pfs_entry.DataSigSize
	
	# PFS Entry Metadata starts after PFS Entry Data Signature
	entry_met_start = entry_data_sig_end
	entry_met_end = entry_met_start + pfs_entry.DataMetSize
	
	# PFS Entry Metadata Signature starts after PFS Entry Metadata
	entry_met_sig_start = entry_met_end
	entry_met_sig_end = entry_met_sig_start + pfs_entry.DataMetSigSize
	
	entry_data = entry_buffer[entry_data_start:entry_data_end] # Store PFS Entry Data
	entry_data_sig = entry_buffer[entry_data_sig_start:entry_data_sig_end] # Store PFS Entry Data Signature
	entry_met = entry_buffer[entry_met_start:entry_met_end] # Store PFS Entry Metadata
	entry_met_sig = entry_buffer[entry_met_sig_start:entry_met_sig_end] # Store PFS Entry Metadata Signature
	
	return pfs_entry, entry_version, entry_guid, entry_data, entry_data_sig, entry_met, entry_met_sig, entry_met_sig_end

# Parse Dell PFS Volume with PFAT Payload
def parse_pfat_pfs(entry_hdr, entry_data, padding, is_verbose=True):
	# Show PFS Volume indicator
	if is_verbose:
		printer('PFS Volume:', padding + 4)
	
	# Show sub-PFS Header Structure Info
	if is_verbose:
		printer('PFS Header:\n', padding + 8)
		entry_hdr.struct_print(padding + 12)
	
	# Validate that a known sub-PFS Header Version was encountered
	chk_hdr_ver(entry_hdr.HeaderVersion, 'sub-PFS', padding + 12)
	
	# Get sub-PFS Payload Data
	pfat_payload = entry_data[PFS_HEAD_LEN:PFS_HEAD_LEN + entry_hdr.PayloadSize]
	
	# Get sub-PFS Footer Data after sub-PFS Header Payload (must be retrieved at the initial entry_data, before PFAT parsing)
	pfat_footer = entry_data[PFS_HEAD_LEN + entry_hdr.PayloadSize:PFS_HEAD_LEN + entry_hdr.PayloadSize + PFS_FOOT_LEN]
	
	# Parse all sub-PFS Payload PFAT Entries
	pfat_data_all = [] # Storage for all sub-PFS PFAT Entries Order/Offset & Payload/Raw Data
	pfat_entry_start = 0 # Increasing sub-PFS PFAT Entry start offset
	pfat_entry_index = 0 # Increasing sub-PFS PFAT Entry count index
	_, pfs_entry_size = get_pfs_entry(pfat_payload, 0) # Get initial PFS PFAT Entry Size for loop
	while len(pfat_payload[pfat_entry_start:pfat_entry_start + pfs_entry_size]) == pfs_entry_size:
		# Get sub-PFS PFAT Entry Structure & Size info
		pfat_entry_struct, pfat_entry_size = get_pfs_entry(pfat_payload, pfat_entry_start)
		
		# Analyze sub-PFS PFAT Entry Structure and get relevant info
		pfat_entry,_,_,pfat_entry_data,_,pfat_entry_met,_,pfat_next_entry = parse_pfs_entry(pfat_payload,
		pfat_entry_start, pfat_entry_size, pfat_entry_struct, 'sub-PFS PFAT Entry', padding + 4, is_verbose)
		
		# Each sub-PFS PFAT Entry includes an AMI BIOS Guard (a.k.a. PFAT) block at the beginning
		# We need to parse the PFAT block and remove its contents from the final Payload/Raw Data
		pfat_hdr_off = pfat_entry_start + pfat_entry_size # PFAT block starts after PFS Entry
		
		# Get sub-PFS PFAT Header Structure values
		pfat_hdr = get_struct(pfat_payload, pfat_hdr_off, IntelBiosGuardHeader)
		
		# Show sub-PFS PFAT Header Structure info
		if is_verbose:
			printer('PFAT Block %d Header:\n' % pfat_entry_index, padding + 12)
			pfat_hdr.struct_print(padding + 16)
		
		pfat_script_start = pfat_hdr_off + PFAT_HDR_LEN # PFAT Block Script Start
		pfat_script_end = pfat_script_start + pfat_hdr.ScriptSize # PFAT Block Script End
		pfat_script_data = pfat_payload[pfat_script_start:pfat_script_end] # PFAT Block Script Data
		pfat_payload_start = pfat_script_end # PFAT Block Payload Start (at Script end)
		pfat_payload_end = pfat_script_end + pfat_hdr.DataSize # PFAT Block Data End
		pfat_payload_data = pfat_payload[pfat_payload_start:pfat_payload_end] # PFAT Block Raw Data
		pfat_hdr_bgs_size = PFAT_HDR_LEN + pfat_hdr.ScriptSize # PFAT Block Header & Script Size
		
		# The PFAT Script End should match the total Entry Data Size w/o PFAT block
		if pfat_hdr_bgs_size != pfat_entry.DataSize - pfat_hdr.DataSize:
			printer('Error: Detected sub-PFS PFAT Entry Header & PFAT Size mismatch!', padding + 16)
		
		# Get PFAT Header Flags (SFAM, ProtectEC, GFXMitDis, FTU, Reserved)
		is_sfam,_,_,_,_ = pfat_hdr.get_flags()
		
		# Parse sub-PFS PFAT Signature, if applicable (only when PFAT Header > SFAM flag is set)
		if is_sfam and len(pfat_payload[pfat_payload_end:pfat_payload_end + PFAT_SIG_LEN]) == PFAT_SIG_LEN:
			# Get sub-PFS PFAT Signature Structure values
			pfat_sig = get_struct(pfat_payload, pfat_payload_end, IntelBiosGuardSignature2k)
			
			# Show sub-PFS PFAT Signature Structure info
			if is_verbose:
				printer('PFAT Block %d Signature:\n' % pfat_entry_index, padding + 12)
				pfat_sig.struct_print(padding + 16)
		
		# Show PFAT Script via BIOS Guard Script Tool
		if is_verbose:
			printer('PFAT Block %d Script:\n' % pfat_entry_index, padding + 12)
			
			# https://github.com/allowitsme/big-tool by Dmitry Frolov
			_ = parse_bg_script(pfat_script_data, padding + 16)

		# The payload of sub-PFS PFAT Entries is not in proper order by default
		# We can get each payload's order from PFAT Script > OpCode #2 (set I0 imm)
		# PFAT Script OpCode #2 > Operand #3 stores the payload Offset in final image
		pfat_entry_off = int.from_bytes(pfat_script_data[0xC:0x10], 'little')
		
		# Parse sub-PFS PFAT Entry/Block Metadata
		if len(pfat_entry_met) >= PFS_PFAT_LEN:
			# Get sub-PFS PFAT Metadata Structure values
			pfat_met = get_struct(pfat_entry_met, 0, DellPfsPfatMetadata)
			
			# Show sub-PFS PFAT Metadata Structure info
			if is_verbose:
				printer('PFAT Block %d Metadata:\n' % pfat_entry_index, padding + 12)
				pfat_met.struct_print(padding + 16)
			
			# Another way to get each PFAT Entry payload's Order is from its Metadata at 0x8-0xC, if applicable
			# Check that the PFAT Entry payload Order/Offset from PFAT Script matches the one from PFAT Metadata
			if pfat_entry_off != pfat_met.OffsetBase:
				printer('Error: Detected sub-PFS PFAT Entry Metadata & PFAT Base Offset mismatch!', padding + 16)
				pfat_entry_off = pfat_met.OffsetBase # Prefer Offset from Metadata, in case PFAT Script differs
			
			# Check that the PFAT Entry payload Size from PFAT Header matches the one from PFAT Metadata
			if pfat_hdr.DataSize != pfat_met.BlockSize:
				printer('Error: Detected sub-PFS PFAT Entry Metadata & PFAT Block Size mismatch!', padding + 16)
		
		# Get sub-PFS Entry Raw Data by subtracting PFAT Header & Script from PFAT Entry Data
		pfat_entry_data_raw = pfat_entry_data[pfat_hdr_bgs_size:]
		
		# The sub-PFS Entry Raw Data (w/o PFAT Header & Script) should match with the PFAT Block payload
		if pfat_entry_data_raw != pfat_payload_data:
			printer('Error: Detected sub-PFS PFAT Entry w/o PFAT & PFAT Block Data mismatch!', padding + 16)
			pfat_entry_data_raw = pfat_payload_data # Prefer Data from PFAT Block, in case PFAT Entry differs
		
		# Store each sub-PFS PFAT Entry Order/Offset and Payload/Raw Data (w/o PFAT)
		pfat_data_all.append((pfat_entry_off, pfat_entry_data_raw))
		
		pfat_entry_start = pfat_next_entry # Next sub-PFS PFAT Entry starts after sub-PFS Entry Metadata Signature
		
		pfat_entry_index += 1
	
	pfat_data_all.sort() # Sort all sub-PFS PFAT Entries payloads/data based on their Order/Offset
	
	entry_data = b'' # Initialize new sub-PFS Entry Data
	for pfat_data in pfat_data_all: entry_data += pfat_data[1] # Merge all sub-PFS PFAT Entry Payload/Raw into the final sub-PFS Entry Data
	
	# Verify that the Order/Offset of the last PFAT Entry w/ its Size matches the final sub-PFS Entry Data Size
	if len(entry_data) != pfat_data_all[-1][0] + len(pfat_data_all[-1][1]):
		printer('Error: Detected sub-PFS PFAT Entry Buffer & Last Offset Size mismatch!', padding + 8)
	
	# Analyze sub-PFS Footer Structure
	chk_pfs_ftr(pfat_footer, pfat_payload, entry_hdr.PayloadSize, 'Sub-PFS', padding + 4, is_verbose)
	
	return entry_data

# Get Dell PFS Entry Structure & Size via its Version
def get_pfs_entry(buffer, offset):
	pfs_entry_ver = int.from_bytes(buffer[offset + 0x10:offset + 0x14], 'little') # PFS Entry Version
	
	if pfs_entry_ver == 1: return DellPfsEntryR1, ctypes.sizeof(DellPfsEntryR1)
	if pfs_entry_ver == 2: return DellPfsEntryR2, ctypes.sizeof(DellPfsEntryR2)
	
	return DellPfsEntryR2, ctypes.sizeof(DellPfsEntryR2)

# Determine Dell PFS Entry Version string
def get_entry_ver(version_fields, version_types, msg_padd):
	version = '' # Initialize Version string
	
	# Each Version Type (1 byte) determines the type of each Version Value (2 bytes)
	# Version Type 'N' is Number, 'A' is Text and ' ' is Empty/Unused
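	# Illustrative example (hypothetical field values): Version fields (1, 2, 34, 0) with
	# VersionType b'NNA ' produce '1.2.22', since 'A' (0x41) values are rendered as hexadecimal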
	for idx in range(len(version_fields)):
		eol = '' if idx == len(version_fields) - 1 else '.'
		
		if version_types[idx] == 65: version += '%X%s' % (version_fields[idx], eol) # 0x41 = ASCII
		elif version_types[idx] == 78: version += '%d%s' % (version_fields[idx], eol) # 0x4E = Number
		elif version_types[idx] in (0, 32): version = version.strip('.') # 0x00 or 0x20 = Unused
		else:
			version += '%X%s' % (version_fields[idx], eol) # Unknown
			printer('Error: Unknown PFS Entry Version Type 0x%0.2X!' % version_types[idx], msg_padd)
	
	return version

# Check if Dell PFS Header Version is known
def chk_hdr_ver(version, text, padding):
	if version in (1,2): return
	
	printer('Error: Unknown %s Header Version %d!' % (text, version), padding)

# Analyze Dell PFS Footer Structure
def chk_pfs_ftr(footer_buffer, data_buffer, data_size, text, padding, is_verbose=True):
	# Get PFS Footer Structure values
	pfs_ftr = get_struct(footer_buffer, 0, DellPfsFooter)
	
	# Validate that a PFS Footer was parsed
	if pfs_ftr.Tag == b'PFS.FTR.':
		# Show PFS Footer Structure info
		if is_verbose:
			printer('PFS Footer:\n', padding + 4)
			pfs_ftr.struct_print(padding + 8)
	else:
		printer('Error: %s Footer could not be found!' % text, padding + 4)
	
	# Validate that PFS Header Payload Size matches the one at PFS Footer
	if data_size != pfs_ftr.PayloadSize:
		printer('Error: %s Header & Footer Payload Size mismatch!' % text, padding + 4)
	
	# Calculate the PFS Payload Data CRC-32 w/ Vector 0
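	# Note: the expression below is the bitwise inverse of the standard zlib CRC-32, masked to 32 bits,
	# which is the form this check expects the PFS Footer Checksum field to hold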
	pfs_ftr_crc = ~zlib.crc32(data_buffer, 0) & 0xFFFFFFFF
	
	# Validate PFS Payload Data Checksum via PFS Footer
	if pfs_ftr.Checksum != pfs_ftr_crc:
		printer('Error: Invalid %s Footer Payload Checksum!' % text, padding + 4)

# Write/Extract Dell PFS Entry Files (Data, Metadata, Signature)
def pfs_file_write(bin_buff, bin_name, bin_type, full_name, out_path, padding, is_verbose=True, is_advanced=True):
	# Store Data/Metadata Signature (advanced users only)
	if bin_name.startswith('sign'):
		final_name = '%s.%s.sig' % (safe_name(full_name), bin_name.split('_')[1])
		final_path = os.path.join(out_path, final_name)
		
		with open(final_path, 'wb') as pfs_out: pfs_out.write(bin_buff) # Write final Data/Metadata Signature
		
		return # Skip further processing for Signatures
	
	# Store Data/Metadata Payload
	bin_ext = '.%s.bin' % bin_name if is_advanced else '.bin' # Simpler Data/Metadata Extension for non-advanced users
	
	# Some Data may be Text or XML files with useful information for non-advanced users
	is_text,final_data,file_ext,write_mode = bin_is_text(bin_buff, bin_type, bin_name == 'meta', padding, is_verbose, is_advanced)
	
	final_name = '%s%s' % (safe_name(full_name), bin_ext[:-4] + file_ext if is_text else bin_ext)
	final_path = os.path.join(out_path, final_name)
	
	with open(final_path, write_mode) as pfs_out: pfs_out.write(final_data) # Write final Data/Metadata Payload

# Check if Dell PFS Entry file/data is Text/XML and Convert
def bin_is_text(buffer, file_type, is_metadata, pfs_padd, is_verbose=True, is_advanced=True):
	is_text = False
	write_mode = 'wb'
	extension = '.bin'
	buffer_in = buffer
	
	if b',END' in buffer[-0x8:]: # Text Type 1
		is_text = True
		write_mode = 'w'
		extension = '.txt'
		buffer = buffer.decode('utf-8').split(',END')[0].replace(';','\n')
	elif buffer.startswith(b'VendorName=Dell'): # Text Type 2
		is_text = True
		write_mode = 'w'
		extension = '.txt'
		buffer = buffer.split(b'\x00')[0].decode('utf-8').replace(';','\n')
	elif b'<Rimm x-schema="' in buffer[:0x50]: # XML Type
		is_text = True
		write_mode = 'w'
		extension = '.xml'
		buffer = buffer.decode('utf-8')
	elif file_type in ('NESTED_PFS','ZLIB') and is_metadata and len(buffer) == PFS_META_LEN: # Text Type 3
		is_text = True
		write_mode = 'w'
		extension = '.txt'
		with io.StringIO() as text_buffer, contextlib.redirect_stdout(text_buffer):
			get_struct(buffer, 0, DellPfsMetadata).struct_print(0)
			buffer = text_buffer.getvalue()
	
	# Show Model/PCR XML Information, if applicable
	if is_verbose and is_text and not is_metadata: # Metadata is shown at initial DellPfsMetadata analysis
		printer('PFS %s Information:\n' % {'.txt': 'Model', '.xml': 'PCR XML'}[extension], pfs_padd + 8)
		_ = [printer(line.strip('\r'), pfs_padd + 12, False) for line in buffer.split('\n') if line]
	
	# Only for non-advanced users due to signature (.sig) invalidation
	if is_advanced: return False, buffer_in, '.bin', 'wb'
	
	return is_text, buffer, extension, write_mode

# Get ctypes Structure Sizes
PFS_HEAD_LEN = ctypes.sizeof(DellPfsHeader)
PFS_FOOT_LEN = ctypes.sizeof(DellPfsFooter)
PFS_INFO_LEN = ctypes.sizeof(DellPfsInfo)
PFS_NAME_LEN = ctypes.sizeof(DellPfsName)
PFS_META_LEN = ctypes.sizeof(DellPfsMetadata)
PFS_PFAT_LEN = ctypes.sizeof(DellPfsPfatMetadata)
PFAT_HDR_LEN = ctypes.sizeof(IntelBiosGuardHeader)
PFAT_SIG_LEN = ctypes.sizeof(IntelBiosGuardSignature2k)

if __name__ == '__main__':
	# Show script title
	script_title(title)
	
	# Set argparse arguments
	argparser = argparse_init()
	argparser.add_argument('-a', '--advanced', help='extract signatures and metadata', action='store_true')
	argparser.add_argument('-v', '--verbose', help='show PFS structure information', action='store_true')
	arguments = argparser.parse_args()
	
	is_advanced = arguments.advanced # Set Advanced user mode optional argument
	is_verbose = arguments.verbose # Set Verbose output mode optional argument
	
	# Initialize script (must be after argparse)
	input_files,output_path,padding = script_init(arguments, 4)
	
	for input_file in input_files:
		input_name = os.path.basename(input_file)
		
		printer(['***', input_name], padding - 4)
		
		with open(input_file, 'rb') as in_file: input_buffer = in_file.read()
		
		# Search input image for ThinOS PKG 7zXZ section header
		lzma_pkg_hdr_match = is_dell_pfs_pkg(input_buffer)
		
		# Decompress ThinOS PKG 7zXZ section first, if present
		if lzma_pkg_hdr_match:
			lzma_len_off = lzma_pkg_hdr_match.start() + 0x10
			lzma_len_int = int.from_bytes(input_buffer[lzma_len_off:lzma_len_off + 0x4], 'little')
			lzma_bin_off = lzma_pkg_hdr_match.end() - 0x5
			lzma_bin_dat = input_buffer[lzma_bin_off:lzma_bin_off + lzma_len_int]
			
			# Check if the compressed 7zXZ stream is complete, based on header
			if len(lzma_bin_dat) != lzma_len_int:
				printer('Error: This Dell ThinOS PKG update image is corrupted!', padding)
				
				continue # Next input file
			
			input_buffer = lzma.decompress(lzma_bin_dat)
		
		# Search input image for PFS ZLIB Sections
		pfs_zlib_offsets = get_section_offsets(input_buffer)
		
		if not pfs_zlib_offsets:
			printer('Error: This is not a Dell PFS update image!', padding)
			
			continue # Next input file
		
		extract_path = os.path.join(output_path, input_name + '_extracted')
		
		extract_name = ' ' + os.path.splitext(input_name)[0]
		
		# Parse each PFS ZLIB Section
		for offset in pfs_zlib_offsets:
			# Call the PFS ZLIB Section Parser function
			pfs_section_parse(input_buffer, offset, extract_path, extract_name, 1, 1, False, padding, is_verbose, is_advanced)
		
		printer('Extracted Dell PFS Update image!', padding)
	
	printer('Done!', pause=True)
README.md

@@ -5,9 +5,69 @@

<a href="https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=DJDZD3PRGCSCL"><img border="0" title="BIOS Utilities Donation via Paypal or Debit/Credit Card" alt="BIOS Utilities Donation via Paypal or Debit/Credit Card" src="https://user-images.githubusercontent.com/11527726/109392268-e0f68280-7923-11eb-83d8-0a63f0d20783.png"></a>

* [**Dell PFS Update Extractor**](#dell-pfs-update-extractor)
* [**AMI UCP BIOS Extractor**](#ami-ucp-bios-extractor)
* [**AMI BIOS Guard Extractor**](#ami-bios-guard-extractor)

## **Dell PFS Update Extractor**

![]()

#### **Description**

Parses Dell PFS Update images and extracts their Firmware (e.g. SPI, BIOS/UEFI, EC, ME etc) and Utilities (e.g. Flasher etc) component sections. It supports all Dell PFS revisions and formats, including those which are originally LZMA compressed in ThinOS packages, ZLIB compressed or Intel BIOS Guard (PFAT) protected. The output comprises only final firmware components which are directly usable by end users.

#### **Usage**

You can either Drag & Drop or manually enter Dell PFS Update image(s). Optional arguments (an example command follows the list):

* -h or --help : show help message and exit
* -i or --input-dir : extract from given input directory
* -o or --output-dir : extract in given output directory
* -e or --auto-exit : skip press enter to exit prompts
* -a or --advanced : extract signatures and metadata
* -v or --verbose : show PFS structure information
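
For example, a hypothetical command (file and folder names below are illustrative, not part of this repository) that extracts a Dell PFS update image with verbose output into a chosen output directory could look like:

> python Dell_PFS_Extract.py "BIOS_IMG.exe" -o "PFS_Output" -v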

#### **Compatibility**

Should work at all Windows, Linux or macOS operating systems which have Python 3.7 support.

#### **Prerequisites**

Optionally, to decompile the Intel BIOS Guard (PFAT) Scripts, you must have the following 3rd party utility at the "external" project directory:

* [BIOS Guard Script Tool](https://github.com/allowitsme/big-tool/tree/sdk-compat) (i.e. big_script_tool.py)

#### **Build/Freeze/Compile with PyInstaller**

PyInstaller can build/freeze/compile the utility at all three supported platforms, it is simple to run and gets updated often.

1. Make sure Python 3.7.0 or newer is installed:

> python --version

2. Use pip to install PyInstaller:

> pip3 install pyinstaller

3. Place any appropriate prerequisite at the project directory:

> BIOS Guard Script Tool

4. Build/Freeze/Compile:

> pyinstaller --noupx --onefile \<path-to-project\>\/Dell_PFS_Extract.py

You should find the final utility executable at "dist" folder

#### **Anti-Virus False Positives**

Some Anti-Virus software may claim that the built/frozen/compiled executable contains viruses. Any such detections are false positives, usually of PyInstaller. You can switch to a better Anti-Virus software, report the false positive to their support, add the executable to the exclusions, build/freeze/compile yourself or use the Python script directly.

#### **Pictures**

![]()

## **AMI UCP BIOS Extractor**

![]()

@@ -28,7 +88,7 @@ You can either Drag & Drop or manually enter AMI UCP BIOS executable file(s). Op

#### **Compatibility**

Should work at all Windows, Linux or macOS operating systems which have Python 3.7 support. Windows users who plan to use the already built/frozen/compiled binary must make sure that they have the latest Windows Updates installed which include all required "Universal C Runtime (CRT)" libraries.
Should work at all Windows, Linux or macOS operating systems which have Python 3.7 support.

#### **Prerequisites**

@@ -61,7 +121,7 @@ PyInstaller can build/freeze/compile the utility at all three supported platform

> pyinstaller --noupx --onefile \<path-to-project\>\/AMI_UCP_Extract.py

At dist folder you should find the final utility executable
You should find the final utility executable at "dist" folder

#### **Anti-Virus False Positives**

@@ -92,7 +152,7 @@ You can either Drag & Drop or manually enter AMI BIOS Guard (PFAT) image file(s)

#### **Compatibility**

Should work at all Windows, Linux or macOS operating systems which have Python 3.7 support. Windows users who plan to use the already built/frozen/compiled binary must make sure that they have the latest Windows Updates installed which include all required "Universal C Runtime (CRT)" libraries.
Should work at all Windows, Linux or macOS operating systems which have Python 3.7 support.

#### **Prerequisites**

@@ -120,7 +180,7 @@ PyInstaller can build/freeze/compile the utility at all three supported platform

> pyinstaller --noupx --onefile \<path-to-project\>\/AMI_PFAT_Extract.py

At dist folder you should find the final utility executable
You should find the final utility executable at "dist" folder

#### **Anti-Virus False Positives**

@@ -4,9 +4,9 @@

import os
import subprocess

from common.script_get import get_script_dir
from common.path_ops import get_script_dir
from common.system import get_os_ver
from common.text_ops import padder
from common.system import printer

# Get 7z path
def get_7z_path(static=False):

@@ -36,10 +36,10 @@ def a7z_decompress(in_path, out_path, in_name, padding, static=False):

	if not os.path.isdir(out_path): raise Exception('EXTRACT_DIR_MISSING')

	except:
		print('\n%sError: 7-Zip could not extract %s file %s!' % (padder(padding), in_name, in_path))
		printer('Error: 7-Zip could not extract %s file %s!' % (in_name, in_path), padding)

		return 1

	print('\n%sSuccesfull %s decompression via 7-Zip!' % (padder(padding), in_name))
	printer('Succesfull %s decompression via 7-Zip!' % in_name, padding)

	return 0

@@ -2,12 +2,20 @@

#coding=utf-8

# Get Checksum 16-bit
def checksum16(data):
def get_chk_16(data, value=0, order='little'):
	chk16 = 0

	for idx in range(0, len(data), 2):
		chk16 += int.from_bytes(data[idx:idx + 2], 'little')
		# noinspection PyTypeChecker
		value += int.from_bytes(data[idx:idx + 2], order)

	chk16 &= 0xFFFF
	value &= 0xFFFF

	return chk16
	return value

# Get Checksum 8-bit XOR
def get_chk_8_xor(data, value=0):
	for byte in data:
		value ^= byte

	value ^= 0x0

	return value

@@ -4,9 +4,9 @@

import os
import subprocess

from common.script_get import get_script_dir
from common.path_ops import get_script_dir
from common.system import get_os_ver
from common.text_ops import padder
from common.system import printer

def get_compress_sizes(data):
	size_compress = int.from_bytes(data[0x0:0x4], 'little')

@@ -42,10 +42,10 @@ def efi_decompress(in_path, out_path, padding, comp_type='--uefi'):

	if os.path.getsize(out_path) != size_orig: raise Exception('EFI_DECOMPRESS_ERROR')

	except:
		print('\n%sError: TianoCompress could not extract file %s!' % (padder(padding), in_path))
		printer('Error: TianoCompress could not extract file %s!' % in_path, padding)

		return 1

	print('\n%sSuccesfull EFI/Tiano decompression via TianoCompress!' % padder(padding))
	printer('Succesfull EFI/Tiano decompression via TianoCompress!', padding)

	return 0

@@ -3,11 +3,11 @@

import os
import re
import sys
import inspect
import argparse
from pathlib import Path

from common.script_get import get_script_dir

# Fix illegal/reserved Windows characters
def safe_name(in_name):
	raw_name = repr(in_name).strip("'")

@@ -77,3 +77,15 @@ def process_input_files(argparse_args, sys_argv=None):

	output_path = get_absolute_path(input('\nEnter output directory path: '))

	return input_files, output_path

# https://stackoverflow.com/a/22881871 by jfs
def get_script_dir(follow_symlinks=True):
	if getattr(sys, 'frozen', False):
		path = os.path.abspath(sys.executable)
	else:
		path = inspect.getabsfile(get_script_dir)

	if follow_symlinks:
		path = os.path.realpath(path)

	return os.path.dirname(path)

@@ -5,4 +5,7 @@ import re

PAT_AMI_PFAT = re.compile(b'_AMIPFAT.AMI_BIOS_GUARD_FLASH_CONFIGURATIONS', re.DOTALL)
PAT_AMI_UCP = re.compile(br'\x40\x55\x41\x46.{12}\x40', re.DOTALL)
PAT_DELL_PKG = re.compile(br'\x72\x13\x55\x00.{45}\x37\x7A\x58\x5A', re.DOTALL)
PAT_DELL_HDR = re.compile(br'\xEE\xAA\x76\x1B\xEC\xBB\x20\xF1\xE6\x51.\x78\x9C', re.DOTALL)
PAT_DELL_FTR = re.compile(br'\xEE\xAA\xEE\x8F\x49\x1B\xE8\xAE\x14\x37\x90')
PAT_INTEL_ENG = re.compile(br'\x04\x00{3}[\xA1\xE1]\x00{3}.{8}\x86\x80.{9}\x00\$((MN2)|(MAN))', re.DOTALL)
|
||||||
#!/usr/bin/env python3
|
|
||||||
#coding=utf-8
|
|
||||||
|
|
||||||
# https://stackoverflow.com/a/22881871 by jfs
|
|
||||||
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
import inspect
|
|
||||||
|
|
||||||
def get_script_dir(follow_symlinks=True):
|
|
||||||
if getattr(sys, 'frozen', False):
|
|
||||||
path = os.path.abspath(sys.executable)
|
|
||||||
else:
|
|
||||||
path = inspect.getabsfile(get_script_dir)
|
|
||||||
|
|
||||||
if follow_symlinks:
|
|
||||||
path = os.path.realpath(path)
|
|
||||||
|
|
||||||
return os.path.dirname(path)
|
|
|

@@ -5,6 +5,9 @@ import sys

import ctypes
import traceback

from common.text_ops import padder
from common.path_ops import process_input_files

# Get Python Version (tuple)
def get_py_ver():
	return sys.version_info

@@ -20,7 +23,7 @@ def get_os_ver():

# Check for --auto-exit|-e
def is_auto_exit():
	return '--auto-exit' in sys.argv or '-e' in sys.argv
	return bool('--auto-exit' in sys.argv or '-e' in sys.argv)

# Check Python Version
def check_sys_py():

@@ -40,7 +43,7 @@ def check_sys_os():

	os_tag,os_win,os_sup = get_os_ver()

	if not os_sup:
		print('\nError: Unsupported platform "%s"!' % os_tag)
		printer('Error: Unsupported platform "%s"!' % os_tag)

		if not is_auto_exit():
			input('\nPress enter to exit')

@@ -51,8 +54,8 @@ def check_sys_os():

	if os_win: sys.stdout.reconfigure(encoding='utf-8')

# Show Script Title
def show_title(title):
def script_title(title):
	print('\n' + title)
	printer(title)

	_,os_win,_ = get_os_ver()

@@ -60,12 +63,28 @@ def show_title(title):

	if os_win: ctypes.windll.kernel32.SetConsoleTitleW(title)
	else: sys.stdout.write('\x1b]2;' + title + '\x07')

# Initialize Script
def script_init(arguments, padding=0):
	# Pretty Python exception handler (must be after argparse)
	sys.excepthook = nice_exc_handler

	# Check Python Version (must be after argparse)
	check_sys_py()

	# Check OS Platform (must be after argparse)
	check_sys_os()

	# Process input files and generate output path
	input_files,output_path = process_input_files(arguments, sys.argv)

	return input_files, output_path, padding

# https://stackoverflow.com/a/781074 by Torsten Marek
def nice_exc_handler(exc_type, exc_value, tb):
	if exc_type is KeyboardInterrupt:
		print('\n')
		printer('')
	else:
		print('\nError: Script crashed, please report the following:\n')
		printer('Error: Script crashed, please report the following:\n')

		traceback.print_exception(exc_type, exc_value, tb)

@@ -74,6 +93,17 @@ def nice_exc_handler(exc_type, exc_value, tb):

	sys.exit(3)

# Print or Input Message based on --auto-exit|-e
# Show message(s) while controlling padding, newline, pausing & separator
def print_input(msg):
def printer(in_message='', padd_count=0, new_line=True, pause=False, sep_char=' '):
	(print if is_auto_exit() else input)(msg)
	if type(in_message).__name__ in ('list','tuple'):
		message = sep_char.join(map(str, in_message))
	else:
		message = str(in_message)

	padding = padder(padd_count)

	newline = '\n' if new_line else ''

	output = newline + padding + message

	(input if pause and not is_auto_exit() else print)(output)

@@ -2,5 +2,5 @@

#coding=utf-8

# Generate padding (spaces or tabs)
def padder(count, tab=False):
def padder(padd_count, tab=False):
	return ('\t' if tab else ' ') * count
	return ('\t' if tab else ' ') * padd_count