mirror of
https://github.com/platomav/BIOSUtilities.git
synced 2025-05-13 06:34:42 -04:00
Revamped path-related operations
Fixed dependencies detection. Fixed frozen state support.
This commit is contained in:
parent
44546a67c5
commit
a2eca0aac6
14 changed files with 144 additions and 98 deletions
|
@ -7,12 +7,11 @@ AMI BIOS Guard Extractor
|
||||||
Copyright (C) 2018-2022 Plato Mavropoulos
|
Copyright (C) 2018-2022 Plato Mavropoulos
|
||||||
"""
|
"""
|
||||||
|
|
||||||
title = 'AMI BIOS Guard Extractor v4.0_a6'
|
title = 'AMI BIOS Guard Extractor v4.0_a7'
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
import shutil
|
|
||||||
import ctypes
|
import ctypes
|
||||||
|
|
||||||
# Stop __pycache__ generation
|
# Stop __pycache__ generation
|
||||||
|
@ -20,7 +19,7 @@ sys.dont_write_bytecode = True
|
||||||
|
|
||||||
from common.externals import get_bgs_tool
|
from common.externals import get_bgs_tool
|
||||||
from common.num_ops import get_ordinal
|
from common.num_ops import get_ordinal
|
||||||
from common.path_ops import get_safe_name
|
from common.path_ops import safe_name, make_dirs
|
||||||
from common.patterns import PAT_AMI_PFAT
|
from common.patterns import PAT_AMI_PFAT
|
||||||
from common.struct_ops import get_struct, char, uint8_t, uint16_t, uint32_t
|
from common.struct_ops import get_struct, char, uint8_t, uint16_t, uint32_t
|
||||||
from common.system import script_init, argparse_init, printer
|
from common.system import script_init, argparse_init, printer
|
||||||
|
@ -139,7 +138,7 @@ def get_ami_pfat(input_buffer):
|
||||||
return match, buffer
|
return match, buffer
|
||||||
|
|
||||||
def get_file_name(index, title):
|
def get_file_name(index, title):
|
||||||
return get_safe_name('%0.2d -- %s' % (index, title))
|
return safe_name('%0.2d -- %s' % (index, title))
|
||||||
|
|
||||||
def parse_bg_script(script_data, padding):
|
def parse_bg_script(script_data, padding):
|
||||||
is_opcode_div = len(script_data) % 8 == 0
|
is_opcode_div = len(script_data) % 8 == 0
|
||||||
|
@ -226,9 +225,7 @@ def parse_pfat_file(buffer, output_path, padding):
|
||||||
|
|
||||||
extract_path = os.path.join(output_path + '_extracted')
|
extract_path = os.path.join(output_path + '_extracted')
|
||||||
|
|
||||||
if os.path.isdir(extract_path): shutil.rmtree(extract_path)
|
make_dirs(extract_path, delete=True)
|
||||||
|
|
||||||
os.mkdir(extract_path)
|
|
||||||
|
|
||||||
block_all,block_off,file_count = parse_pfat_hdr(buffer, padding)
|
block_all,block_off,file_count = parse_pfat_hdr(buffer, padding)
|
||||||
|
|
||||||
|
|
|
@ -7,16 +7,14 @@ AMI UCP BIOS Extractor
|
||||||
Copyright (C) 2021-2022 Plato Mavropoulos
|
Copyright (C) 2021-2022 Plato Mavropoulos
|
||||||
"""
|
"""
|
||||||
|
|
||||||
title = 'AMI UCP BIOS Extractor v2.0_a8'
|
title = 'AMI UCP BIOS Extractor v2.0_a9'
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
import shutil
|
|
||||||
import struct
|
import struct
|
||||||
import ctypes
|
import ctypes
|
||||||
import contextlib
|
import contextlib
|
||||||
from pathlib import Path, PurePath
|
|
||||||
|
|
||||||
# Stop __pycache__ generation
|
# Stop __pycache__ generation
|
||||||
sys.dont_write_bytecode = True
|
sys.dont_write_bytecode = True
|
||||||
|
@ -24,7 +22,7 @@ sys.dont_write_bytecode = True
|
||||||
from common.a7z_comp import a7z_decompress, is_7z_supported
|
from common.a7z_comp import a7z_decompress, is_7z_supported
|
||||||
from common.checksums import get_chk_16
|
from common.checksums import get_chk_16
|
||||||
from common.efi_comp import efi_decompress, is_efi_compressed
|
from common.efi_comp import efi_decompress, is_efi_compressed
|
||||||
from common.path_ops import get_comp_path, get_safe_name, get_safe_path
|
from common.path_ops import agnostic_path, safe_name, safe_path, make_dirs
|
||||||
from common.patterns import PAT_AMI_UCP, PAT_INTEL_ENG
|
from common.patterns import PAT_AMI_UCP, PAT_INTEL_ENG
|
||||||
from common.struct_ops import get_struct, char, uint8_t, uint16_t, uint32_t
|
from common.struct_ops import get_struct, char, uint8_t, uint16_t, uint32_t
|
||||||
from common.system import script_init, argparse_init, printer
|
from common.system import script_init, argparse_init, printer
|
||||||
|
@ -215,9 +213,7 @@ def ucp_extract(buffer, out_path, ucp_tag='@UAF', padding=0, is_checksum=False):
|
||||||
|
|
||||||
extract_path = os.path.join(out_path + '_extracted')
|
extract_path = os.path.join(out_path + '_extracted')
|
||||||
|
|
||||||
if os.path.isdir(extract_path): shutil.rmtree(extract_path)
|
make_dirs(extract_path, delete=True)
|
||||||
|
|
||||||
os.mkdir(extract_path)
|
|
||||||
|
|
||||||
uaf_hdr = get_struct(buffer, 0, UafHeader) # Parse @UAF|@HPU Header Structure
|
uaf_hdr = get_struct(buffer, 0, UafHeader) # Parse @UAF|@HPU Header Structure
|
||||||
|
|
||||||
|
@ -283,13 +279,13 @@ def uaf_extract(buffer, extract_path, mod_info, padding=0, is_checksum=False, na
|
||||||
printer('Note: Detected new AMI UCP Module %s (%s) in @NAL!' % (uaf_tag, nal_dict[uaf_tag][1]), padding + 4, pause=True)
|
printer('Note: Detected new AMI UCP Module %s (%s) in @NAL!' % (uaf_tag, nal_dict[uaf_tag][1]), padding + 4, pause=True)
|
||||||
|
|
||||||
# Generate @UAF|@HPU Module File name, depending on whether decompression will be required
|
# Generate @UAF|@HPU Module File name, depending on whether decompression will be required
|
||||||
uaf_sname = get_safe_name(uaf_name + ('.temp' if is_comp else uaf_fext))
|
uaf_sname = safe_name(uaf_name + ('.temp' if is_comp else uaf_fext))
|
||||||
if uaf_tag in nal_dict:
|
if uaf_tag in nal_dict:
|
||||||
uaf_npath = get_safe_path(extract_path, nal_dict[uaf_tag][0])
|
uaf_npath = safe_path(extract_path, nal_dict[uaf_tag][0])
|
||||||
Path.mkdir(Path(uaf_npath), parents=True, exist_ok=True)
|
make_dirs(uaf_npath, exist_ok=True)
|
||||||
uaf_fname = get_safe_path(uaf_npath, uaf_sname)
|
uaf_fname = safe_path(uaf_npath, uaf_sname)
|
||||||
else:
|
else:
|
||||||
uaf_fname = get_safe_path(extract_path, uaf_sname)
|
uaf_fname = safe_path(extract_path, uaf_sname)
|
||||||
|
|
||||||
if is_checksum: chk16_validate(uaf_data_all, uaf_tag, padding + 4)
|
if is_checksum: chk16_validate(uaf_data_all, uaf_tag, padding + 4)
|
||||||
|
|
||||||
|
@ -387,15 +383,15 @@ def uaf_extract(buffer, extract_path, mod_info, padding=0, is_checksum=False, na
|
||||||
|
|
||||||
printer(info_tag + ' : ' + info_value, padding + 8, False) # Print @NAL Module Tag-Path Info
|
printer(info_tag + ' : ' + info_value, padding + 8, False) # Print @NAL Module Tag-Path Info
|
||||||
|
|
||||||
info_part = PurePath(get_comp_path(info_value)).parts # Split OS agnostic path in parts
|
info_part = agnostic_path(info_value).parts # Split OS agnostic path in parts
|
||||||
info_path = to_string(info_part[1:-1], os.sep) # Get path without drive/root or file
|
info_path = to_string(info_part[1:-1], os.sep) # Get path without drive/root or file
|
||||||
info_name = info_part[-1] # Get file from last path part
|
info_name = info_part[-1] # Get file from last path part
|
||||||
|
|
||||||
nal_dict[info_tag] = (info_path,info_name) # Assign a file path & name to each Tag
|
nal_dict[info_tag] = (info_path,info_name) # Assign a file path & name to each Tag
|
||||||
|
|
||||||
# Parse Insyde BIOS @UAF|@HPU Module (@INS)
|
# Parse Insyde BIOS @UAF|@HPU Module (@INS)
|
||||||
if uaf_tag == '@INS' and is_7z_supported(uaf_fname):
|
if uaf_tag == '@INS' and is_7z_supported(uaf_fname, padding + 4):
|
||||||
ins_dir = os.path.join(extract_path, get_safe_name(uaf_tag + '_nested-SFX')) # Generate extraction directory
|
ins_dir = os.path.join(extract_path, safe_name(uaf_tag + '_nested-SFX')) # Generate extraction directory
|
||||||
|
|
||||||
printer('Insyde BIOS 7z SFX Archive:', padding + 4)
|
printer('Insyde BIOS 7z SFX Archive:', padding + 4)
|
||||||
|
|
||||||
|
@ -406,7 +402,7 @@ def uaf_extract(buffer, extract_path, mod_info, padding=0, is_checksum=False, na
|
||||||
pfat_match,pfat_buffer = get_ami_pfat(uaf_data_raw)
|
pfat_match,pfat_buffer = get_ami_pfat(uaf_data_raw)
|
||||||
|
|
||||||
if pfat_match:
|
if pfat_match:
|
||||||
pfat_dir = os.path.join(extract_path, get_safe_name(uaf_name))
|
pfat_dir = os.path.join(extract_path, safe_name(uaf_name))
|
||||||
|
|
||||||
parse_pfat_file(pfat_buffer, pfat_dir, padding + 4)
|
parse_pfat_file(pfat_buffer, pfat_dir, padding + 4)
|
||||||
|
|
||||||
|
@ -422,7 +418,7 @@ def uaf_extract(buffer, extract_path, mod_info, padding=0, is_checksum=False, na
|
||||||
|
|
||||||
# Parse Nested AMI UCP Structure
|
# Parse Nested AMI UCP Structure
|
||||||
if nested_uaf_off:
|
if nested_uaf_off:
|
||||||
uaf_dir = os.path.join(extract_path, get_safe_name(uaf_tag + '_nested-UCP')) # Generate extraction directory
|
uaf_dir = os.path.join(extract_path, safe_name(uaf_tag + '_nested-UCP')) # Generate extraction directory
|
||||||
|
|
||||||
ucp_extract(nested_uaf_bin, uaf_dir, nested_uaf_tag, padding + 4, is_checksum) # Call recursively
|
ucp_extract(nested_uaf_bin, uaf_dir, nested_uaf_tag, padding + 4, is_checksum) # Call recursively
|
||||||
|
|
||||||
|
|
|
@ -7,14 +7,13 @@ Dell PFS Update Extractor
|
||||||
Copyright (C) 2018-2022 Plato Mavropoulos
|
Copyright (C) 2018-2022 Plato Mavropoulos
|
||||||
"""
|
"""
|
||||||
|
|
||||||
title = 'Dell PFS Update Extractor v6.0_a5'
|
title = 'Dell PFS Update Extractor v6.0_a6'
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import io
|
import io
|
||||||
import sys
|
import sys
|
||||||
import lzma
|
import lzma
|
||||||
import zlib
|
import zlib
|
||||||
import shutil
|
|
||||||
import ctypes
|
import ctypes
|
||||||
import contextlib
|
import contextlib
|
||||||
|
|
||||||
|
@ -22,7 +21,7 @@ import contextlib
|
||||||
sys.dont_write_bytecode = True
|
sys.dont_write_bytecode = True
|
||||||
|
|
||||||
from common.checksums import get_chk_8_xor
|
from common.checksums import get_chk_8_xor
|
||||||
from common.path_ops import get_safe_name
|
from common.path_ops import safe_name, make_dirs
|
||||||
from common.patterns import PAT_DELL_HDR, PAT_DELL_FTR, PAT_DELL_PKG
|
from common.patterns import PAT_DELL_HDR, PAT_DELL_FTR, PAT_DELL_PKG
|
||||||
from common.struct_ops import get_struct, char, uint8_t, uint16_t, uint32_t, uint64_t
|
from common.struct_ops import get_struct, char, uint8_t, uint16_t, uint32_t, uint64_t
|
||||||
from common.system import script_init, argparse_init, printer
|
from common.system import script_init, argparse_init, printer
|
||||||
|
@ -243,13 +242,10 @@ def pfs_section_parse(zlib_data, zlib_start, output_path, pfs_name, pfs_index, p
|
||||||
printer('Extracting Dell PFS %d >%s > %s' % (pfs_index, pfs_name, section_name), padding)
|
printer('Extracting Dell PFS %d >%s > %s' % (pfs_index, pfs_name, section_name), padding)
|
||||||
|
|
||||||
# Set PFS ZLIB Section extraction sub-directory path
|
# Set PFS ZLIB Section extraction sub-directory path
|
||||||
section_path = os.path.join(output_path, get_safe_name(section_name))
|
section_path = os.path.join(output_path, safe_name(section_name))
|
||||||
|
|
||||||
# Delete existing extraction sub-directory (not in recursions)
|
# Create extraction sub-directory and delete old (if present, not in recursions)
|
||||||
if os.path.isdir(section_path) and not is_rec: shutil.rmtree(section_path)
|
make_dirs(section_path, delete=(not is_rec), parents=True, exist_ok=True)
|
||||||
|
|
||||||
# Create extraction sub-directory
|
|
||||||
if not os.path.isdir(section_path): os.makedirs(section_path)
|
|
||||||
|
|
||||||
# Store the compressed zlib stream start offset
|
# Store the compressed zlib stream start offset
|
||||||
compressed_start = zlib_start + 0xB
|
compressed_start = zlib_start + 0xB
|
||||||
|
@ -406,7 +402,7 @@ def pfs_extract(buffer, pfs_index, pfs_name, pfs_count, output_path, pfs_padd, i
|
||||||
name_start = info_start + PFS_INFO_LEN + PFS_NAME_LEN # PFS Entry's FileName start offset
|
name_start = info_start + PFS_INFO_LEN + PFS_NAME_LEN # PFS Entry's FileName start offset
|
||||||
name_size = entry_info_mod.CharacterCount * 2 # PFS Entry's FileName buffer total size
|
name_size = entry_info_mod.CharacterCount * 2 # PFS Entry's FileName buffer total size
|
||||||
name_data = filename_info[name_start:name_start + name_size] # PFS Entry's FileName buffer
|
name_data = filename_info[name_start:name_start + name_size] # PFS Entry's FileName buffer
|
||||||
entry_name = get_safe_name(name_data.decode('utf-16').strip()) # PFS Entry's FileName value
|
entry_name = safe_name(name_data.decode('utf-16').strip()) # PFS Entry's FileName value
|
||||||
|
|
||||||
# Show PFS FileName Structure info
|
# Show PFS FileName Structure info
|
||||||
if is_structure:
|
if is_structure:
|
||||||
|
@ -443,7 +439,7 @@ def pfs_extract(buffer, pfs_index, pfs_name, pfs_count, output_path, pfs_padd, i
|
||||||
|
|
||||||
# As Nested PFS Entry Name, we'll use the actual PFS File Name
|
# As Nested PFS Entry Name, we'll use the actual PFS File Name
|
||||||
# Replace common Windows reserved/illegal filename characters
|
# Replace common Windows reserved/illegal filename characters
|
||||||
entry_name = get_safe_name(entry_info.FileName.decode('utf-8').strip('.exe'))
|
entry_name = safe_name(entry_info.FileName.decode('utf-8').strip('.exe'))
|
||||||
|
|
||||||
# As Nested PFS Entry Version, we'll use the actual PFS File Version
|
# As Nested PFS Entry Version, we'll use the actual PFS File Version
|
||||||
entry_version = entry_info.FileVersion.decode('utf-8')
|
entry_version = entry_info.FileVersion.decode('utf-8')
|
||||||
|
@ -537,7 +533,7 @@ def pfs_extract(buffer, pfs_index, pfs_name, pfs_count, output_path, pfs_padd, i
|
||||||
sub_pfs_name = ' %s v%s' % (info_all[pfs_count - 2][1], info_all[pfs_count - 2][2]) if info_all else ' UNKNOWN'
|
sub_pfs_name = ' %s v%s' % (info_all[pfs_count - 2][1], info_all[pfs_count - 2][2]) if info_all else ' UNKNOWN'
|
||||||
|
|
||||||
# Set the sub-PFS output path (create sub-folders for each sub-PFS and its ZLIB sections)
|
# Set the sub-PFS output path (create sub-folders for each sub-PFS and its ZLIB sections)
|
||||||
sub_pfs_path = os.path.join(output_path, str(pfs_count) + get_safe_name(sub_pfs_name))
|
sub_pfs_path = os.path.join(output_path, str(pfs_count) + safe_name(sub_pfs_name))
|
||||||
|
|
||||||
# Recursively call the PFS ZLIB Section Parser function for the sub-PFS Volume (pfs_index = pfs_count)
|
# Recursively call the PFS ZLIB Section Parser function for the sub-PFS Volume (pfs_index = pfs_count)
|
||||||
pfs_section_parse(entry_data, offset, sub_pfs_path, sub_pfs_name, pfs_count, pfs_count, True, pfs_padd + 4, is_structure, is_advanced)
|
pfs_section_parse(entry_data, offset, sub_pfs_path, sub_pfs_name, pfs_count, pfs_count, True, pfs_padd + 4, is_structure, is_advanced)
|
||||||
|
@ -844,7 +840,7 @@ def chk_pfs_ftr(footer_buffer, data_buffer, data_size, text, padding, is_structu
|
||||||
def pfs_file_write(bin_buff, bin_name, bin_type, full_name, out_path, padding, is_structure=True, is_advanced=True):
|
def pfs_file_write(bin_buff, bin_name, bin_type, full_name, out_path, padding, is_structure=True, is_advanced=True):
|
||||||
# Store Data/Metadata Signature (advanced users only)
|
# Store Data/Metadata Signature (advanced users only)
|
||||||
if bin_name.startswith('sign'):
|
if bin_name.startswith('sign'):
|
||||||
final_name = '%s.%s.sig' % (get_safe_name(full_name), bin_name.split('_')[1])
|
final_name = '%s.%s.sig' % (safe_name(full_name), bin_name.split('_')[1])
|
||||||
final_path = os.path.join(out_path, final_name)
|
final_path = os.path.join(out_path, final_name)
|
||||||
|
|
||||||
with open(final_path, 'wb') as pfs_out: pfs_out.write(bin_buff) # Write final Data/Metadata Signature
|
with open(final_path, 'wb') as pfs_out: pfs_out.write(bin_buff) # Write final Data/Metadata Signature
|
||||||
|
@ -857,7 +853,7 @@ def pfs_file_write(bin_buff, bin_name, bin_type, full_name, out_path, padding, i
|
||||||
# Some Data may be Text or XML files with useful information for non-advanced users
|
# Some Data may be Text or XML files with useful information for non-advanced users
|
||||||
is_text,final_data,file_ext,write_mode = bin_is_text(bin_buff, bin_type, bin_name == 'meta', padding, is_structure, is_advanced)
|
is_text,final_data,file_ext,write_mode = bin_is_text(bin_buff, bin_type, bin_name == 'meta', padding, is_structure, is_advanced)
|
||||||
|
|
||||||
final_name = '%s%s' % (get_safe_name(full_name), bin_ext[:-4] + file_ext if is_text else bin_ext)
|
final_name = '%s%s' % (safe_name(full_name), bin_ext[:-4] + file_ext if is_text else bin_ext)
|
||||||
final_path = os.path.join(out_path, final_name)
|
final_path = os.path.join(out_path, final_name)
|
||||||
|
|
||||||
with open(final_path, write_mode) as pfs_out: pfs_out.write(final_data) # Write final Data/Metadata Payload
|
with open(final_path, write_mode) as pfs_out: pfs_out.write(final_data) # Write final Data/Metadata Payload
|
||||||
|
|
|
@ -57,7 +57,7 @@ PyInstaller can build/freeze/compile the utility at all three supported platform
|
||||||
|
|
||||||
4. Build/Freeze/Compile:
|
4. Build/Freeze/Compile:
|
||||||
|
|
||||||
> pyinstaller --noupx --onefile \<path-to-project\>\/Dell_PFS_Extract.py
|
> pyinstaller --add-data="external/*;external/" --noupx --onefile \<path-to-project\>\/Dell_PFS_Extract.py
|
||||||
|
|
||||||
You should find the final utility executable at "dist" folder
|
You should find the final utility executable at "dist" folder
|
||||||
|
|
||||||
|
@ -121,7 +121,7 @@ PyInstaller can build/freeze/compile the utility at all three supported platform
|
||||||
|
|
||||||
4. Build/Freeze/Compile:
|
4. Build/Freeze/Compile:
|
||||||
|
|
||||||
> pyinstaller --noupx --onefile \<path-to-project\>\/AMI_UCP_Extract.py
|
> pyinstaller --add-data="external/*;external/" --noupx --onefile \<path-to-project\>\/AMI_UCP_Extract.py
|
||||||
|
|
||||||
You should find the final utility executable at "dist" folder
|
You should find the final utility executable at "dist" folder
|
||||||
|
|
||||||
|
@ -181,7 +181,7 @@ PyInstaller can build/freeze/compile the utility at all three supported platform
|
||||||
|
|
||||||
4. Build/Freeze/Compile:
|
4. Build/Freeze/Compile:
|
||||||
|
|
||||||
> pyinstaller --noupx --onefile \<path-to-project\>\/AMI_PFAT_Extract.py
|
> pyinstaller --add-data="external/*;external/" --noupx --onefile \<path-to-project\>\/AMI_PFAT_Extract.py
|
||||||
|
|
||||||
You should find the final utility executable at "dist" folder
|
You should find the final utility executable at "dist" folder
|
||||||
|
|
||||||
|
|
|
@ -1,10 +1,14 @@
|
||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
#coding=utf-8
|
#coding=utf-8
|
||||||
|
|
||||||
|
"""
|
||||||
|
Copyright (C) 2022 Plato Mavropoulos
|
||||||
|
"""
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import subprocess
|
import subprocess
|
||||||
|
|
||||||
from common.path_ops import get_script_dir
|
from common.path_ops import project_root, safe_path
|
||||||
from common.system import get_os_ver
|
from common.system import get_os_ver
|
||||||
from common.system import printer
|
from common.system import printer
|
||||||
|
|
||||||
|
@ -12,29 +16,27 @@ from common.system import printer
|
||||||
def get_7z_path(static=False):
|
def get_7z_path(static=False):
|
||||||
exec_name = '7z.exe' if get_os_ver()[1] else ('7zzs' if static else '7zz')
|
exec_name = '7z.exe' if get_os_ver()[1] else ('7zzs' if static else '7zz')
|
||||||
|
|
||||||
exec_path = os.path.join(get_script_dir(), '..', 'external', exec_name)
|
return safe_path(project_root(), ['external',exec_name])
|
||||||
|
|
||||||
return exec_path
|
|
||||||
|
|
||||||
# Check if file is 7z supported
|
# Check if file is 7z supported
|
||||||
def is_7z_supported(in_path, static=False):
|
def is_7z_supported(in_path, padding=0, static=False):
|
||||||
try:
|
try:
|
||||||
subprocess.run([get_7z_path(static), 't', in_path, '-bso0', '-bse0', '-bsp0'], check=True)
|
subprocess.run([get_7z_path(static), 't', in_path, '-bso0', '-bse0', '-bsp0'], check=True)
|
||||||
|
|
||||||
except:
|
except:
|
||||||
|
printer('Error: 7-Zip could not check support for file %s!' % in_path, padding)
|
||||||
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
# Archive decompression via 7-Zip
|
# Archive decompression via 7-Zip
|
||||||
def a7z_decompress(in_path, out_path, in_name, padding, static=False):
|
def a7z_decompress(in_path, out_path, in_name, padding=0, static=False):
|
||||||
if not in_name: in_name = 'archive'
|
if not in_name: in_name = 'archive'
|
||||||
|
|
||||||
try:
|
try:
|
||||||
subprocess.run([get_7z_path(static), 'x', '-aou', '-bso0', '-bse0', '-bsp0', '-o' + out_path, in_path], check=True)
|
subprocess.run([get_7z_path(static), 'x', '-aou', '-bso0', '-bse0', '-bsp0', '-o' + out_path, in_path], check=True)
|
||||||
|
|
||||||
if not os.path.isdir(out_path): raise Exception('EXTRACT_DIR_MISSING')
|
if not os.path.isdir(out_path): raise Exception('EXTRACT_DIR_MISSING')
|
||||||
|
|
||||||
except:
|
except:
|
||||||
printer('Error: 7-Zip could not extract %s file %s!' % (in_name, in_path), padding)
|
printer('Error: 7-Zip could not extract %s file %s!' % (in_name, in_path), padding)
|
||||||
|
|
||||||
|
|
|
@ -1,6 +1,10 @@
|
||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
#coding=utf-8
|
#coding=utf-8
|
||||||
|
|
||||||
|
"""
|
||||||
|
Copyright (C) 2022 Plato Mavropoulos
|
||||||
|
"""
|
||||||
|
|
||||||
# Get Checksum 16-bit
|
# Get Checksum 16-bit
|
||||||
def get_chk_16(data, value=0, order='little'):
|
def get_chk_16(data, value=0, order='little'):
|
||||||
for idx in range(0, len(data), 2):
|
for idx in range(0, len(data), 2):
|
||||||
|
|
|
@ -1,10 +1,14 @@
|
||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
#coding=utf-8
|
#coding=utf-8
|
||||||
|
|
||||||
|
"""
|
||||||
|
Copyright (C) 2022 Plato Mavropoulos
|
||||||
|
"""
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import subprocess
|
import subprocess
|
||||||
|
|
||||||
from common.path_ops import get_script_dir
|
from common.path_ops import project_root, safe_path
|
||||||
from common.system import get_os_ver
|
from common.system import get_os_ver
|
||||||
from common.system import printer
|
from common.system import printer
|
||||||
|
|
||||||
|
@ -25,22 +29,19 @@ def is_efi_compressed(data, strict=True):
|
||||||
return check_diff and check_size
|
return check_diff and check_size
|
||||||
|
|
||||||
# Get TianoCompress path
|
# Get TianoCompress path
|
||||||
def tianocompress_path():
|
def get_tiano_path():
|
||||||
exec_name = 'TianoCompress' + ('.exe' if get_os_ver()[1] else '')
|
exec_name = 'TianoCompress' + ('.exe' if get_os_ver()[1] else '')
|
||||||
|
|
||||||
exec_path = os.path.join(get_script_dir(), '..', 'external', exec_name)
|
return safe_path(project_root(), ['external',exec_name])
|
||||||
|
|
||||||
return exec_path
|
|
||||||
|
|
||||||
# EFI/Tiano Decompression via TianoCompress
|
# EFI/Tiano Decompression via TianoCompress
|
||||||
def efi_decompress(in_path, out_path, padding, comp_type='--uefi'):
|
def efi_decompress(in_path, out_path, padding=0, comp_type='--uefi'):
|
||||||
try:
|
try:
|
||||||
subprocess.run([tianocompress_path(), '-d', in_path, '-o', out_path, '-q', comp_type], check=True, stdout=subprocess.DEVNULL)
|
subprocess.run([get_tiano_path(), '-d', in_path, '-o', out_path, '-q', comp_type], check=True, stdout=subprocess.DEVNULL)
|
||||||
|
|
||||||
with open(in_path, 'rb') as file: _,size_orig = get_compress_sizes(file.read())
|
with open(in_path, 'rb') as file: _,size_orig = get_compress_sizes(file.read())
|
||||||
|
|
||||||
if os.path.getsize(out_path) != size_orig: raise Exception('EFI_DECOMPRESS_ERROR')
|
if os.path.getsize(out_path) != size_orig: raise Exception('EFI_DECOMPRESS_ERROR')
|
||||||
|
|
||||||
except:
|
except:
|
||||||
printer('Error: TianoCompress could not extract file %s!' % in_path, padding)
|
printer('Error: TianoCompress could not extract file %s!' % in_path, padding)
|
||||||
|
|
||||||
|
|
|
@ -1,6 +1,10 @@
|
||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
#coding=utf-8
|
#coding=utf-8
|
||||||
|
|
||||||
|
"""
|
||||||
|
Copyright (C) 2022 Plato Mavropoulos
|
||||||
|
"""
|
||||||
|
|
||||||
# https://github.com/allowitsme/big-tool by Dmitry Frolov
|
# https://github.com/allowitsme/big-tool by Dmitry Frolov
|
||||||
def get_bgs_tool():
|
def get_bgs_tool():
|
||||||
try:
|
try:
|
||||||
|
|
|
@ -1,8 +1,11 @@
|
||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
#coding=utf-8
|
#coding=utf-8
|
||||||
|
|
||||||
# https://leancrew.com/all-this/2020/06/ordinals-in-python/ by Dr. Drang
|
"""
|
||||||
|
Copyright (C) 2022 Plato Mavropoulos
|
||||||
|
"""
|
||||||
|
|
||||||
|
# https://leancrew.com/all-this/2020/06/ordinals-in-python/ by Dr. Drang
|
||||||
def get_ordinal(number):
|
def get_ordinal(number):
|
||||||
s = ('th', 'st', 'nd', 'rd') + ('th',) * 10
|
s = ('th', 'st', 'nd', 'rd') + ('th',) * 10
|
||||||
|
|
||||||
|
|
|
@ -1,68 +1,89 @@
|
||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
#coding=utf-8
|
#coding=utf-8
|
||||||
|
|
||||||
|
"""
|
||||||
|
Copyright (C) 2022 Plato Mavropoulos
|
||||||
|
"""
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
import inspect
|
import shutil
|
||||||
from pathlib import Path
|
from pathlib import Path, PurePath
|
||||||
|
|
||||||
from common.text_ops import to_string
|
from common.text_ops import to_string
|
||||||
|
|
||||||
# Fix illegal/reserved Windows characters
|
# Fix illegal/reserved Windows characters
|
||||||
def get_safe_name(in_name):
|
def safe_name(in_name):
|
||||||
name_repr = repr(in_name).strip("'")
|
name_repr = repr(in_name).strip("'")
|
||||||
|
|
||||||
return re.sub(r'[\\/:"*?<>|]+', '_', name_repr)
|
return re.sub(r'[\\/:"*?<>|]+', '_', name_repr)
|
||||||
|
|
||||||
# Check and attempt to fix illegal/unsafe OS path traversals
|
# Check and attempt to fix illegal/unsafe OS path traversals
|
||||||
def get_safe_path(base_path, user_paths):
|
def safe_path(base_path, user_paths):
|
||||||
# Convert base path to absolute path
|
# Convert base path to absolute path
|
||||||
base_path = get_real_path(base_path)
|
base_path = real_path(base_path)
|
||||||
|
|
||||||
# Convert user path(s) to absolute path with OS separators
|
# Merge user path(s) to string with OS separators
|
||||||
user_path = to_string(user_paths, os.sep)
|
user_path = to_string(user_paths, os.sep)
|
||||||
|
|
||||||
# Create target path from base + requested user path
|
# Create target path from base + requested user path
|
||||||
target_path = get_norm_path(base_path, user_path)
|
target_path = norm_path(base_path, user_path)
|
||||||
|
|
||||||
# Check if target path is OS illegal/unsafe
|
# Check if target path is OS illegal/unsafe
|
||||||
if is_safe_path(base_path, target_path):
|
if is_safe_path(base_path, target_path):
|
||||||
return target_path
|
return target_path
|
||||||
|
|
||||||
# Re-create target path from base + leveled/safe illegal "path" (now file)
|
# Re-create target path from base + leveled/safe illegal "path" (now file)
|
||||||
nuked_path = get_norm_path(base_path, get_safe_name(user_path))
|
nuked_path = norm_path(base_path, safe_name(user_path))
|
||||||
|
|
||||||
# Check if illegal path leveling worked
|
# Check if illegal path leveling worked
|
||||||
if is_safe_path(base_path, nuked_path):
|
if is_safe_path(base_path, nuked_path):
|
||||||
return nuked_path
|
return nuked_path
|
||||||
|
|
||||||
# Still illegal, create fallback base path + placeholder file
|
# Still illegal, raise exception to halt execution
|
||||||
failed_path = get_norm_path(base_path, 'illegal_path_traversal')
|
raise Exception(f'ILLEGAL_PATH_TRAVERSAL: {user_path}')
|
||||||
|
|
||||||
return failed_path
|
|
||||||
|
|
||||||
# Check for illegal/unsafe OS path traversal
|
# Check for illegal/unsafe OS path traversal
|
||||||
def is_safe_path(base_path, target_path):
|
def is_safe_path(base_path, target_path):
|
||||||
base_path = get_real_path(base_path)
|
base_path = real_path(base_path)
|
||||||
|
|
||||||
target_path = get_real_path(target_path)
|
target_path = real_path(target_path)
|
||||||
|
|
||||||
common_path = os.path.commonpath((base_path, target_path))
|
common_path = os.path.commonpath((base_path, target_path))
|
||||||
|
|
||||||
return base_path == common_path
|
return base_path == common_path
|
||||||
|
|
||||||
# Create normalized base path + OS separator + user path
|
# Create normalized base path + OS separator + user path
|
||||||
def get_norm_path(base_path, user_path):
|
def norm_path(base_path, user_path):
|
||||||
return os.path.normpath(base_path + os.sep + user_path)
|
return os.path.normpath(base_path + os.sep + user_path)
|
||||||
|
|
||||||
# Get absolute path, resolving any symlinks
|
# Get absolute path, resolving any symlinks
|
||||||
def get_real_path(in_path):
|
def real_path(in_path):
|
||||||
return str(Path(in_path).resolve())
|
return os.path.realpath(in_path)
|
||||||
|
|
||||||
# Get Windows/Posix OS compatible path
|
# Get Windows/Posix OS agnostic path
|
||||||
def get_comp_path(in_path):
|
def agnostic_path(in_path):
|
||||||
return in_path.replace('\\', os.sep)
|
return PurePath(in_path.replace('\\', os.sep))
|
||||||
|
|
||||||
|
# Get absolute parent of path
|
||||||
|
def path_parent(in_path):
|
||||||
|
return Path(in_path).parent.absolute()
|
||||||
|
|
||||||
|
# Check if path is absolute
|
||||||
|
def is_path_absolute(in_path):
|
||||||
|
return Path(in_path).is_absolute()
|
||||||
|
|
||||||
|
# Create folder(s), controlling parents, existence and prior deletion
|
||||||
|
def make_dirs(in_path, parents=True, exist_ok=False, delete=False):
|
||||||
|
if delete: del_dirs(in_path)
|
||||||
|
|
||||||
|
Path.mkdir(Path(in_path), parents=parents, exist_ok=exist_ok)
|
||||||
|
|
||||||
|
# Delete folder(s), if present
|
||||||
|
def del_dirs(in_path):
|
||||||
|
if Path(in_path).is_dir():
|
||||||
|
shutil.rmtree(in_path)
|
||||||
|
|
||||||
# Walk path to get all files
|
# Walk path to get all files
|
||||||
def get_path_files(in_path):
|
def get_path_files(in_path):
|
||||||
|
@ -74,20 +95,18 @@ def get_path_files(in_path):
|
||||||
|
|
||||||
return path_files
|
return path_files
|
||||||
|
|
||||||
# Get parent of path
|
|
||||||
def get_path_parent(in_path):
|
|
||||||
return Path(in_path).parent.absolute()
|
|
||||||
|
|
||||||
# Get absolute file path of argparse object
|
# Get absolute file path of argparse object
|
||||||
def get_argparse_path(argparse_path):
|
def get_argparse_path(argparse_path):
|
||||||
script_dir = get_path_parent(get_script_dir())
|
|
||||||
|
|
||||||
if not argparse_path:
|
if not argparse_path:
|
||||||
absolute_path = script_dir # Use input file directory if no user path is specified
|
# Use runtime directory if no user path is specified
|
||||||
|
absolute_path = runtime_root()
|
||||||
else:
|
else:
|
||||||
# Check if user specified path is absolute, otherwise convert it to input file relative
|
# Check if user specified path is absolute
|
||||||
if Path(argparse_path).is_absolute(): absolute_path = argparse_path
|
if is_path_absolute(argparse_path):
|
||||||
else: absolute_path = os.path.join(script_dir, argparse_path)
|
absolute_path = argparse_path
|
||||||
|
# Otherwise, make it runtime directory relative
|
||||||
|
else:
|
||||||
|
absolute_path = safe_path(runtime_root(), argparse_path)
|
||||||
|
|
||||||
return absolute_path
|
return absolute_path
|
||||||
|
|
||||||
|
@ -104,22 +123,31 @@ def process_input_files(argparse_args, sys_argv=None):
|
||||||
else:
|
else:
|
||||||
input_files = [file.name for file in argparse_args.files]
|
input_files = [file.name for file in argparse_args.files]
|
||||||
|
|
||||||
output_path = get_argparse_path(argparse_args.output_dir or argparse_args.input_dir)
|
# Set output path via argparse Output Path or argparse Input Path or first input file Path
|
||||||
|
output_path = argparse_args.output_dir or argparse_args.input_dir or path_parent(input_files[0])
|
||||||
else:
|
else:
|
||||||
# Script w/o parameters
|
# Script w/o parameters
|
||||||
input_path_user = input('\nEnter input directory path: ')
|
input_path_user = input('\nEnter input directory path: ')
|
||||||
input_path_full = get_argparse_path(input_path_user) if input_path_user else ''
|
input_path_full = get_argparse_path(input_path_user) if input_path_user else ''
|
||||||
input_files = get_path_files(input_path_full)
|
input_files = get_path_files(input_path_full)
|
||||||
|
|
||||||
output_path = get_argparse_path(input('\nEnter output directory path: '))
|
output_path = input('\nEnter output directory path: ')
|
||||||
|
|
||||||
return input_files, output_path
|
output_path_final = get_argparse_path(output_path)
|
||||||
|
|
||||||
# https://stackoverflow.com/a/22881871 by jfs
|
return input_files, output_path_final
|
||||||
def get_script_dir():
|
|
||||||
|
# Get project's root directory
def project_root():
    """Return the resolved root directory of the project tree."""
    # Two levels above this module — presumably common/<module>.py
    # sits one folder below the repository root (TODO confirm layout)
    root = Path(__file__).parent.parent

    return real_path(root)
|
||||||
|
|
||||||
|
# Get runtime's root directory
def runtime_root():
    """Return the root directory at runtime.

    Frozen executables (sys.frozen set, e.g. by PyInstaller — confirm
    packager) resolve to the executable's folder; normal interpreter
    runs resolve to the project root.
    """
    if getattr(sys, 'frozen', False):
        # Frozen state: anchor at the bundled executable
        root = Path(sys.executable).parent
    else:
        root = project_root()

    return real_path(root)
|
||||||
|
|
|
@ -1,6 +1,10 @@
|
||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
#coding=utf-8
|
#coding=utf-8
|
||||||
|
|
||||||
|
"""
|
||||||
|
Copyright (C) 2022 Plato Mavropoulos
|
||||||
|
"""
|
||||||
|
|
||||||
import re
|
import re
|
||||||
|
|
||||||
# AMI PFAT signature: "_AMIPFAT" header followed by the BIOS Guard flash
# configurations marker; the "." matches any single byte (re.DOTALL)
PAT_AMI_PFAT = re.compile(b'_AMIPFAT.AMI_BIOS_GUARD_FLASH_CONFIGURATIONS', re.DOTALL)
|
||||||
|
|
|
@ -1,7 +1,9 @@
|
||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
#coding=utf-8
|
#coding=utf-8
|
||||||
|
|
||||||
# https://github.com/skochinsky/me-tools/blob/master/me_unpack.py by Igor Skochinsky
|
"""
|
||||||
|
Copyright (C) 2022 Plato Mavropoulos
|
||||||
|
"""
|
||||||
|
|
||||||
import ctypes
|
import ctypes
|
||||||
|
|
||||||
|
@ -11,6 +13,7 @@ uint16_t = ctypes.c_ushort
|
||||||
uint32_t = ctypes.c_uint
|
uint32_t = ctypes.c_uint
|
||||||
uint64_t = ctypes.c_uint64
|
uint64_t = ctypes.c_uint64
|
||||||
|
|
||||||
|
# https://github.com/skochinsky/me-tools/blob/master/me_unpack.py by Igor Skochinsky
|
||||||
def get_struct(buffer, start_offset, class_name, param_list=None):
|
def get_struct(buffer, start_offset, class_name, param_list=None):
|
||||||
if param_list is None: param_list = []
|
if param_list is None: param_list = []
|
||||||
|
|
||||||
|
|
|
@ -1,6 +1,10 @@
|
||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
#coding=utf-8
|
#coding=utf-8
|
||||||
|
|
||||||
|
"""
|
||||||
|
Copyright (C) 2022 Plato Mavropoulos
|
||||||
|
"""
|
||||||
|
|
||||||
import sys
|
import sys
|
||||||
import ctypes
|
import ctypes
|
||||||
import argparse
|
import argparse
|
||||||
|
|
|
@ -1,6 +1,10 @@
|
||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
#coding=utf-8
|
#coding=utf-8
|
||||||
|
|
||||||
|
"""
|
||||||
|
Copyright (C) 2022 Plato Mavropoulos
|
||||||
|
"""
|
||||||
|
|
||||||
# Generate padding (spaces or tabs)
def padder(padd_count, tab=False):
    """Return a padding string of padd_count tabs (tab=True) or spaces."""
    pad_char = '\t' if tab else ' '

    return pad_char * padd_count
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue