Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- #!/usr/bin/python
- from collections import namedtuple
- import struct
- import sys
- import os
# Human-readable labels for the idata entries of each known HDIF
# eyecatcher, indexed first by eyecatcher and then by idata index.
# Used only to pick nicer file names for the extracted blobs.
field_names = {
    "SPPCIA": ["core_uniq_data", "timebase_struct", "cache_size", "thread_data_array", "cpu_attributes"],
    "SPPCRD": ["chip_info", "chiptod", "fru_id", "fru_vpd", "chip_vpd", "hosti2c", "pnor_info", "smp_link", "chip_ec"],
    "MS VPD": ["max_msaddr", "total_ms", "page_mover", "trace_areas", "ue_addr", "hb_reserved"],
    "MSAREA": ["fru_id", "fru_vpd", "ms_area_id", "ms_size", "addr_range_array", "mem_affinity", "chip_ec", "deprecated", "hosti2c"],
    "RAM ": ["fru_id", "fru_vpd", "ram_area_id", "ram_size"],
    "IPLPMS": ["system_params", "ipl_params", "ipl_time", "sp_priv_ipl_params", "platform_dump", "hmc_conns", "cuod", "mfg_mode_data", "serial_port_loc", "features"],
    "IO HUB": ["fru_id", "fru_vpd", "cec_fru_id", "iohubs"],
    "IO KID": ["fru_id", "fru_vpd", "padding?"],
    "IOSLOT": ["slot_map", "slot_details"],
    "SLCA ": ["slca"],
    "SPINFO": ["fru_id", "kw_vpd", "sp_impl", "deprecated", "sp_mem_loc", "iopath"],
}
# Layout of the common HDIF header that prefixes every structure in the dump.
hdif_fields = "d1f0 eyecatcher instance version size header_size idata_offset idata_count child_count child_off"
hdif_hdr = ">H6sHHIIIHHI"

# Record types used while walking the dump.
HDIF = namedtuple('HDIF', hdif_fields)
IData = namedtuple("IData", ["offset", "size"])
ChildPtr = namedtuple("ChildPtr", ["offset", "size", "count"])
IdataArray = namedtuple("IDataArray", ["offset", "count", "elem_alloc", "elem_actual"])

# TODO: Factor out the generic HDIF parsing stuff into a seperate file and
# make all this a bit more OOPy

def hdr(offset):
    """Decode the HDIF header located at `offset` in the global `contents`."""
    raw = struct.unpack_from(hdif_hdr, contents, offset)
    return HDIF._make(raw)
# Internal data crap
def idata_off(off, index):
    """Read idata pointer record `index` of the HDIF structure at `off`.

    Returns an IData(offset, size); offset is relative to the structure.
    """
    entry_size = struct.calcsize(">LL")
    # Avoid shadowing this function's own name with the local offset.
    entry_off = off + hdr(off).idata_offset + index * entry_size
    return IData._make(struct.unpack_from(">LL", contents, entry_off))
# NOTE(review): this helper is shadowed by a redefinition of the same name
# further down the file; the later definition is the one that takes effect.
def idata_array_hdr(offset):
    # Decode the ">LLLL" idata-array header at `offset` into an IdataArray.
    return IdataArray._make(struct.unpack_from(">LLLL", contents, offset))
def idata_name(hdif, index):
    """Return an "<index>-<name>" label for idata entry `index` of `hdif`.

    The name comes from the field_names table keyed by the structure's
    eyecatcher; an unknown eyecatcher or out-of-range index falls back to
    the generic "idata".
    """
    # dict.has_key() was removed in Python 3; .get() works on both 2 and 3.
    names = field_names.get(hdif.eyecatcher, [])
    try:
        name = names[index]
    except IndexError:
        name = "idata"
    return "{:d}-{:s}".format(index, name)
def get_idata(hdr_offset):
    """Return the raw bytes of every idata blob of the HDIF at `hdr_offset`."""
    count = hdr(hdr_offset).idata_count
    blobs = []
    for index in range(count):
        entry = idata_off(hdr_offset, index)
        begin = hdr_offset + entry.offset
        blobs.append(contents[begin:begin + entry.size])
    return blobs
def get_child_ptr(off, index):
    """Read child-pointer record `index` of the HDIF structure at `off`."""
    rec_fmt = ">LLL"
    rec_off = off + hdr(off).child_off + index * struct.calcsize(rec_fmt)
    return ChildPtr._make(struct.unpack_from(rec_fmt, contents, rec_off))
def get_child_ptrs(hdr_offset):
    """Return every ChildPtr record of the HDIF structure at `hdr_offset`."""
    count = hdr(hdr_offset).child_count
    return [get_child_ptr(hdr_offset, n) for n in range(count)]
# XXX: look into making generators for the idata/child structures
def idata_array_hdr(offset):
    """Decode the ">LLLL" idata-array header at `offset` in the dump.

    Bug fixed: the previous version built a fresh namedtuple type on every
    call with the typename "IData Array" -- a space is not a valid Python
    identifier, so namedtuple() raised ValueError as soon as this ran.
    Reuse the module-level IdataArray type instead, matching the earlier
    definition of this helper.
    """
    return IdataArray._make(struct.unpack_from(">LLLL", contents, offset))
def get_idata_arr_items(data):
    """Split an idata-array blob into its member elements.

    `data` starts with a ">LLLL" header: offset of the first element,
    element count, allocated element size, and actual (used) element
    size.  Returns a list of `count` slices of `actual` bytes each,
    spaced `alloc` bytes apart starting at `offset`.

    Fixed to use range() instead of the Python-2-only xrange() so the
    helper runs under both Python 2 and 3.
    """
    offset, count, alloc, actual = struct.unpack_from(">LLLL", data, 0)
    items = []
    for i in range(count):
        start = offset + i * alloc
        items.append(data[start:start + actual])
    return items
def dump_into(filename, blob):
    """Write `blob` to `filename`, replacing any existing file.

    Fixed: the blobs dumped here are raw HDAT structures, so open in
    binary mode ("wb", not "w") to avoid newline/encoding mangling, and
    use a context manager so the handle is closed even on error.
    """
    with open(filename, "wb") as f:
        f.write(blob)
def child_name(offset, i, j):
    """Directory name for child `j` of child-pointer `i` at `offset`.

    Uses the child's eyecatcher when it carries a valid HDIF magic,
    otherwise a generic positional name.
    """
    child = hdr(offset)
    if child.d1f0 == 0xd1f0:
        return "{:s}-{:d}".format(child.eyecatcher.strip(), j)
    return "child-{:d}-{:d}".format(i, j)
def extract_struct(offset, into_dir):
    """Recursively dump the HDIF structure at `offset` into `into_dir`.

    Writes one file per idata blob (named via idata_name), a raw 'hdif'
    blob for the structure itself, and one subdirectory per child
    structure, recursing into each child.  Reads the module-level
    `contents` buffer and `blob_limit` set by the main script.
    """
    hdif = hdr(offset);
    print("Extracting from {:#6x} HDIF {:s}".format(offset, hdif))
    # Every valid HDIF structure starts with the 0xd1f0 magic.
    if hdif.d1f0 != 0xd1f0:
        # XXX: make this fuzz around the pointer for a HDIF header
        print("WARNING: Broken child pointer!");
        return
    # Dump each internal-data blob under a human-readable name.
    for i,idata in enumerate(get_idata(offset)):
        print("\t {:s}: {:x}".format(idata_name(hdif, i), offset + idata_off(offset, i).offset))
        dump_into("{:s}/{:s}".format(into_dir, idata_name(hdif, i)), idata)
    # Raw dump of this structure.
    # NOTE(review): this slices to offset+blob_limit while the child dump
    # below slices to the absolute blob_limit -- the two bounds look
    # mutually inconsistent; confirm which is intended before relying on
    # the raw 'hdif' blobs.
    dump_into(into_dir + '/hdif', contents[offset:offset+blob_limit])
    # Each child pointer i describes c.count children of c.size bytes,
    # laid out contiguously starting at offset + c.offset.
    for i, c in enumerate(get_child_ptrs(offset)):
        for j in xrange(c.count):
            child_offset = offset + c.offset + j * c.size;
            print("\t extracting children: {:s}".format(c))
            dirname = "{:s}/{:s}/".format(into_dir, child_name(child_offset, i, j))
            os.mkdir(dirname)
            dump_into(dirname + 'hdif', contents[child_offset:blob_limit])
            extract_struct(offset + c.offset + j * c.size, dirname);
# root array ntuple
if len(sys.argv) < 2:
    print("usage: hdatpack <dump file name>")
    sys.exit(1);
# Slurp the whole dump into memory; every helper above indexes into this
# module-level `contents` buffer.
# NOTE(review): opened in text mode "r" even though the dump is binary --
# works on Python 2 / Unix, but would need "rb" elsewhere; confirm before
# porting.
f = open(sys.argv[1], "r")
contents = f.read()
f.close()
# FIXME: make this scan for the SPIRA-S
root_offset = 0;
root = hdr(root_offset)
if root.eyecatcher != 'SPIRAS':
    print("This script only supports the new style SPIRA-S, exiting")
    sys.exit(1)
# Parse the root array and find the common base offset
tuples = get_idata_arr_items(get_idata(root_offset)[0]);
# base addr, alloc count, actual count, alloc len, actual len, TCE offset
tuples = [struct.unpack(">QHHIII", t) for t in tuples]
tuples = [t for t in tuples if t[0] > 0] # filter out unpopulated tuples
# find the "base" offset, skiboot uses 0x31200000 to 0x08000000 for the SPIRA
# heap, but this should work with PHYP HDATs too
heap_base = min([t[0] & 0xffff0000 for t in tuples])
print('Guesstimated heap address: {:#x}'.format(heap_base))
# Everything gets unpacked under "<dumpfile>-unpacked/".
dirname = '{:s}-unpacked'.format(sys.argv[1]);
os.mkdir(dirname);
# blob_limit is a stupid hack to limit the amount of data dumped
# when dumping the "raw" child structures
blob_limit = len(contents)
# Walk each populated root tuple: each names an array of HDIF structures
# (alloc_cnt slots of alloc_len bytes, actual_cnt of them in use).
for offset, alloc_cnt,actual_cnt,alloc_len,_,_ in tuples:
    # Root tuples hold absolute addresses; rebase to file offsets.
    offset -= heap_base
    size = alloc_cnt * alloc_len
    hdif = hdr(offset)
    blob_limit = offset + size
    for i in xrange(actual_cnt):
        file_name = dirname + '/' + hdif.eyecatcher.strip() + '-' + str(i);
        print("Extracting '{:s} - {:d} of {:d}".format(hdif.eyecatcher, i, actual_cnt))
        try:
            os.mkdir(file_name)
        except Exception: # FRUVPD causes duplicates
            continue
        extract_struct(offset + alloc_len * i, file_name);
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement