Untitled
a guest, Jan 16th, 2018

#!/usr/bin/python

from collections import namedtuple
import struct
import sys
import os

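# Unpacks a raw HDAT dump into a directory tree: each HDIF structure reachable
# from the SPIRA-S root array gets a directory containing its raw bytes plus
# one file per idata entry, recursing into any child structures.
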
field_names = {}
field_names["SPPCIA"] = ["core_uniq_data", "timebase_struct", "cache_size", "thread_data_array", "cpu_attributes"]
field_names["SPPCRD"] = ["chip_info", "chiptod", "fru_id", "fru_vpd", "chip_vpd", "hosti2c", "pnor_info", "smp_link", "chip_ec"]
field_names["MS VPD"] = ["max_msaddr", "total_ms", "page_mover", "trace_areas", "ue_addr", "hb_reserved"]
field_names["MSAREA"] = ["fru_id", "fru_vpd", "ms_area_id", "ms_size", "addr_range_array", "mem_affinity", "chip_ec", "deprecated", "hosti2c"]
field_names["RAM   "] = ["fru_id", "fru_vpd", "ram_area_id", "ram_size"]
field_names["IPLPMS"] = ["system_params", "ipl_params", "ipl_time", "sp_priv_ipl_params", "platform_dump", "hmc_conns", "cuod", "mfg_mode_data", "serial_port_loc", "features"]
field_names["IO HUB"] = ["fru_id", "fru_vpd", "cec_fru_id", "iohubs"]
field_names["IO KID"] = ["fru_id", "fru_vpd", "padding?"]
field_names["IOSLOT"] = ["slot_map", "slot_details"]
field_names["SLCA  "] = ["slca"]
field_names["SPINFO"] = ["fru_id", "kw_vpd", "sp_impl", "deprecated", "sp_mem_loc", "iopath"]
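# The tables above give human-readable names for each eyecatcher's idata
# entries; idata_name() falls back to a plain "idata" label for anything
# not listed here.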


# HDIF header crap
hdif_fields = "d1f0 eyecatcher instance version size header_size idata_offset idata_count child_count child_off"
hdif_hdr = ">H6sHHIIIHHI"
def hdr(offset):
    return HDIF._make(struct.unpack_from(hdif_hdr, contents, offset))

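# The pack format above decodes the common HDIF header (all big-endian):
# u16 magic (0xd1f0), 6-byte ASCII eyecatcher, u16 instance, u16 version,
# u32 total size, u32 header size, u32 offset of the idata pointer array,
# u16 idata count, u16 child count, u32 offset of the child pointer array.
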
# namedtuple types we use
HDIF = namedtuple('HDIF', hdif_fields)
IData = namedtuple("IData", "offset size")
ChildPtr = namedtuple("ChildPtr", "offset size count")
IdataArray = namedtuple("IDataArray", "offset count elem_alloc elem_actual")

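# IData entries are (offset, size) pairs relative to the start of the owning
# HDIF header.  ChildPtr entries describe an array of 'count' child HDIF
# structures, each 'size' bytes, starting 'offset' bytes past the parent
# header.  IdataArray is the 16-byte header of an embedded array: 'count'
# records, each allocated 'elem_alloc' bytes with 'elem_actual' valid bytes.
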
# TODO: Factor out the generic HDIF parsing stuff into a separate file and
#       make all this a bit more OOPy

# Internal data crap
def idata_off(off, index):
    idata_off = off + hdr(off).idata_offset + struct.calcsize(">LL") * index
    return IData._make(struct.unpack_from(">LL", contents, idata_off))

def idata_array_hdr(offset):
    return IdataArray._make(struct.unpack_from(">LLLL", contents, offset))

def idata_name(hdif, index):
    if hdif.eyecatcher in field_names:
        try:
            name = field_names[hdif.eyecatcher][index]
        except Exception:
            name = "idata"
    else:
        name = "idata"
    return "{:d}-{:s}".format(index, name)

def get_idata(hdr_offset):
    hdif = hdr(hdr_offset)
    idata = []
    for i in xrange(0, hdif.idata_count):
        offset, size = idata_off(hdr_offset, i)
        start = hdr_offset + offset
        end = start + size
        idata.append(contents[start:end])
    return idata

def get_child_ptr(off, index):
    child_ptr_off = off + hdr(off).child_off + struct.calcsize(">LLL") * index
    return ChildPtr._make(struct.unpack_from(">LLL", contents, child_ptr_off))

def get_child_ptrs(hdr_offset):
    hdif = hdr(hdr_offset)
    return [get_child_ptr(hdr_offset, i) for i in xrange(0, hdif.child_count)]

# XXX: look into making generators for the idata/child structures

def get_idata_arr_items(data):
    offset, count, alloc, actual = struct.unpack_from(">LLLL", data, 0)
    items = []
    for i in xrange(0, count):
        start = offset + i * alloc
        end = start + actual
        items.append(data[start:end])
    return items

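# The SPIRA-S root array is stored as one of these embedded arrays;
# get_idata_arr_items() is used below to pull its per-structure tuples
# out of the root's first idata blob.
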
def dump_into(filename, blob):
    with open(filename, "wb") as f:
        f.write(blob)

def child_name(offset, i, j):
    hdif = hdr(offset)
    if hdif.d1f0 != 0xd1f0:
        return "child-{:d}-{:d}".format(i, j)
    return "{:s}-{:d}".format(hdif.eyecatcher.strip(), j)

def extract_struct(offset, into_dir):
    hdif = hdr(offset)
    print("Extracting from {:#6x} HDIF {}".format(offset, hdif))
    if hdif.d1f0 != 0xd1f0:
        # XXX: make this fuzz around the pointer for a HDIF header
        print("WARNING: Broken child pointer!")
        return

    for i, idata in enumerate(get_idata(offset)):
        print("\t {:s}: {:x}".format(idata_name(hdif, i), offset + idata_off(offset, i).offset))
        dump_into("{:s}/{:s}".format(into_dir, idata_name(hdif, i)), idata)

    dump_into(into_dir + '/hdif', contents[offset:offset+blob_limit])

    for i, c in enumerate(get_child_ptrs(offset)):
        for j in xrange(c.count):
            child_offset = offset + c.offset + j * c.size
            print("\t extracting children: {}".format(c))
            dirname = "{:s}/{:s}/".format(into_dir, child_name(child_offset, i, j))
            os.mkdir(dirname)
            dump_into(dirname + 'hdif', contents[child_offset:blob_limit])
            extract_struct(child_offset, dirname)

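# extract_struct() lays a structure out on disk as:
#   <dir>/hdif            raw bytes starting at the HDIF header
#   <dir>/<i>-<name>      one file per idata entry
#   <dir>/<child>-<j>/    a nested directory per child structure
# where <child> is the child's eyecatcher (or "child-<i>-<j>" if its header
# looks broken).
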
# root array ntuple
if len(sys.argv) < 2:
    print("usage: hdatpack <dump file name>")
    sys.exit(1)

f = open(sys.argv[1], "rb")
contents = f.read()
f.close()

# FIXME: make this scan for the SPIRA-S
root_offset = 0
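
# A possible way to address the FIXME above: scan for the 'SPIRAS' eyecatcher,
# which the hdif_hdr format above places 2 bytes into its header, and confirm
# the 0xd1f0 magic.  Untested sketch, left unused; root_offset stays 0.
def find_spiras():
    pos = contents.find('SPIRAS')
    while pos != -1:
        candidate = pos - 2
        if (candidate >= 0 and
                candidate + struct.calcsize(hdif_hdr) <= len(contents) and
                hdr(candidate).d1f0 == 0xd1f0):
            return candidate
        pos = contents.find('SPIRAS', pos + 1)
    return 0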

root = hdr(root_offset)
if root.eyecatcher != 'SPIRAS':
    print("This script only supports the new style SPIRA-S, exiting")
    sys.exit(1)

# Parse the root array and find the common base offset
tuples = get_idata_arr_items(get_idata(root_offset)[0])

# base addr, alloc count, actual count, alloc len, actual len, TCE offset
tuples = [struct.unpack(">QHHIII", t) for t in tuples]
tuples = [t for t in tuples if t[0] > 0] # filter out unpopulated tuples

# Find the "base" offset; skiboot uses 0x31200000 (size 0x08000000) for the
# SPIRA heap, but this guess should work with PHYP HDATs too.
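# Masking with 0xffff0000 rounds each tuple's absolute base address down to a
# 64KiB boundary; the smallest such value across the populated tuples is taken
# as the start of the heap, and file offsets below are computed as absolute
# address minus heap_base.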
heap_base = min([t[0] & 0xffff0000 for t in tuples])
print('Guesstimated heap address: {:#x}'.format(heap_base))

dirname = '{:s}-unpacked'.format(sys.argv[1])
os.mkdir(dirname)

# blob_limit is a stupid hack to limit the amount of data dumped
# when dumping the "raw" child structures
blob_limit = len(contents)

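# Walk the root tuples: each one describes an array of actual_cnt HDIF
# structures spaced alloc_len bytes apart, starting at (base - heap_base)
# in the dump.  Every structure gets its own <eyecatcher>-<i> directory.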
for offset, alloc_cnt, actual_cnt, alloc_len, _, _ in tuples:
    offset -= heap_base
    size = alloc_cnt * alloc_len
    hdif = hdr(offset)
    blob_limit = offset + size
    for i in xrange(actual_cnt):
        file_name = dirname + '/' + hdif.eyecatcher.strip() + '-' + str(i)
        print("Extracting '{:s}' - {:d} of {:d}".format(hdif.eyecatcher, i, actual_cnt))
        try:
            os.mkdir(file_name)
        except Exception: # FRUVPD causes duplicates
            continue
        extract_struct(offset + alloc_len * i, file_name)