QB Script Insert (Bulk Inserter) v1

import struct   #So many imports...
import codecs
import re
import zlib
import copy
import os
import shutil

BASE_ADDR = 0x17da00

s1 = " .,'!?abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890~-&>():;/"
s2 = "　．，’！？ａｂｃｄｅｆｇｈｉｊｋｌｍｎｏｐｑｒｓｔｕｖｗｘｙｚＡＢＣＤＥＦＧＨＩＪＫＬＭＮＯＰＱＲＳＴＵＶＷＸＹＺ１２３４５６７８９０～‐＆＞（）：；／"
translate_table = str.maketrans(s1, s2)     #For converting between ASCII and fullwidth SHIFT-JIS characters
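#translate() above also converts the letters inside control codes such as '#M' to
#fullwidth, so namelist (below) maps the mangled codes back to their ASCII forms.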
namelist = {'#Ｍ': '#M', '#Ｆ': '#F', '#ＳＣ': '#SC', '#ＳＡ': '#SA', '#ＳＢ': '#SB',   #For fixing corruptions
            '#ＴＢ': '#TB', '#ＴＣ': '#TC', '#ＧＢ': '#GB', '#ＧＡ': '#GA',
            '#ＧＣ': '#GC', '#ＴＡ': '#TA'}

if os.path.isfile('shared.orig'):
    pass
elif os.path.isfile('shared.bin'):
    shutil.copy('shared.bin','shared.orig')
else:
    print('Missing shared.bin.')
    quit()

#Given a pointer's position in the file data, returns [position, pointer value]
def getpointer(pos, filedata):
    return [pos, struct.unpack('<I', filedata[pos:pos+4])[0]]

#Multiple replace. Pretty cool, huh? repls is a dict w/ the replacements
def replace_all(repls, text):
    return re.sub('|'.join(re.escape(key) for key in repls.keys()),
                  lambda k: repls[k.group(0)], text)
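#e.g. replace_all({'foo': 'bar', 'baz': 'qux'}, 'foo baz') -> 'bar qux'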

#Inserts the translated text from <file_num>.tsv into script file <file_num>,
#writes the compressed result to <file_num>.temp,
#and updates the archive index to match the new sizes.
def insert_script(file_num, input_col):
    #Declaring globals is cheating. But I'm not sure how to avoid it.
    global archive_index
    file_num_str = '{:0>3}'.format(file_num)            #Need the str for file operations

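    #Apparent TSV layout, judging from the parsing below: column 0 is the string's
    #hex address in the string table, column 1 is non-empty at the start of an opcode
    #(it may be a name code), column 2 is non-empty when the text holds two
    #'|'-separated strings, and column input_col holds the translated text.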
    inputdata = []
    with codecs.open(file_num_str + '.tsv','rb','utf-8') as f:  #Open input file
        for line in f:
            line = line.rstrip('\r\n').split('\t')
            text = line[input_col].translate(translate_table)   #Change to fullwidth characters
            text = replace_all(namelist, text)                  #Get rid of corruptions like #Ｍ (should be #M)
            while '"' in text:                          #Mess with quotations (gives open/close quotation marks)
                text = text.replace('"', '“', 1)
                text = text.replace('"', '”', 1)
            if line[1] != '':                           #Start of opcode
                inputdata.append([line[0]])             #Append address
                if line[1] in names:                    #Starts w/ a name
                    inputdata[-1] += [names[line[1]]]   #Lookup translated name
                elif line[2] != '':                     #Two strings on this line
                    inputdata[-1] += text.split('|')
                    continue
                else:                                   #One string, non-name
                    inputdata[-1] += [text]
                    continue
            if line[input_col] == '':                   #Blank line
                continue
            inputdata[-1] += [text]                     #Continuation line (additional line for same opcode)

    #Load and decompress data
    offset, compressed_size = (archive_index_orig[file_num][0],
                               archive_index_orig[file_num][2])
    with open('shared.orig', 'rb') as f:
        f.seek(BASE_ADDR + offset)
        filedata = bytearray(zlib.decompress(f.read(compressed_size)))

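    #Script file header, as used below: 0x38 holds the number of strings, 0x44 the
    #pointer-table address, 0x4c the string-table address; ptr_loc lists the header
    #pointers that must be adjusted when string lengths change.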
    #Get pointers
    ptr_loc = [0x4, 0x44, 0x4c, 0x54, 0x5c, 0x6c]
    num_of_strings = struct.unpack('<I', filedata[0x38:0x3C])[0]
    ptr_table_addr = struct.unpack('<I', filedata[0x44:0x48])[0]
    str_table_addr = struct.unpack('<I', filedata[0x4c:0x50])[0]

    ptr_loc2 = [ptr_table_addr + x * 4 for x in range(num_of_strings + 1)]
    ptrs = [getpointer(ptr, filedata) for ptr in ptr_loc]
    ptrs2 = [getpointer(ptr, filedata) for ptr in ptr_loc2]

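    #Each replacement can change a string's length, so `offset` accumulates the net
    #size change and every pointer past the edited position is shifted to match.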
    #Replace each string, update pointers
    offset = 0
    for data in inputdata:
        pos = int(data[0],16) + str_table_addr + offset
        text = b'\x40'.join([x.encode('cp932') for x in data[1:]])
        orig_len = len(filedata[pos:filedata.find(b'\x00',pos)])
        for i, (ptr_loc, ptr_tgt) in enumerate(ptrs):
            if ptr_tgt > pos:
                ptrs[i][1] += len(text) - orig_len
        for i, (ptr_loc, ptr_tgt) in enumerate(ptrs2):
            if ptr_tgt + str_table_addr > pos:
                ptrs2[i][1] += len(text) - orig_len
        filedata[pos:pos + orig_len] = text
        offset += len(text) - orig_len

    #Write-back pointers
    for ptr_loc, ptr_tgt in ptrs + ptrs2:
        filedata[ptr_loc:ptr_loc+4] = struct.pack('<I', ptr_tgt)

    #Update archive index compressed and decompressed sizes
    archive_index[file_num][1] = len(filedata)      #Update decompressed size
    with open(file_num_str + '.temp1','wb') as f:
        f.write(filedata)
    filedata = zlib.compress(filedata, 9)           #Compress data
    archive_index[file_num][2] = len(filedata)      #Update compressed size

    #Update archive index offsets
    if file_num == num_of_files - 1:                #Ignore last file
        pass
    else:                                           #Not the last file
        avail_blocks = (archive_index[file_num + 1][0] - archive_index[file_num][0]) // 16  #1 block = 16 bytes
        if len(filedata) % 16 == 0:
            needed_blocks = len(filedata) // 16 + 1     #Leaves an extra whole block of zeros; harmless.
        else:
            needed_blocks = len(filedata) // 16 + 2
        if needed_blocks > avail_blocks:            #More blocks are needed. This is usually the case.
            for x in range(file_num + 1, num_of_files):     #Move all files after this one down by # of extra blocks needed
                archive_index[x][0] += 16 * (needed_blocks - avail_blocks)

    with open(file_num_str + '.temp','wb') as f:    #Write compressed data to file (it's needed later)
        f.write(filedata)

#Load names file
names = {}
with codecs.open('names.tsv','rb','utf-8') as f:
    for line in f:
        line = line.rstrip('\r\n').split('\t')
        if line[0] in namelist.values():
            names[line[0]] = line[0]
        else:
            names[line[0]] = line[1].translate(translate_table)

#Load archive index
with open('shared.orig', 'rb') as f:
    f.seek(BASE_ADDR + 4)
    num_of_files = struct.unpack('<I', f.read(4))[0]
    archive_index = []
    for x in range(num_of_files):
        archive_index.append(list(struct.unpack('<III', f.read(12))))   #Offset, decompressed size, compressed size
    #The program still needs the original offsets and sizes to read data from shared.orig.
    #It uses archive_index_orig for that.
    #A deep copy is required: the entries are lists, so a shallow copy would share them.
archive_index_orig = copy.deepcopy(archive_index)

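#Each entry of scripts_to_insert pairs a file number with the TSV column that holds
#its translation: column 3 for files 3-8 and column 5 for files 0-2 here.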
scripts_to_insert_3 = [3, 4, 5, 6, 7, 8]
scripts_to_insert_4 = []
scripts_to_insert_5 = [0, 1, 2]
scripts_to_insert = [[x] + [3] for x in scripts_to_insert_3] + \
                    [[x] + [4] for x in scripts_to_insert_4] + \
                    [[x] + [5] for x in scripts_to_insert_5]
for file_num, col_to_insert in sorted(scripts_to_insert, key=lambda x: x[0]):
    insert_script(file_num, col_to_insert)

with open('shared.orig','rb') as f:
    with open('shared.bin','wb') as g:
        g.write(f.read(BASE_ADDR))                  #Everything up to where the archive starts
        g.write(b'Wpbb')                            #0x0 - File identifier
        g.write(struct.pack('<I', num_of_files))    #0x4 - Number of files
        for i in archive_index:                     #Write archive index
            g.write(struct.pack('<III', *i))
        for i in range(num_of_files):               #Write each sub-file in archive
            if g.tell() - BASE_ADDR > archive_index[i][0]:
                print('error')
                print(str(i), hex(g.tell() - BASE_ADDR), hex(archive_index[i][0]))
                quit()
            while g.tell() - BASE_ADDR < archive_index[i][0]:   #Write zeros to get to the correct start position
                g.write(b'\x00')
            if i in [x[0] for x in scripts_to_insert]:          #If it's a new custom script file...
                with open('{:0>3}'.format(i) + '.temp','rb') as h:  #Write the previously saved, compressed script
                    g.write(h.read())
            else:                                               #Not a new custom script file
                f.seek(BASE_ADDR + archive_index_orig[i][0])    #Copy original data from shared.orig
                g.write(f.read(archive_index[i][2]))
        #Finished writing archive file
        g.write(b'\x00' * (0x243000 - g.tell()))    #Write zeros up to where the next file in shared.bin should go
        f.seek(0x243000)                            #Jump shared.orig to the same position
        g.write(f.read())                           #Copy the rest of the original file, unchanged