Advertisement
misdocumeno

Untitled

Oct 5th, 2020
820
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
Python 9.08 KB | None | 0 0
  1. import os
  2. import re
  3. import json
  4. import shutil
  5. import subprocess
  6. from pathlib import Path
  7.  
  8. # some colors
  9. class colors:
  10.     default = "\u001b[0m"
  11.     red     = "\u001b[31m"
  12.     yellow  = "\u001b[33m"
  13.     liblue  = "\u001b[94m"
  14.     green   = "\u001b[32m"
  15.     gray    = "\u001b[90m"
  16.  
# ========== preparation ========== #

# resolve working directories relative to the current working directory
# (Windows-style separators; the script shells out to spcomp.exe, so it
# is Windows-only by design)
input_dir = os.getcwd() + "\\scripting"
output_dir = os.getcwd() + "\\compiled"
include_dir = os.getcwd() + "\\include"

  24. # duplicate scripting folder tree structure, for compiled smx files
  25. def ig_f(dir, files):
  26.     return [f for f in files if os.path.isfile(os.path.join(dir, f))]
  27.  
# mirror the scripting/ folder tree (directories only) into compiled/,
# so every .smx can be written at its source's relative path
shutil.copytree(input_dir, output_dir, ignore=ig_f, dirs_exist_ok=True)
  29.  
# ========== checking stuff ========== #

# does a state file from a previous run exist?
json_exists = os.path.isfile(os.getcwd() + "\\" + "compile.json")

# fresh state: "srcs" maps .sp path -> [compiled_ok, mtime],
# "inc" maps include path -> mtime (filled in below)
json_dict = {"srcs": {}, "inc": {}}

# recursively collect every .sp source under the scripting folder
sp_list = Path(input_dir).glob('**/*.sp')
sp_list = list(map(str, sp_list)) # convert WindowsPath objects to plain strings
  41.  
  42. # get all includes of a file
  43. def get_includes(src_file):
  44.     need_parsing = [src_file, include_dir + "\\sourcemod.inc"] # everything here will be parsed for includes
  45.     already_parsed = [] # we don't want to parse a file more than once
  46.     all_includes = [] # all includes found
  47.  
  48.     while len(need_parsing): # as long as there is some include without having been parsed...
  49.  
  50.         add_to_parsing = [] # files that we will have to parse, but we can't add them to the list during it's own iteration
  51.  
  52.         for file in need_parsing:
  53.             # only parse existing files, and only once
  54.             if os.path.isfile(file) and not file in already_parsed:
  55.                 file_as_str = "" # we will temporarily store the entire file as a string here
  56.                 with open(file, "r") as opened_file:
  57.                     for line in opened_file:
  58.                         file_as_str += line
  59.  
  60.                 # strip comments
  61.                 file_as_str = re.sub(r"/\*.*?\*/", "", file_as_str, flags=re.DOTALL)
  62.                 file_as_str = re.sub(r"(/[^\n]*)\n", "", file_as_str)
  63.                        
  64.                 # get includes
  65.                 included_files = re.findall(r'#include ?[<"]([^>"]+)[>"]', file_as_str)
  66.  
  67.                 if included_files != None:
  68.                     # format include string
  69.                     for i, inc in enumerate(included_files):
  70.                         included_files[i] = include_dir + "\\" + included_files[i]
  71.                         # add inc extension if necessary
  72.                         if not inc.endswith(".inc") and not inc.endswith(".sp"):
  73.                             included_files[i] += ".inc"
  74.  
  75.                     # we will have to parse these files too
  76.                     add_to_parsing.extend(included_files)
  77.  
  78.                     # add them to all includes found
  79.                     all_includes.extend(included_files)
  80.  
  81.             # we don't want to parse this file again
  82.             already_parsed.append(file)
  83.        
  84.         # end of iteration, now we can modify need_parsing
  85.         need_parsing.extend(add_to_parsing)
  86.         need_parsing = [x for x in need_parsing if x not in already_parsed] # remove already parsed files
  87.  
  88.     return list(set(all_includes)) # return the list without duplicates
  89.  
  90. # check if any include was updated
  91. def includes_updated(file):
  92.     includes = get_includes(file)
  93.     for inc in includes:
  94.         inc_time = os.path.getmtime(inc)
  95.         if inc_time > json_dict["inc"][inc]:
  96.             return True
  97.     return False
  98.        
# decide which files need compiling this run
if json_exists:
    # reuse the state recorded by the previous run
    with open("compile.json") as json_file:
        json_dict = json.load(json_file)

    # re-evaluate every .sp file under the scripting folder
    for file in sp_list:

        # the .smx this source compiles to, mirrored into the output tree
        smx_file = (file.replace(input_dir, output_dir)).replace(".sp", ".smx")
        time = os.path.getmtime(file)

        # no compiled artifact yet -> must compile
        if not os.path.isfile(smx_file):
            json_dict["srcs"][file] = [False, time]

        # artifact exists: recompile if the source changed since the recorded run
        # NOTE(review): a brand-new source whose .smx already exists (e.g. compiled
        # manually) would raise KeyError here — new files are normally caught above; verify
        elif time != json_dict["srcs"][file][1]:
            json_dict["srcs"][file] = [False, time]

        # same mtime, but the last compile failed -> retry
        elif not json_dict["srcs"][file][0]:
            json_dict["srcs"][file] = [False, time]

        # otherwise...
        else:
            # recompile only when one of its includes changed
            if includes_updated(file):
                json_dict["srcs"][file] = [False, time]
            # nothing changed: keep it marked as up to date
            else:
                json_dict["srcs"][file] = [True, time]

    # drop state entries for sources that no longer exist on disk
    delete = []

    for key in json_dict["srcs"]:
        if not key in sp_list:
            delete.append(key)

    for key in delete:
        del(json_dict["srcs"][key])

# no previous state: mark everything for compilation
else:
    # record each source's mtime, flagged as not-yet-compiled
    for file in sp_list:
        json_dict["srcs"][file] = [False, os.path.getmtime(file)]
        # and the mtime of every include it pulls in
        includes = get_includes(file)
        for inc in includes:
            json_dict["inc"][inc] = os.path.getmtime(inc)
  152.  
  153.  
# ========== compiling ========== #

sp_amount = 0        # sources that needed compiling this run
compiled_amount = 0  # of those, how many compiled successfully
errors_amount = 0    # plugins whose compiler output contained errors
warnings_amount = 0  # plugins with warnings only (no errors)
last_printed_line = False  # True when the last output was a separator line
  161.  
# compile every source whose state entry is flagged as needing compilation
for file in json_dict["srcs"]:
    if not json_dict["srcs"][file][0]:

        sp_amount += 1
        errors = False
        warnings = False

        # output path: same relative location inside compiled/, .smx extension
        smx_file = (file.replace(input_dir, output_dir)).replace(".sp", ".smx")

        # run the SourcePawn compiler, capturing its stdout (-v=0: quiet mode)
        compiler = subprocess.Popen(["spcomp.exe", file, "-o", smx_file, "-v=0"], stdout=subprocess.PIPE)

        # scan compiler output for error / warning diagnostic lines
        logs_list = []
        for line in compiler.stdout:
            line = line.decode("utf-8")

            if len(re.findall(" : (fatal )?error [0-9]+:", line)):
                logs_list.append(line)
                errors = True
            elif len(re.findall(" : warning [0-9]+:", line)):
                logs_list.append(line)
                warnings = True

        # print a one-line status, then any captured diagnostics

        # path relative to the scripting folder, without the leading backslash
        source_file = (file.replace(input_dir, "")).replace("\\", "", 1) + f": "

        if not errors and not warnings:
            print(" " + source_file + f"{colors.green}OK{colors.default}")
        elif errors:
            # draw a separator unless the previous plugin already printed one
            if not last_printed_line: print(f"{colors.gray}", "─" * 64 + f"{colors.default}")
            print(" " + source_file + f"{colors.red}ERROR{colors.default}:")
        elif warnings:
            if not last_printed_line: print(f"{colors.gray}", "─" * 64 + f"{colors.default}")
            print(" " + source_file + f"{colors.yellow}WARNING{colors.default}:")

        if errors or warnings:
            for line in logs_list:
                # shorten diagnostics by replacing the absolute path with "line "
                print(" " + line.replace(file, "line "), end="")

            print(f"{colors.gray}", "─" * 64 + f"{colors.default}")
            last_printed_line = True
        else:
            last_printed_line = False

        time = os.path.getmtime(file)

        # exit code 0 means success; record it so the next run can skip this file
        if compiler.wait() == 0: # compiled successfully
            json_dict["srcs"][file] = [True, time]
            compiled_amount += 1

        # refresh the recorded mtimes of every include this source pulls in
        # NOTE(review): getmtime raises if an include path does not exist on disk — verify
        includes = get_includes(file)
        for inc in includes:
            json_dict["inc"][inc] = os.path.getmtime(inc)

        if errors:
            errors_amount += 1
        elif warnings:
            warnings_amount += 1
  225.  
# persist the refreshed state for the next run
with open("compile.json", 'w') as json_file:
    json.dump(json_dict, json_file, indent=4)
  229.  
# print a final summary line, colored by outcome
if sp_amount == 0:
    print(f"{colors.gray}Nothing to compile.{colors.default}")
elif sp_amount == compiled_amount:
    print(f"\n\n {colors.green}All (" + str(sp_amount) + f") plugins have been successfully compiled.{colors.default}", end="")
elif compiled_amount > 0:
    print(f"{colors.liblue}\n\n " + str(compiled_amount) + " of " + str(sp_amount) + f" plugins have been successfully compiled.{colors.default}", end="")
else:
    print(f"\n\n {colors.red}No plugin has been compiled.{colors.default}", end="")

# append warning/error counts when applicable (pluralized)
if warnings_amount and errors_amount:
    print(f" {colors.gray}({warnings_amount} plugin" + ("s" if warnings_amount > 1 else "") + f" with warnings and {errors_amount} with errors){colors.default}")
else:
    if warnings_amount:
        print(f" {colors.gray}({warnings_amount} plugin" + ("s" if warnings_amount > 1 else "") + f" compiled with warnings){colors.default}")
    if errors_amount:
        print(f" {colors.gray}({errors_amount} plugin" + ("s" if errors_amount > 1 else "") + f" with errors){colors.default}")
  247.    
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement