Advertisement
misdocumeno

Untitled

Oct 22nd, 2020
100
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
Python 12.63 KB | None | 0 0
  1. # https://github.com/misdocumeno
  2.  
  3. import os
  4. import re
  5. import json
  6. import pathlib
  7. import subprocess
  8. from colorama import init
  9.  
# some "macros": indices into the per-file entry stored under "src" in
# compile.json, which has the shape [mtime, compiled_ok]
UPD_TIME = 0  # index of the last-modification timestamp
COMPILED = 1  # index of the "compiled successfully" flag
  13.  
# ANSI colors
class colors:
    # ANSI SGR escape sequences used to colorize console output
    # (colorama's init() makes these work on the Windows console)
    default = "\u001b[0m"   # reset all attributes
    red     = "\u001b[31m"
    yellow  = "\u001b[33m"
    liblue  = "\u001b[94m"  # bright ("light") blue
    cyan    = "\u001b[36m"
    green   = "\u001b[32m"
    gray    = "\u001b[90m"  # bright black
  23.  
  24. # ========== preparation ========== #
  25.  
  26. # make colors work on cmd
  27. init()
  28.  
  29. # get source code files
  30. all_sp_files = list(map(str, pathlib.Path(os.path.dirname(__file__)).glob("**/*.sp")))
  31. all_inc_files = list(map(str, pathlib.Path(os.path.dirname(__file__)).glob("**/*.inc")))
  32.  
  33. # get included folders
  34. include_folders = []
  35. for path, sub_dirs, _ in os.walk(os.path.dirname(__file__)):
  36.     for folder in sub_dirs:
  37.         if folder == "include":
  38.             include_folders.append(os.path.join(path, folder))
  39.  
  40. # ========== checking stuff ========== #
  41.  
  42. # some buffers
  43. file_was_updated = {}
  44. successfully_compiled = {}
  45. json_dictionary = {"src": {}, "inc": {}}
  46.  
  47. # check which files were updated
  48. if not os.path.isfile(os.path.join(os.path.dirname(__file__), "compile.json")):
  49.     file_was_updated = {file: True for file in all_sp_files + all_inc_files}
  50. else:
  51.     # load the current file
  52.     with open(os.path.join(os.path.dirname(__file__), "compile.json")) as json_file:
  53.         json_dictionary = json.load(json_file)
  54.     # check sp files
  55.     for file in all_sp_files:
  56.         # only check update time if the file exists in the json
  57.         if file in json_dictionary["src"]:
  58.             old_time = json_dictionary["src"][file][UPD_TIME]
  59.             file_was_updated[file] = True if old_time != os.path.getmtime(file) else False
  60.         else:
  61.             file_was_updated[file] = True
  62.     # check inc files
  63.     for file in all_inc_files:
  64.         # only check update time if the file exists in the json
  65.         if file in json_dictionary["inc"]:
  66.             old_time = json_dictionary["inc"][file]
  67.             file_was_updated[file] = True if old_time != os.path.getmtime(file) else False
  68.         else:
  69.             file_was_updated[file] = True
  70.  
  71. # ========== compiling ========== #
  72.  
  73. # get the output path for the smx file
  74. def output_path(sp_file):
  75.     sp_file = sp_file[:-2] + "smx"
  76.     return sp_file.replace("\\scripting\\", "\\compiled\\")
  77.  
  78. # get the folder that contains the file
  79. def containing_folder(sp_file):
  80.     path = pathlib.Path(sp_file)
  81.     return str(path.parent) + "\\" if str(path.parent) != "." else ""
  82.  
  83. # strip the path to the file
  84. def file_name(sp_file):
  85.     path = pathlib.Path(sp_file)
  86.     return str(path.name)
  87.  
# make the logs prettier and colorful
def format_logs(logs, sp_file):
    """Rewrite spcomp warning/error lines of `logs` in place with ANSI colors.

    logs: list of str, raw compiler output lines (mutated in place).
    sp_file: absolute path of the .sp file that was compiled; used to decide
        whether messages came from the main file or from an include.
    """
    # check if there are warnings of any file besides the sp
    # (only then is each message prefixed with the offending file's name)
    warnings_in_other_files = False
    for line in logs:
        if line.startswith(os.path.dirname(__file__)) and not line.startswith(sp_file):
            warnings_in_other_files = True
            break

    # modify the lines
    for i, line in enumerate(logs):
        # just modify errors and warnings lines
        if "warning" in line or "error" in line:
            # split each part with some regex
            # NOTE(review): findall(...)[0] raises IndexError if a line
            # contains "error"/"warning" but doesn't match spcomp's
            # "<file>(<line>) : <kind> <num>: <msg>" shape — assumed not to
            # happen for real compiler output; confirm against spcomp
            file, linenum, warning, message = "", "", "", ""
            file = (re.findall(r"(.*?)\(", line)[0]).replace(os.path.dirname(__file__) + "\\", "") + ' '
            linenum = re.findall(r".*?\(([0-9]+)\)", line)[0]
            warning = re.findall(r".*?\([0-9]+\) : ((?:warning|error|fatal error) [0-9]+): ", line)[0]
            message = re.findall(r".*? :.*?: (.*)", line)[0]
            # add color; errors red, warnings yellow; quoted symbols cyan
            file = "" if not warnings_in_other_files else containing_folder(file) + file_name(file)
            warning = (colors.red if "error" in warning else colors.yellow) + warning
            message = colors.default + re.sub(r'"(.*?)"', colors.cyan + r'\1' + colors.default, message) + colors.default
            # concatenate all pieces
            final_log = colors.default + file + colors.liblue + "line " + linenum + ':' + ' ' + warning + ': ' + message
            logs[i] = final_log
  114.  
# used to avoid duplicate separators: set True by compile_plugin right after
# it prints a trailing separator line, so the next failing plugin skips its
# leading one
separator_printed = False
  117.  
  118. # compile a sp file
  119. def compile_plugin(sp_file, smx_out):
  120.     global separator_printed
  121.     # this will be the smx location
  122.     output_file_path = containing_folder(smx_out)
  123.     # create the destination folder, if it doesn't exists
  124.     if not os.path.isdir(output_file_path):
  125.         os.makedirs(output_file_path, exist_ok=True)
  126.     # file name, relative to current folder
  127.     sp_file_relative = sp_file.replace(os.path.dirname(__file__) + "\\", "")
  128.  
  129.     # compiler process
  130.     compiler_launch_params = ["spcomp.exe", sp_file, "-o", smx_out, "-v=0"]
  131.     # add all include folders
  132.     for include in include_folders:
  133.         compiler_launch_params += ["-i", include]
  134.     run_compiler = subprocess.Popen(compiler_launch_params, stdout=subprocess.PIPE)
  135.     # all the compiler output will be stored here
  136.     compiler_stdout = []
  137.     # keep track of whats happening
  138.     errors, warnings, compiled = False, False, False
  139.     # analyze lines, check for errors or warnings
  140.     for line in run_compiler.stdout:
  141.         line = (line.decode("utf-8")).rstrip()
  142.         if "Code size:" not in line and "Data size:" not in line and "Stack/heap size:" not in line and "Total requirements:" not in line:
  143.             if not re.findall(r"[0-9]+ Warnings?\.", line) and not re.findall(r"[0-9]+ Errors?\.", line) and re.findall(r"\w+", line):
  144.                 compiler_stdout.append(line)
  145.         # this will be for a final message, after compiling everything
  146.         if not errors and re.findall(" : (fatal )?error [0-9]+:", line):
  147.             errors = True
  148.         elif not warnings and re.findall(" : warning [0-9]+:", line):
  149.             warnings = True
  150.  
  151.     # check return code
  152.     if run_compiler.wait() == 0:
  153.         compiled = True
  154.     else:
  155.         compiled = False
  156.  
  157.     # make it more beautiful
  158.     format_logs(compiler_stdout, sp_file)
  159.  
  160.     # print results
  161.     if errors or warnings:
  162.         if not separator_printed:
  163.             print(colors.gray + ("─" * 64) + colors.default)
  164.         # compilation result
  165.         print(sp_file_relative + ":", f"{colors.red}ERROR" if errors else f"{colors.yellow}WARNING", colors.default)
  166.         # compiler logs
  167.         for line in compiler_stdout:
  168.             print(line)
  169.         print(colors.gray + ("─" * 64) + colors.default)
  170.         separator_printed = True
  171.     else:
  172.         print(sp_file_relative + ":", f"{colors.green}OK", f"{colors.default}")
  173.         separator_printed = False
  174.  
  175.     return errors, warnings, compiled # return if this compilation had errors, warnings, and if it was compiled or not (all booleans)
  176.  
  177. # get includes from just this file (not recursively)
  178. def get_includes(file):
  179.     # store the file as a string
  180.     file_as_str = ""
  181.     with open(file, "r") as opened_file:
  182.         for line in opened_file:
  183.             file_as_str += line
  184.  
  185.     # remove comments
  186.     file_as_str = re.sub(r"/\*.*?\*/", "", file_as_str, flags=re.DOTALL)
  187.     file_as_str = re.sub(r"(//[^\n]*)\n", "", file_as_str)
  188.  
  189.     # get includes
  190.     file_includes = re.findall(r'#include ?([<"][^>"]+[>"])', file_as_str)
  191.  
  192.     if file_includes is not None:
  193.         file_includes = [string.replace('"', '') for string in file_includes]
  194.         file_includes = [string.replace('<', '') for string in file_includes]
  195.         file_includes = [string.replace('>', '.inc') for string in file_includes]
  196.  
  197.     return file_includes
  198.  
  199. # check if any include was updated (recursively)
  200. def has_includes_updated(sp_file):
  201.     # first, lets get all included files, recursively
  202.     files_to_parse = ["sourcemod.inc"] # we will search files included on these files
  203.  
  204.     sp_file_includes = get_includes(sp_file) # get included files on the main sp file
  205.  
  206.     if sp_file_includes is not None:
  207.         files_to_parse += sp_file_includes
  208.  
  209.     while files_to_parse:
  210.  
  211.         already_parsed = []
  212.         includes_found = []
  213.  
  214.         for inc_file in files_to_parse:
  215.  
  216.             for inc_folder in include_folders: # search on all include folders, in order
  217.  
  218.                 inc_file_path = os.path.join(inc_folder, inc_file)
  219.  
  220.                 if os.path.isfile(inc_file_path):
  221.                     # check if the file was updated
  222.                     if file_was_updated[inc_file_path]:
  223.                         return True
  224.                     # if not, keep searching for includes
  225.                     includes_found = get_includes(inc_file_path) # get includes on this file...
  226.                     if includes_found is not None:
  227.                         sp_file_includes += includes_found # ...and add them into the included files found
  228.                     break # we don't need to keep searching in the next folders, since spcomp gives priority to the folders...
  229.                           # ...in the order that they were added in the parameters
  230.  
  231.             already_parsed.append(inc_file) # we don't want to parse it again
  232.  
  233.         files_to_parse = [x for x in files_to_parse if x not in already_parsed]
  234.         if includes_found is not None:
  235.             files_to_parse += includes_found
  236.  
  237.     return False
  238.  
  239. # define whether it has to be compiled or not
  240. def should_be_compiled(file):
  241.     # check if the file was updated
  242.     if file_was_updated[file]:
  243.         return True
  244.     # check if the smx file exists
  245.     if not os.path.isfile(output_path(file)):
  246.         return True
  247.     # check if the file was compiled the last time
  248.     if not json_dictionary["src"][file][COMPILED]:
  249.         return True
  250.     if has_includes_updated(file):
  251.         return True
  252.  
  253.     return False
  254.  
# entry point
def start_compiler():
    """Compile every .sp file that needs it and print a colored summary."""
    # print compiler header (AM copyright and stuff)
    compiler_head = subprocess.Popen(["spcomp.exe"], stdout=subprocess.PIPE)
    for line in compiler_head.stdout:
        line = (line.decode("utf-8")).rstrip()
        # keep only the banner lines (they start with "S" or "C")
        if line.startswith("S") or line.startswith("C"):
            print(colors.liblue + line + colors.default)

    print("")

    # keep track of compilations
    all_plugins, all_errors, all_warnings, all_compiled = 0, 0, 0, 0

    for sp_file in all_sp_files:
        if should_be_compiled(sp_file):
            errors, warnings, compiled = compile_plugin(sp_file, output_path(sp_file))
            if compiled: all_compiled += 1
            # a plugin with both errors and warnings counts only as an error
            if errors: all_errors += 1
            elif warnings: all_warnings += 1
            all_plugins += 1
            # remembered so the compile.json update can store the result
            successfully_compiled[sp_file] = compiled

    print("\n")

    # print a summary of all compilations
    if not all_plugins:
        print(f"{colors.gray}Nothing to compile.{colors.default}")
    else:
        if not all_compiled:
            print(f"{colors.red}No plugin has been compiled.{colors.default} ", end="")
        else:
            if all_plugins == all_compiled:
                if not all_warnings:
                    print(f"{colors.green}Everything was successfully compiled.{colors.default}")
                else:
                    print(f"{colors.green}Everything was compiled.{colors.default} ", end="")
            else:
                print(f"{colors.yellow}{all_compiled}/{all_plugins} plugin" + ("s" if all_plugins > 1 else "") + f" compiled.{colors.default} ", end="")

        # warning/error counts, pluralized, in parentheses after the verdict
        if all_warnings and all_errors:
            print(f"{colors.gray}({colors.yellow}{all_warnings} warning" + ("s" if all_warnings > 1 else "") + f" {colors.gray}and {colors.red}{all_errors} error" + ("s" if all_errors > 1 else "") + f"{colors.gray}){colors.default}",)
        else:
            if all_warnings:
                print(f"{colors.gray}({colors.yellow}{all_warnings} warning" + ("s" if all_warnings > 1 else "") + f"{colors.gray}){colors.default}")
            elif all_errors:
                print(f"{colors.gray}({colors.red}{all_errors} error" + ("s" if all_errors > 1 else "") + f"{colors.gray}){colors.default}")

    print("")
  304.  
start_compiler()  # run every pending compilation now, at import time
  306.  
  307. # ========== preparing for the next compilation ========== #
  308.  
  309. # update json file
  310. new_json_dictionary = {"src": {}, "inc": {}}
  311.  
  312. for sp_file in all_sp_files:
  313.     new_json_dictionary["src"][sp_file] = [0.0, False]
  314.     new_json_dictionary["src"][sp_file][UPD_TIME] = os.path.getmtime(sp_file)
  315.     if sp_file in successfully_compiled:
  316.         new_json_dictionary["src"][sp_file][COMPILED] = successfully_compiled[sp_file]
  317.     else:
  318.         new_json_dictionary["src"][sp_file][COMPILED] = json_dictionary["src"][sp_file][COMPILED]
  319. for inc_file in all_inc_files:
  320.     new_json_dictionary["inc"][inc_file] = os.path.getmtime(inc_file)
  321.  
  322. # write to file
  323. with open("compile.json", 'w') as json_file:
  324.     json.dump(new_json_dictionary, json_file, indent=4)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement