misdocumeno

Untitled

Oct 17th, 2020
851
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
  1. import os
  2. import re
  3. import json
  4. import pathlib
  5. import subprocess
  6. from colorama import init
  7.  
  8. # some "macros"
  9. UPD_TIME = 0
  10. COMPILED = 1
  11.  
  12. # ANSI colors
  13. class colors:
  14.     default = "\u001b[0m"
  15.     red     = "\u001b[31m"
  16.     yellow  = "\u001b[33m"
  17.     liblue  = "\u001b[94m"
  18.     cyan    = "\u001b[36m"
  19.     green   = "\u001b[32m"
  20.     gray    = "\u001b[90m"
  21.  
  22. # ========== preparation ========== #
  23.  
  24. # make colors work on cmd
  25. init()
  26.  
  27. # get source code files
  28. all_sp_files = list(map(str, pathlib.Path(os.getcwd()).glob("**/*.sp")))
  29. all_inc_files = list(map(str, pathlib.Path(os.getcwd()).glob("**/*.inc")))
  30.  
  31. # get included folders
  32. include_folders = []
  33. for path, sub_dirs, _ in os.walk(os.getcwd()):
  34.     for folder in sub_dirs:
  35.         if folder == "include":
  36.             include_folders.append(os.path.join(path, folder))
  37.  
  38. # ========== checking stuff ========== #
  39.  
  40. # some buffers
  41. file_was_updated = {}
  42. successfully_compiled = {}
  43. json_dictionary = {"src": {}, "inc": {}}
  44.  
  45. # check which files were updated
  46. if not os.path.isfile(os.path.join(os.getcwd(), "compile.json")):
  47.     file_was_updated = {file: True for file in all_sp_files + all_inc_files}
  48. else:
  49.     # load the current file
  50.     with open("compile.json") as json_file:
  51.         json_dictionary = json.load(json_file)
  52.     # check sp files
  53.     for file in all_sp_files:
  54.         # only check update time if the file exists in the json
  55.         if file in json_dictionary["src"]:
  56.             old_time = json_dictionary["src"][file][UPD_TIME]
  57.             file_was_updated[file] = True if old_time != os.path.getmtime(file) else False
  58.         else:
  59.             file_was_updated[file] = True
  60.     # check inc files
  61.     for file in all_inc_files:
  62.         # only check update time if the file exists in the json
  63.         if file in json_dictionary["inc"]:
  64.             old_time = json_dictionary["inc"][file]
  65.             file_was_updated[file] = True if old_time != os.path.getmtime(file) else False
  66.         else:
  67.             file_was_updated[file] = True
  68.  
  69. # ========== compiling ========== #
  70.  
  71. # get the output path for the smx file
  72. def output_path(sp_file):
  73.     sp_file = sp_file[:-2] + "smx"
  74.     return sp_file.replace("\\scripting\\", "\\compiled\\")
  75.  
  76. # get the folder that contains the file
  77. def containing_folder(sp_file):
  78.     path = pathlib.Path(sp_file)
  79.     return str(path.parent) + "\\" if str(path.parent) != "." else ""
  80.  
  81. # strip the path to the file
  82. def file_name(sp_file):
  83.     path = pathlib.Path(sp_file)
  84.     return str(path.name)
  85.  
  86. # make the logs prettier and colorful
  87. def format_logs(logs, sp_file):
  88.     # check if there are warnings of any file besides the sp
  89.     warnings_in_other_files = False
  90.     for line in logs:
  91.         if line.startswith(os.getcwd()) and not line.startswith(sp_file):
  92.             warnings_in_other_files = True
  93.             break
  94.  
  95.     # modify the lines
  96.     for i, line in enumerate(logs):
  97.         # just modify errors and warnings lines
  98.         if "warning" in line or "error" in line:
  99.             # split each part with some regex
  100.             file, linenum, warning, message = "", "", "", ""
  101.             file = (re.findall(r"(.*?)\(", line)[0]).replace(os.getcwd() + "\\", "") + ' '
  102.             linenum = re.findall(r".*?\(([0-9]+)\)", line)[0]
  103.             warning = re.findall(r".*?\([0-9]+\) : ((?:warning|error|fatal error) [0-9]+): ", line)[0]
  104.             message = re.findall(r".*? :.*?: (.*)", line)[0]
  105.             # add color
  106.             file = "" if not warnings_in_other_files else containing_folder(file) + file_name(file)
  107.             warning = (colors.red if "error" in warning else colors.yellow) + warning
  108.             message = colors.default + re.sub(r'"(.*?)"', colors.cyan + r'\1' + colors.default, message) + colors.default
  109.             # concatenate all pieces
  110.             final_log = colors.default + file + colors.liblue + "line " + linenum + ':' + ' ' + warning + ': ' + message
  111.             logs[i] = final_log
  112.  
# Whether the previous compile_plugin() call ended its output with a
# horizontal separator line; checked before printing the next separator
# so two separators are never printed back to back.
separator_printed = False
  115.  
  116. # compile a sp file
  117. def compile_plugin(sp_file, smx_out):
  118.     global separator_printed
  119.     # this will be the smx location
  120.     output_file_path = containing_folder(smx_out)
  121.     # create the destination folder, if it doesn't exists
  122.     if not os.path.isdir(output_file_path):
  123.         os.mkdir(output_file_path)
  124.     # file name, relative to current folder
  125.     sp_file_relative = sp_file.replace(os.getcwd() + "\\", "")
  126.  
  127.     # compiler process
  128.     compiler_launch_params = ["spcomp.exe", sp_file, "-o", smx_out, "-v=0"]
  129.     # add all include folders
  130.     for include in include_folders:
  131.         compiler_launch_params += ["-i", include]
  132.     run_compiler = subprocess.Popen(compiler_launch_params, stdout=subprocess.PIPE)
  133.     # all the compiler output will be stored here
  134.     compiler_stdout = []
  135.     # keep track of whats happening
  136.     errors, warnings, compiled = False, False, False
  137.     # analyze lines, check for errors or warnings
  138.     for line in run_compiler.stdout:
  139.         line = (line.decode("utf-8")).rstrip()
  140.         if "Code size:" not in line and "Data size:" not in line and "Stack/heap size:" not in line and "Total requirements:" not in line:
  141.             if not re.findall(r"[0-9]+ Warnings?\.", line) and not re.findall(r"[0-9]+ Errors?\.", line) and re.findall(r"\w+", line):
  142.                 compiler_stdout.append(line)
  143.         # this will be for a final message, after compiling everything
  144.         if not errors and re.findall(" : (fatal )?error [0-9]+:", line):
  145.             errors = True
  146.         elif not warnings and re.findall(" : warning [0-9]+:", line):
  147.             warnings = True
  148.  
  149.     # check return code
  150.     if run_compiler.wait() == 0:
  151.         compiled = True
  152.     else:
  153.         compiled = False
  154.  
  155.     # make it more beautiful
  156.     format_logs(compiler_stdout, sp_file)
  157.  
  158.     # print results
  159.     if errors or warnings:
  160.         if not separator_printed:
  161.             print(colors.gray + ("─" * 64) + colors.default)
  162.         # compilation result
  163.         print(sp_file_relative + ":", f"{colors.red}ERROR" if errors else f"{colors.yellow}WARNING", colors.default)
  164.         # compiler logs
  165.         for line in compiler_stdout:
  166.             print(line)
  167.         print(colors.gray + ("─" * 64) + colors.default)
  168.         separator_printed = True
  169.     else:
  170.         print(sp_file_relative + ":", f"{colors.green}OK", f"{colors.default}")
  171.         separator_printed = False
  172.  
  173.     return errors, warnings, compiled # return if this compilation had errors, warnings, and if it was compiled or not (all booleans)
  174.  
  175. # get includes from just this file (not recursively)
  176. def get_includes(file):
  177.     # store the file as a string
  178.     file_as_str = ""
  179.     with open(file, "r") as opened_file:
  180.         for line in opened_file:
  181.             file_as_str += line
  182.  
  183.     # remove comments
  184.     file_as_str = re.sub(r"/\*.*?\*/", "", file_as_str, flags=re.DOTALL)
  185.     file_as_str = re.sub(r"(//[^\n]*)\n", "", file_as_str)
  186.  
  187.     # get includes
  188.     file_includes = re.findall(r'#include ?([<"][^>"]+[>"])', file_as_str)
  189.  
  190.     if file_includes is not None:
  191.         file_includes = [string.replace('"', '') for string in file_includes]
  192.         file_includes = [string.replace('<', '') for string in file_includes]
  193.         file_includes = [string.replace('>', '.inc') for string in file_includes]
  194.  
  195.     return file_includes
  196.  
  197. # check if any include was updated (recursively)
  198. def has_includes_updated(sp_file):
  199.     # first, lets get all included files, recursively
  200.     files_to_parse = ["sourcemod.inc"] # we will search files included on these files
  201.  
  202.     sp_file_includes = get_includes(sp_file) # get included files on the main sp file
  203.  
  204.     if sp_file_includes is not None:
  205.         files_to_parse += sp_file_includes
  206.  
  207.     while files_to_parse:
  208.  
  209.         already_parsed = []
  210.         includes_found = []
  211.  
  212.         for inc_file in files_to_parse:
  213.  
  214.             for inc_folder in include_folders: # search on all include folders, in order
  215.  
  216.                 inc_file_path = os.path.join(inc_folder, inc_file)
  217.  
  218.                 if os.path.isfile(inc_file_path):
  219.                     # check if the file was updated
  220.                     if file_was_updated[inc_file_path]:
  221.                         return True
  222.                     # if not, keep searching for includes
  223.                     includes_found = get_includes(inc_file_path) # get includes on this file...
  224.                     if includes_found is not None:
  225.                         sp_file_includes += includes_found # ...and add them into the included files found
  226.                     break # we don't need to keep searching in the next folders, since spcomp gives priority to the folders...
  227.                           # ...in the order that they were added in the parameters
  228.  
  229.             already_parsed.append(inc_file) # we don't want to parse it again
  230.  
  231.         files_to_parse = [x for x in files_to_parse if x not in already_parsed]
  232.         if includes_found is not None:
  233.             files_to_parse += includes_found
  234.  
  235.     return False
  236.  
  237. # define whether it has to be compiled or not
  238. def should_be_compiled(file):
  239.     # check if the file was updated
  240.     if file_was_updated[file]:
  241.         return True
  242.     # check if the smx file exists
  243.     if not os.path.isfile(output_path(file)):
  244.         return True
  245.     # check if the file was compiled the last time
  246.     if not json_dictionary["src"][file][COMPILED]:
  247.         return True
  248.     if has_includes_updated(file):
  249.         return True
  250.  
  251.     return False
  252.  
  253. # entry point
  254. def start_compiler():
  255.     # print compiler header (AM copyright and stuff)
  256.     compiler_head = subprocess.Popen(["spcomp.exe"], stdout=subprocess.PIPE)
  257.     for line in compiler_head.stdout:
  258.         line = (line.decode("utf-8")).rstrip()
  259.         if line.startswith("S") or line.startswith("C"):
  260.             print(colors.liblue + line + colors.default)
  261.  
  262.     print("")
  263.  
  264.     # keep track of compilations
  265.     all_plugins, all_errors, all_warnings, all_compiled = 0, 0, 0, 0
  266.  
  267.     for sp_file in all_sp_files:
  268.         if should_be_compiled(sp_file):
  269.             errors, warnings, compiled = compile_plugin(sp_file, output_path(sp_file))
  270.             if compiled: all_compiled += 1
  271.             if errors: all_errors += 1
  272.             elif warnings: all_warnings += 1
  273.             all_plugins += 1
  274.             successfully_compiled[sp_file] = compiled
  275.  
  276.     print("\n")
  277.  
  278.     # print a summary of all compilations
  279.     if not all_plugins:
  280.         print(f"{colors.gray}Nothing to compile.{colors.default}")
  281.     else:
  282.         if not all_compiled:
  283.             print(f"{colors.red}No plugin has been compiled.{colors.default} ", end="")
  284.         else:
  285.             if all_plugins == all_compiled:
  286.                 if not all_warnings:
  287.                     print(f"{colors.green}Everything was successfully compiled.{colors.default}")
  288.                 else:
  289.                     print(f"{colors.green}Everything was compiled.{colors.default} ", end="")
  290.             else:
  291.                 print(f"{colors.yellow}{all_compiled}/{all_plugins} plugin" + ("s" if all_plugins > 1 else "") + f" compiled.{colors.default} ", end="")
  292.  
  293.         if all_warnings and all_errors:
  294.             print(f"{colors.gray}({colors.yellow}{all_warnings} warning" + ("s" if all_warnings > 1 else "") + f" {colors.gray}and {colors.red}{all_errors} error" + ("s" if all_errors > 1 else "") + f"{colors.gray}){colors.default}",)
  295.         else:
  296.             if all_warnings:
  297.                 print(f"{colors.gray}({colors.yellow}{all_warnings} warning" + ("s" if all_warnings > 1 else "") + f"{colors.gray}){colors.default}")
  298.             elif all_errors:
  299.                 print(f"{colors.gray}({colors.red}{all_errors} error" + ("s" if all_errors > 1 else "") + f"{colors.gray}){colors.default}")
  300.  
  301.     print("")
  302.  
  303. start_compiler()
  304.  
  305. # ========== preparing for the next compilation ========== #
  306.  
  307. # update json file
  308. new_json_dictionary = {"src": {}, "inc": {}}
  309.  
  310. for sp_file in all_sp_files:
  311.     new_json_dictionary["src"][sp_file] = [0.0, False]
  312.     new_json_dictionary["src"][sp_file][UPD_TIME] = os.path.getmtime(sp_file)
  313.     if sp_file in successfully_compiled:
  314.         new_json_dictionary["src"][sp_file][COMPILED] = successfully_compiled[sp_file]
  315.     else:
  316.         new_json_dictionary["src"][sp_file][COMPILED] = json_dictionary["src"][sp_file][COMPILED]
  317. for inc_file in all_inc_files:
  318.     new_json_dictionary["inc"][inc_file] = os.path.getmtime(inc_file)
  319.  
  320. # write to file
  321. with open("compile.json", 'w') as json_file:
  322.     json.dump(new_json_dictionary, json_file, indent=4)
  323.  
  324. # keep the program open
  325. os.system("pause")
RAW Paste Data