Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- # https://github.com/misdocumeno
- import os
- import re
- import json
- import pathlib
- import subprocess
- from colorama import init
# some "macros": indices into the per-file entries stored in compile.json ("src" section)
UPD_TIME = 0  # index of the file's last-modified timestamp
COMPILED = 1  # index of the "compiled successfully on the last run" flag
# ANSI colors
class colors:
    """ANSI escape sequences used to colorize console output."""
    default = "\u001b[0m"  # reset to the terminal's default color
    gray = "\u001b[90m"
    red = "\u001b[31m"
    green = "\u001b[32m"
    yellow = "\u001b[33m"
    cyan = "\u001b[36m"
    liblue = "\u001b[94m"  # light blue
# ========== preparation ========== #

# make colors work on cmd (colorama translates the ANSI escapes on windows)
init()

# root folder of this script; every lookup below is relative to it
script_dir = os.path.dirname(__file__)

# get source code files (recursively)
all_sp_files = [str(f) for f in pathlib.Path(script_dir).glob("**/*.sp")]
all_inc_files = [str(f) for f in pathlib.Path(script_dir).glob("**/*.inc")]

# get every folder named "include" (each will be passed to spcomp with -i)
include_folders = []
for parent, sub_dirs, _ in os.walk(script_dir):
    for folder in sub_dirs:
        if folder == "include":
            include_folders.append(os.path.join(parent, folder))
# ========== checking stuff ========== #

# some buffers
file_was_updated = {}       # file path -> True when it changed since the last run
successfully_compiled = {}  # sp file path -> True when its compilation succeeded this run
json_dictionary = {"src": {}, "inc": {}}

# check which files were updated since the previous run
compile_json_path = os.path.join(os.path.dirname(__file__), "compile.json")
if not os.path.isfile(compile_json_path):
    # no saved state yet: treat every file as updated
    file_was_updated = {file: True for file in all_sp_files + all_inc_files}
else:
    # load the state saved by the previous run
    with open(compile_json_path) as json_file:
        json_dictionary = json.load(json_file)
    # check sp files: updated when the stored mtime differs (new files count as updated)
    for file in all_sp_files:
        # only check update time if the file exists in the json
        if file in json_dictionary["src"]:
            old_time = json_dictionary["src"][file][UPD_TIME]
            file_was_updated[file] = old_time != os.path.getmtime(file)
        else:
            file_was_updated[file] = True
    # check inc files (the json "inc" section stores just the mtime)
    for file in all_inc_files:
        if file in json_dictionary["inc"]:
            old_time = json_dictionary["inc"][file]
            file_was_updated[file] = old_time != os.path.getmtime(file)
        else:
            file_was_updated[file] = True
# ========== compiling ========== #

# get the output path for the smx file
def output_path(sp_file):
    """Map a .sp source path to its .smx output path (scripting -> compiled tree)."""
    smx_file = sp_file[:-3] + ".smx"
    return smx_file.replace("\\scripting\\", "\\compiled\\")
# get the folder that contains the file
def containing_folder(sp_file):
    """Return the parent folder of sp_file with a trailing backslash, or "" if none."""
    parent = str(pathlib.Path(sp_file).parent)
    if parent == ".":
        return ""
    return parent + "\\"
# strip the path to the file
def file_name(sp_file):
    """Return just the file-name portion of sp_file."""
    # Path.name is already a str, no conversion needed
    return pathlib.Path(sp_file).name
# make the logs prettier and colorful
def format_logs(logs, sp_file):
    """Rewrite compiler warning/error lines in *logs* (in place) with ANSI colors.

    logs: list of raw compiler output lines; modified in place.
    sp_file: path of the .sp file that was compiled (its own name is omitted
             from the log lines unless other files also produced diagnostics).
    """
    root = os.path.dirname(__file__)
    # check if there are warnings of any file besides the sp; if so, each log
    # line keeps its file name so the files can be told apart
    warnings_in_other_files = any(
        line.startswith(root) and not line.startswith(sp_file) for line in logs
    )
    # one regex splits a compiler line into path, line number, kind and message,
    # e.g.: C:\...\plugin.sp(12) : warning 213: tag mismatch
    log_line = re.compile(r"(.*?)\(([0-9]+)\) : ((?:warning|error|fatal error) [0-9]+): (.*)")
    # modify the lines
    for i, line in enumerate(logs):
        # just modify errors and warnings lines
        if "warning" not in line and "error" not in line:
            continue
        match = log_line.match(line)
        if match is None:
            # robustness: a line that merely mentions "error"/"warning" but does
            # not follow the compiler's format is left untouched (the previous
            # per-part regexes raised IndexError on such lines)
            continue
        file, linenum, warning, message = match.groups()
        file = file.replace(root + "\\", "") + ' '
        # add color
        file = "" if not warnings_in_other_files else containing_folder(file) + file_name(file)
        warning = (colors.red if "error" in warning else colors.yellow) + warning
        message = colors.default + re.sub(r'"(.*?)"', colors.cyan + r'\1' + colors.default, message) + colors.default
        # concatenate all pieces
        logs[i] = colors.default + file + colors.liblue + "line " + linenum + ':' + ' ' + warning + ': ' + message
# used to avoid printing two separator lines back-to-back between compilations
separator_printed = False
# compile a sp file
def compile_plugin(sp_file, smx_out):
    """Compile *sp_file* into *smx_out* with spcomp.exe and print the result.

    Returns (errors, warnings, compiled): whether the compiler reported any
    error, any warning, and whether it exited successfully (exit code 0).
    """
    global separator_printed
    # this will be the smx location; create the folder if it doesn't exist
    output_file_path = containing_folder(smx_out)
    if not os.path.isdir(output_file_path):
        os.makedirs(output_file_path, exist_ok=True)
    # file name, relative to current folder (used for the one-line result)
    sp_file_relative = sp_file.replace(os.path.dirname(__file__) + "\\", "")
    # compiler process: quiet mode, plus every include folder found earlier
    compiler_launch_params = ["spcomp.exe", sp_file, "-o", smx_out, "-v=0"]
    for include in include_folders:
        compiler_launch_params += ["-i", include]
    run_compiler = subprocess.Popen(compiler_launch_params, stdout=subprocess.PIPE)
    # compiler lines worth showing (size reports and count summaries are noise)
    compiler_stdout = []
    errors, warnings = False, False
    size_reports = ("Code size:", "Data size:", "Stack/heap size:", "Total requirements:")
    warn_count_re = re.compile(r"[0-9]+ Warnings?\.")
    error_count_re = re.compile(r"[0-9]+ Errors?\.")
    error_line_re = re.compile(r" : (fatal )?error [0-9]+:")
    warning_line_re = re.compile(r" : warning [0-9]+:")
    # analyze lines, check for errors or warnings
    for raw_line in run_compiler.stdout:
        line = raw_line.decode("utf-8").rstrip()
        # keep only meaningful diagnostic lines
        if (not any(tag in line for tag in size_reports)
                and not warn_count_re.search(line)
                and not error_count_re.search(line)
                and re.search(r"\w+", line)):
            compiler_stdout.append(line)
        # this will be for a final message, after compiling everything
        if not errors and error_line_re.search(line):
            errors = True
        elif not warnings and warning_line_re.search(line):
            warnings = True
    # spcomp exits with 0 when the plugin was actually built
    compiled = run_compiler.wait() == 0
    # make it more beautiful
    format_logs(compiler_stdout, sp_file)
    # print results
    if errors or warnings:
        if not separator_printed:
            print(colors.gray + ("─" * 64) + colors.default)
        # compilation result
        print(sp_file_relative + ":", f"{colors.red}ERROR" if errors else f"{colors.yellow}WARNING", colors.default)
        # compiler logs
        for line in compiler_stdout:
            print(line)
        print(colors.gray + ("─" * 64) + colors.default)
        separator_printed = True
    else:
        print(sp_file_relative + ":", f"{colors.green}OK", f"{colors.default}")
        separator_printed = False
    return errors, warnings, compiled  # booleans: had errors, had warnings, was compiled
# get includes from just this file (not recursively)
def get_includes(file):
    """Return the include names referenced by *file* as .inc file names.

    Angle-bracket includes (#include <x>) get ".inc" appended; quoted includes
    are returned as written with the quotes stripped. Includes inside block or
    line comments are ignored.
    """
    # read the whole file at once (the old line-by-line += was quadratic)
    with open(file, "r") as opened_file:
        file_as_str = opened_file.read()
    # remove comments so commented-out includes are skipped
    file_as_str = re.sub(r"/\*.*?\*/", "", file_as_str, flags=re.DOTALL)
    file_as_str = re.sub(r"(//[^\n]*)\n", "", file_as_str)
    # grab either <name> or "name" after #include
    file_includes = re.findall(r'#include ?([<"][^>"]+[>"])', file_as_str)
    # re.findall always returns a list, so no None check is needed
    return [
        include.replace('"', '').replace('<', '').replace('>', '.inc')
        for include in file_includes
    ]
# check if any include was updated (recursively)
def has_includes_updated(sp_file):
    """Return True if any include reachable from *sp_file* was updated.

    Walks the include graph, resolving each include name against the include
    folders in the same order spcomp does. Fixes the old worklist logic, which
    (a) overwrote includes_found per parsed file so only the LAST file's
    includes were ever followed, and (b) could re-parse files across passes,
    looping forever on circular includes.
    """
    # worklist of include names still to resolve; sourcemod.inc is implicit
    files_to_parse = ["sourcemod.inc"] + get_includes(sp_file)
    # names already handled — also protects against circular includes
    already_parsed = set()
    while files_to_parse:
        inc_file = files_to_parse.pop()
        if inc_file in already_parsed:
            continue
        already_parsed.add(inc_file)
        # search all include folders, in order: spcomp gives priority to the
        # folders in the order they were added in the parameters
        for inc_folder in include_folders:
            inc_file_path = os.path.join(inc_folder, inc_file)
            if os.path.isfile(inc_file_path):
                # the include itself changed: the plugin must be recompiled
                if file_was_updated[inc_file_path]:
                    return True
                # if not, keep walking into this file's own includes
                files_to_parse += get_includes(inc_file_path)
                break  # first matching folder wins; don't search the rest
    return False
# define whether it has to be compiled or not
def should_be_compiled(file):
    """Decide whether *file* needs to be (re)compiled this run."""
    # note: evaluation order matters — the json "src" entry is only read when
    # the first condition is False, and file_was_updated is always True for
    # files that are missing from the json
    return (
        file_was_updated[file]                          # the source changed
        or not os.path.isfile(output_path(file))        # the smx is missing
        or not json_dictionary["src"][file][COMPILED]   # last compile failed
        or has_includes_updated(file)                   # an include changed
    )
# entry point
def start_compiler():
    """Compile every .sp file that needs it, then print a colored summary."""
    # print compiler header (AM copyright and stuff)
    compiler_head = subprocess.Popen(["spcomp.exe"], stdout=subprocess.PIPE)
    for raw_line in compiler_head.stdout:
        line = raw_line.decode("utf-8").rstrip()
        # keep only the "SourcePawn ..." / "Copyright ..." header lines
        if line.startswith(("S", "C")):
            print(colors.liblue + line + colors.default)
    print("")
    # keep track of compilations
    all_plugins, all_errors, all_warnings, all_compiled = 0, 0, 0, 0
    for sp_file in all_sp_files:
        if not should_be_compiled(sp_file):
            continue
        errors, warnings, compiled = compile_plugin(sp_file, output_path(sp_file))
        if compiled:
            all_compiled += 1
        if errors:
            all_errors += 1
        elif warnings:
            all_warnings += 1
        all_plugins += 1
        successfully_compiled[sp_file] = compiled
    print("\n")
    # print a summary of all compilations
    if not all_plugins:
        print(f"{colors.gray}Nothing to compile.{colors.default}")
    else:
        # headline: how many plugins were built
        if not all_compiled:
            print(f"{colors.red}No plugin has been compiled.{colors.default} ", end="")
        elif all_plugins == all_compiled:
            if not all_warnings:
                print(f"{colors.green}Everything was successfully compiled.{colors.default}")
            else:
                print(f"{colors.green}Everything was compiled.{colors.default} ", end="")
        else:
            print(f"{colors.yellow}{all_compiled}/{all_plugins} plugin" + ("s" if all_plugins > 1 else "") + f" compiled.{colors.default} ", end="")
        # tail: warning/error counts, when there were any
        if all_warnings and all_errors:
            print(f"{colors.gray}({colors.yellow}{all_warnings} warning" + ("s" if all_warnings > 1 else "") + f" {colors.gray}and {colors.red}{all_errors} error" + ("s" if all_errors > 1 else "") + f"{colors.gray}){colors.default}")
        elif all_warnings:
            print(f"{colors.gray}({colors.yellow}{all_warnings} warning" + ("s" if all_warnings > 1 else "") + f"{colors.gray}){colors.default}")
        elif all_errors:
            print(f"{colors.gray}({colors.red}{all_errors} error" + ("s" if all_errors > 1 else "") + f"{colors.gray}){colors.default}")
    print("")
start_compiler()

# ========== preparing for the next compilation ========== #

# rebuild the json state for the next run
new_json_dictionary = {"src": {}, "inc": {}}
for sp_file in all_sp_files:
    # keep the previous "compiled" flag when the file wasn't compiled this run
    if sp_file in successfully_compiled:
        compiled_flag = successfully_compiled[sp_file]
    else:
        compiled_flag = json_dictionary["src"][sp_file][COMPILED]
    entry = [0.0, False]
    entry[UPD_TIME] = os.path.getmtime(sp_file)
    entry[COMPILED] = compiled_flag
    new_json_dictionary["src"][sp_file] = entry
for inc_file in all_inc_files:
    new_json_dictionary["inc"][inc_file] = os.path.getmtime(inc_file)
# write to file
# bug fix: write next to this script — the same place it is read from at
# startup — instead of the current working directory
with open(os.path.join(os.path.dirname(__file__), "compile.json"), 'w') as json_file:
    json.dump(new_json_dictionary, json_file, indent=4)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement