Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- import os
- import re
- import json
- import shutil
- import subprocess
- from pathlib import Path
# ANSI escape sequences used to colorize console output.
# (Reconstructed indentation; source paste was flattened.)
class colors:
    default = "\u001b[0m"   # reset to the terminal default
    red = "\u001b[31m"      # compile errors
    yellow = "\u001b[33m"   # compile warnings
    liblue = "\u001b[94m"   # light blue: partial-success summary
    green = "\u001b[32m"    # success / "OK"
    gray = "\u001b[90m"     # separators and muted info
# ========== preparation ========== #
# Resolve the working directories as absolute Windows-style path strings.
# NOTE(review): backslash separators are hard-coded throughout, so this
# script is Windows-only as written; the values must stay plain strings
# because later code uses str.replace() and "+" concatenation on them.
input_dir = os.getcwd() + "\\scripting"    # SourcePawn .sp sources
output_dir = os.getcwd() + "\\compiled"    # compiled .smx output tree
include_dir = os.getcwd() + "\\include"    # .inc include files
def ig_f(dirpath, files):
    """shutil.copytree ignore-callback: ignore every regular file.

    Returning all regular-file names (and no directory names) makes
    copytree replicate only the directory structure of the source tree.

    :param dirpath: directory currently being visited by copytree
                    (renamed from ``dir`` to avoid shadowing the builtin)
    :param files:   names contained in that directory
    :return:        the subset of ``files`` that are regular files
    """
    return [f for f in files if os.path.isfile(os.path.join(dirpath, f))]
- shutil.copytree(input_dir, output_dir, ignore=ig_f, dirs_exist_ok=True)
# ========== checking stuff ========== #
# Is there a compile.json left over from a previous run?
json_exists = os.path.isfile(os.getcwd() + "\\" + "compile.json")
# Fresh state: per-source compile status and per-include mtimes.
json_dict = {"srcs": {}, "inc": {}}
# Every .sp under the scripting tree, converted from Path objects to
# plain strings (later code relies on str.replace and concatenation).
sp_list = [str(sp_path) for sp_path in Path(input_dir).glob('**/*.sp')]
# get all includes of a file
def get_includes(src_file):
    """Recursively collect every #include reachable from *src_file*.

    Seeds the work list with the source file itself plus sourcemod.inc,
    then repeatedly scans each reachable, existing file for
    ``#include <name>`` / ``#include "name"`` directives (block and line
    comments stripped first) and resolves each name against include_dir,
    defaulting the extension to ``.inc``.

    :param src_file: path of the .sp source file to scan
    :return: de-duplicated list of resolved include paths; paths that do
             not exist on disk are still returned, they are simply never
             opened for further scanning.
    """
    need_parsing = [src_file, include_dir + "\\sourcemod.inc"]
    already_parsed = []  # guard so no path is scanned (or re-queued) twice
    all_includes = []
    while need_parsing:
        add_to_parsing = []  # can't grow need_parsing while iterating it
        for file in need_parsing:
            # only scan files that exist, and only once
            if os.path.isfile(file) and file not in already_parsed:
                with open(file, "r") as opened_file:
                    file_as_str = opened_file.read()
                # strip /* ... */ block comments, then // line comments.
                # BUGFIX: the line-comment pattern previously matched a
                # single '/', which destroyed any directive containing a
                # path separator (e.g. #include <subdir/file>) and also
                # consumed the trailing newline, fusing lines together.
                file_as_str = re.sub(r"/\*.*?\*/", "", file_as_str, flags=re.DOTALL)
                file_as_str = re.sub(r"//[^\n]*", "", file_as_str)
                # re.findall always returns a list (possibly empty), so no
                # None check is needed
                included_files = re.findall(r'#include ?[<"]([^>"]+)[>"]', file_as_str)
                for i, inc in enumerate(included_files):
                    # resolve against the include directory ...
                    included_files[i] = include_dir + "\\" + included_files[i]
                    # ... and default the extension to .inc
                    if not inc.endswith(".inc") and not inc.endswith(".sp"):
                        included_files[i] += ".inc"
                add_to_parsing.extend(included_files)
                all_includes.extend(included_files)
            # Mark even missing/duplicate entries as handled; otherwise a
            # nonexistent include would stay in need_parsing forever.
            already_parsed.append(file)
        # end of iteration: now it is safe to mutate need_parsing
        need_parsing.extend(add_to_parsing)
        need_parsing = [x for x in need_parsing if x not in already_parsed]
    return list(set(all_includes))  # drop duplicates
# check if any include was updated
def includes_updated(file):
    """Return True when *file* must be recompiled because of its includes.

    Compares the current mtime of every include pulled in by *file*
    against the mtime recorded in json_dict["inc"].

    :param file: path of the .sp source file to check
    :return: True if any existing include is newer than its recorded
             mtime, or has no recorded mtime yet; False otherwise.

    BUGFIX: the previous version raised FileNotFoundError for includes
    that do not exist on disk, and KeyError for includes that were never
    recorded (e.g. a newly added #include); both are handled now.
    """
    for inc in get_includes(file):
        if not os.path.isfile(inc):
            continue  # unresolved include: nothing to compare against
        if inc not in json_dict["inc"]:
            return True  # new include with no recorded mtime -> recompile
        if os.path.getmtime(inc) > json_dict["inc"][inc]:
            return True
    return False
# check which files should be compiled
if json_exists:
    # load the state left by the previous run
    with open("compile.json") as json_file:
        json_dict = json.load(json_file)
    # Decide, per .sp file, whether it must be (re)compiled.
    # Each srcs entry is [compiled_ok, mtime_at_last_check].
    for file in sp_list:
        smx_file = (file.replace(input_dir, output_dir)).replace(".sp", ".smx")
        time = os.path.getmtime(file)
        known = json_dict["srcs"].get(file)
        # Recompile when: the .smx is missing, the file has no recorded
        # state (BUGFIX: a source with an existing .smx but absent from
        # compile.json used to raise KeyError here), its mtime changed,
        # the previous compile failed, or any include was updated.
        # Short-circuiting preserves the original order of checks, so
        # includes_updated() only runs when everything else is current.
        if (not os.path.isfile(smx_file)
                or known is None
                or time != known[1]
                or not known[0]
                or includes_updated(file)):
            json_dict["srcs"][file] = [False, time]
        else:
            # fully up to date: keep it marked as compiled
            json_dict["srcs"][file] = [True, time]
    # drop entries for source files that no longer exist on disk
    for key in [k for k in json_dict["srcs"] if k not in sp_list]:
        del json_dict["srcs"][key]
# if there is no json file, we should compile everything
else:
    for file in sp_list:
        # mark for compilation and remember the current mtime
        json_dict["srcs"][file] = [False, os.path.getmtime(file)]
        # also record the current mtime of every include it pulls in
        for inc in get_includes(file):
            json_dict["inc"][inc] = os.path.getmtime(inc)
# ========== compiling ========== #
sp_amount = 0              # number of plugins that needed compiling this run
compiled_amount = 0        # how many of those compiled successfully
errors_amount = 0          # plugins whose output contained errors
warnings_amount = 0        # plugins whose output contained warnings
last_printed_line = False  # True when the previous entry ended with a separator
# compile files that need to be compiled
# NOTE(review): indentation reconstructed from a flattened paste; the
# grouping below follows the inline comments and the data flow.
for file in json_dict["srcs"]:
    # srcs entries are [compiled_ok, mtime]; compile only the not-ok ones
    if not json_dict["srcs"][file][0]:
        sp_amount += 1
        errors = False
        warnings = False
        # mirror the source path into the compiled/ tree, .sp -> .smx
        smx_file = (file.replace(input_dir, output_dir)).replace(".sp", ".smx")
        # run the compiler (-v=0: quiet except for diagnostics)
        compiler = subprocess.Popen(["spcomp.exe", file, "-o", smx_file, "-v=0"], stdout=subprocess.PIPE)
        # check for errors and warnings in the compiler's stdout
        logs_list = []
        for line in compiler.stdout:
            line = line.decode("utf-8")
            # spcomp diagnostics look like "<path>(<line>) : error NNN: ..."
            if len(re.findall(" : (fatal )?error [0-9]+:", line)):
                logs_list.append(line)
                errors = True
            elif len(re.findall(" : warning [0-9]+:", line)):
                logs_list.append(line)
                warnings = True
        # print errors and warnings
        # source path relative to scripting/, leading backslash removed
        source_file = (file.replace(input_dir, "")).replace("\\", "", 1) + f": "
        if not errors and not warnings:
            print(" " + source_file + f"{colors.green}OK{colors.default}")
        elif errors:
            # open a separator block unless the previous file closed one
            if not last_printed_line: print(f"{colors.gray}", "─" * 64 + f"{colors.default}")
            print(" " + source_file + f"{colors.red}ERROR{colors.default}:")
        elif warnings:
            if not last_printed_line: print(f"{colors.gray}", "─" * 64 + f"{colors.default}")
            print(" " + source_file + f"{colors.yellow}WARNING{colors.default}:")
        if errors or warnings:
            for line in logs_list:
                # replace the absolute path so diagnostics read "line (N) : ..."
                print(" " + line.replace(file, "line "), end="")
            print(f"{colors.gray}", "─" * 64 + f"{colors.default}")
            last_printed_line = True
        else:
            last_printed_line = False
        time = os.path.getmtime(file)
        # check return code (wait() is safe here: stdout was fully drained)
        if compiler.wait() == 0: # compiled successfully
            json_dict["srcs"][file] = [True, time]
            compiled_amount += 1
            # record include mtimes for the next run
            # NOTE(review): placed inside the success branch — include
            # times appear to be recorded only for plugins that actually
            # compiled; confirm against the original layout.
            includes = get_includes(file)
            for inc in includes:
                json_dict["inc"][inc] = os.path.getmtime(inc)
        if errors:
            errors_amount += 1
        elif warnings:
            warnings_amount += 1
# persist the updated compile state (srcs statuses + include mtimes)
# so the next run can skip anything that is still up to date
with open("compile.json", 'w') as json_file:
    json.dump(json_dict, json_file, indent=4)
# print results
# Summary line: all / some / none of the queued plugins compiled.
if sp_amount == 0:
    print(f"{colors.gray}Nothing to compile.{colors.default}")
elif sp_amount == compiled_amount:
    print(f"\n\n {colors.green}All ({sp_amount}) plugins have been successfully compiled.{colors.default}", end="")
elif compiled_amount > 0:
    print(f"{colors.liblue}\n\n {compiled_amount} of {sp_amount} plugins have been successfully compiled.{colors.default}", end="")
else:
    print(f"\n\n {colors.red}No plugin has been compiled.{colors.default}", end="")
# Trailing note about warnings and/or errors, pluralized per count.
if warnings_amount and errors_amount:
    w_plural = "s" if warnings_amount > 1 else ""
    print(f" {colors.gray}({warnings_amount} plugin{w_plural} with warnings and {errors_amount} with errors){colors.default}")
else:
    if warnings_amount:
        w_plural = "s" if warnings_amount > 1 else ""
        print(f" {colors.gray}({warnings_amount} plugin{w_plural} compiled with warnings){colors.default}")
    if errors_amount:
        e_plural = "s" if errors_amount > 1 else ""
        print(f" {colors.gray}({errors_amount} plugin{e_plural} with errors){colors.default}")
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement