From db8b9fd0d0e7c373d4aff37ebd9d67f9f95d886c Mon Sep 17 00:00:00 2001 From: Brett Mayson Date: Sun, 20 Oct 2024 23:01:30 +0000 Subject: [PATCH 1/2] first pass --- .github/workflows/arma.yml | 2 - .../setting-up-the-development-environment.md | 62 +----- tools/.vscode/tasks.json | 10 - tools/build.py | 104 ---------- tools/sqf_linter.py | 83 -------- tools/sqf_validator.py | 195 ------------------ 6 files changed, 1 insertion(+), 455 deletions(-) delete mode 100644 tools/build.py delete mode 100644 tools/sqf_linter.py delete mode 100644 tools/sqf_validator.py diff --git a/.github/workflows/arma.yml b/.github/workflows/arma.yml index 1af2e7e0602..b03ac3f6b58 100644 --- a/.github/workflows/arma.yml +++ b/.github/workflows/arma.yml @@ -12,8 +12,6 @@ jobs: steps: - name: Checkout the source code uses: actions/checkout@v4 - - name: Validate SQF - run: python3 tools/sqf_validator.py - name: Validate Config run: python3 tools/config_style_checker.py - name: Validate Stringtables diff --git a/docs/wiki/development/setting-up-the-development-environment.md b/docs/wiki/development/setting-up-the-development-environment.md index 3dcb815153a..7b2933891e0 100644 --- a/docs/wiki/development/setting-up-the-development-environment.md +++ b/docs/wiki/development/setting-up-the-development-environment.md @@ -88,66 +88,6 @@ To start the game using this build, you can use the following modline: To create a complete build that you can use without the source files, with full binarization and all optimizations, run `$ hemtt release` in the root folder. This will populate the `.hemttout/release` folder with binarized PBOs and an archive in `releases` that you can redistribute. These handle like those of any other mod. - -## 4. Setup and Building (Mikero Tools) - -### 4.1 Initial Setup - -Navigate to `tools` folder in command line. - -``` -cd "[location of the ACE3 project]\tools" -``` - -Execute `setup.py` to create symbolic links to P-drive and Arma 3 directory required for building. - - -Should the script fail, you can create the required links manually. First, create `z` folders both in your Arma 3 directory and on your P-drive. Then run the following commands as admin, replacing the text in brackets with the appropriate paths: - -```bat -mklink /J "[Arma 3 installation folder]\z\ace" "[location of the ACE3 project]" -mklink /J "P:\z\ace" "[location of the ACE3 project]" -``` - -Then, copy the `cba` folder from the `include\x` folder to `P:\x\cba`. Create the `x` folder if needed. That folder contains the parts of the CBA source code that are required for the macros to work. - - -## 4.2 Creating a Test Build - -To create a development build of ACE3 to test changes or to debug something, run the `build.py` file in the `tools` folder. This will populate the `addons` folder with binarized PBOs. These PBOs still point to the source files in their respective folders however, which allows you to use [file patching](#file-patching). This also means that you cannot distribute this build to others. 
- -To start the game using this build, you can use the following modline: - -```sh --mod=@CBA_A3;z\ace -``` - -## 4.3 Creating a Release Build - -To create a complete build of ACE3 that you can use without the source files you will need to: - -- Ensure `.hpp` is **NOT** in pboProject's "Exclude From Pbo" list - -When the requirements are met: - -- Execute `make.py version increment_build force checkexternal release` in the `tools` folder, replacing `` with the part of version you want to increment (options described below) - -This will populate the `release` folder with binarized PBOs, compiled extensions, copied extras, bisigns and a bikey. Additionally, an archive file will also be created in the folder. The folder and archive handle like those of any other mod. - -Different `make.py` command line options include: - -- `version` - update version number in all files and leave them in working directory (leaving this out will still update the version in all files present in the `release` folder, but they will be reverted to not disturb the working directory) -- `increment_build` - increments _build_ version number -- `increment_patch` - increments _patch_ version number (ignored with `increment_minor` or `increment_major`) -- `increment_minor` - increments _minor_ version number and resets _patch_ version number to `0` (ignored with `increment_major`) -- `increment_major` - increments _major_ version number and resets _minor_ and _patch_ version numbers to `0` -- `force` - force rebuild all PBOs, even those already present in the `release` directory -- `checkexternal` - check external references (incompatible only with ` ` and `force `) -- `release` - create release packages/archives -- ` ` - build only specified component(s) (incompatible with `release`) -- `force ` - force rebuild specified component(s) (incompatible with `release`) - - ## 7. File Patching File Patching allows you to change the files in an addon while the game is running, requiring only a restart of the mission. This makes it great for debugging, as it cuts down the time required between tests. @@ -187,4 +127,4 @@ Files must exist in the built PBOs for file patching to work. If you create a ne Configs are not patched during run time, only at load time. You do not have to rebuild a PBO to make config changes, just restart Arma. You can get around this though if you are on the dev branch of Arma 3 and running the [diagnostic exe](https://community.bistudio.com/wiki/Arma_3_Diagnostics_Exe). That includes `diag_mergeConfigFile` which takes a full system path (as in `diag_mergeConfigFile ["p:\z\ace\addons\my_module\config.cpp"]`) and allows you selectively reload config files. -If you need to add/remove files, then you'll need to run HEMTT/`build.py` again without the game running, and restart. That is all that is required to add new files for further use in testing. +If you need to add/remove files, then you'll need to run HEMTT again without the game running, and restart. That is all that is required to add new files for further use in testing. 
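For reference, a minimal sketch of the rebuild-and-restart cycle the file patching notes above describe, assuming the current HEMTT CLI exposes `hemtt dev` and `hemtt launch` (verify the exact commands with `hemtt --help`):

```sh
# Rebuild the development PBOs after adding or removing files (with the game closed).
# `hemtt dev` is assumed here to fill the role the removed build.py used to play.
hemtt dev

# Start Arma 3 with the development build; file patching is assumed to be enabled
# for dev launches. If not, add -filePatching to the launch parameters.
hemtt launch
```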
diff --git a/tools/.vscode/tasks.json b/tools/.vscode/tasks.json index 277d4bcc85b..3307b63875e 100644 --- a/tools/.vscode/tasks.json +++ b/tools/.vscode/tasks.json @@ -1,16 +1,6 @@ { "version": "2.0.0", "tasks": [ - { - "label": "Validate SQF", - "command": "${config:python.pythonPath}", - "options": { - "cwd": "${workspaceFolder}/tools" - }, - "args": [ - "sqf_validator.py" - ] - }, { "label": "Validate Config", "command": "${config:python.pythonPath}", diff --git a/tools/build.py b/tools/build.py deleted file mode 100644 index 49887d91cdc..00000000000 --- a/tools/build.py +++ /dev/null @@ -1,104 +0,0 @@ -#!/usr/bin/env python3 - -import os -import sys -import subprocess -import shutil - -######## GLOBALS ######### -MAINPREFIX = "z" -PREFIX = "ace_" -########################## - -def tryHemttBuild(projectpath): - if shutil.which("hemtt"): - os.chdir(projectpath) - ret = subprocess.call(["hemtt", "pack"], stderr=subprocess.STDOUT) - print("Using hemtt: {}".format(ret)) - return True - else: - print("hemtt not installed") - return False - -def mod_time(path): - if not os.path.isdir(path): - return os.path.getmtime(path) - maxi = os.path.getmtime(path) - for p in os.listdir(path): - maxi = max(mod_time(os.path.join(path, p)), maxi) - return maxi - - -def check_for_changes(addonspath, module): - if not os.path.exists(os.path.join(addonspath, "{}{}.pbo".format(PREFIX,module))): - return True - return mod_time(os.path.join(addonspath, module)) > mod_time(os.path.join(addonspath, "{}{}.pbo".format(PREFIX,module))) - -def check_for_obsolete_pbos(addonspath, file): - module = file[len(PREFIX):-4] - if not os.path.exists(os.path.join(addonspath, module)): - return True - return False - -def main(): - print(""" - #################### - # ACE3 Debug Build # - #################### -""") - - scriptpath = os.path.realpath(__file__) - projectpath = os.path.dirname(os.path.dirname(scriptpath)) - addonspath = os.path.join(projectpath, "addons") - - if (tryHemttBuild(projectpath)): return - - os.chdir(addonspath) - - made = 0 - failed = 0 - skipped = 0 - removed = 0 - - for file in os.listdir(addonspath): - if os.path.isfile(file): - if check_for_obsolete_pbos(addonspath, file): - removed += 1 - print(" Removing obsolete file => " + file) - os.remove(file) - print("") - - for p in os.listdir(addonspath): - path = os.path.join(addonspath, p) - if not os.path.isdir(path): - continue - if p[0] == ".": - continue - if not check_for_changes(addonspath, p): - skipped += 1 - print(" Skipping {}.".format(p)) - continue - - print("# Making {} ...".format(p)) - - try: - subprocess.check_output([ - "makepbo", - "-NUP", - "-@={}\\{}\\addons\\{}".format(MAINPREFIX,PREFIX.rstrip("_"),p), - p, - "{}{}.pbo".format(PREFIX,p) - ], stderr=subprocess.STDOUT) - except: - failed += 1 - print(" Failed to make {}.".format(p)) - else: - made += 1 - print(" Successfully made {}.".format(p)) - - print("\n# Done.") - print(" Made {}, skipped {}, removed {}, failed to make {}.".format(made, skipped, removed, failed)) - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/tools/sqf_linter.py b/tools/sqf_linter.py deleted file mode 100644 index c71c72094ee..00000000000 --- a/tools/sqf_linter.py +++ /dev/null @@ -1,83 +0,0 @@ -#!/usr/bin/env python3 -# Requires: https://github.com/LordGolias/sqf - -import os -import sys -import argparse -import concurrent.futures -from sqf.parser import parse -import sqf.analyzer -from sqf.exceptions import SQFParserError - -addon_base_path = 
os.path.dirname(os.path.dirname(os.path.realpath(__file__))) - - -def get_files_to_process(basePath): - arma_files = [] - for (root, _dirs, files) in os.walk(basePath): - for file in files: - if file.endswith(".sqf"): - if file.endswith(".inc.sqf"): - continue - filePath = os.path.join(root, file) - arma_files.append(filePath) - return arma_files - - -def process_file(filePath): - errors = [] - warnings = [] - try: - with open(filePath, "r", encoding="utf-8", errors="ignore") as file: - content = file.read() - if "#ASC_ignoreFile" in content: - return (filePath, errors, warnings) - sqfLintParse = parse(content) - exceptions = sqf.analyzer.analyze(sqfLintParse).exceptions - if (exceptions): - for e in exceptions: - if ("assigned to an outer scope" in e.message): - warnings.append(f"[{e.position[0]},{e.position[1]}] {e.message}") - if ("is not from this scope" in e.message): - warnings.append(f"[{e.position[0]},{e.position[1]}] {e.message}") - if ("not used" in e.message): - warnings.append(f"[{e.position[0]},{e.position[1]}] {e.message}") - - # most of this is just noise about macro parsing: - # if (e.message.startswith("error")): - # errors.append(f"[{e.position[0]},{e.position[1]}] {e.message}") - # else: - # warnings.append(f"[{e.position[0]},{e.position[1]}] {e.message}") - except Exception as e: - # errors.append(f"Exception {e}") - pass - return (filePath, errors, warnings) - - -def main(): - - parser = argparse.ArgumentParser() - parser.add_argument('-m', '--module', help='only search specified module addon folder', required=False, default=".") - args = parser.parse_args() - - error_count = 0 - addon_base_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) - if (args.module): addon_base_path = os.path.join(addon_base_path, "addons", args.module) - arma_files = get_files_to_process(addon_base_path) - print(f"Checking {len(arma_files)} files from {addon_base_path}") - with concurrent.futures.ThreadPoolExecutor(max_workers=12) as executor: - for (filePath, errors, warnings) in executor.map(process_file, arma_files): - if errors or warnings: - error_count += 1 - print(f"{filePath}") - for e in errors: - print(f" {e}") - for e in warnings: - print(f" {e}") - - print("Errors: {}".format(error_count)) - return error_count - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/tools/sqf_validator.py b/tools/sqf_validator.py deleted file mode 100644 index facdb1142d6..00000000000 --- a/tools/sqf_validator.py +++ /dev/null @@ -1,195 +0,0 @@ -#!/usr/bin/env python3 - -import fnmatch -import os -import re -import ntpath -import sys -import argparse - -def validKeyWordAfterCode(content, index): - keyWords = ["for", "do", "count", "each", "forEach", "else", "and", "not", "isEqualTo", "in", "call", "spawn", "execVM", "catch", "param", "select", "apply", "findIf", "remoteExec"]; - for word in keyWords: - try: - subWord = content.index(word, index, index+len(word)) - return True; - except: - pass - return False - -def check_sqf_syntax(filepath): - bad_count_file = 0 - def pushClosing(t): - closingStack.append(closing.expr) - closing << Literal( closingFor[t[0]] ) - - def popClosing(): - closing << closingStack.pop() - - with open(filepath, 'r', encoding='utf-8', errors='ignore') as file: - content = file.read() - - # Store all brackets we find in this file, so we can validate everything on the end - brackets_list = [] - - # To check if we are in a comment block - isInCommentBlock = False - checkIfInComment = False - # Used in case we are in a line comment (//) - 
ignoreTillEndOfLine = False - # Used in case we are in a comment block (/* */). This is true if we detect a * inside a comment block. - # If the next character is a /, it means we end our comment block. - checkIfNextIsClosingBlock = False - - # We ignore everything inside a string - isInString = False - # Used to store the starting type of a string, so we can match that to the end of a string - inStringType = ''; - - lastIsCurlyBrace = False - checkForSemicolon = False - onlyWhitespace = True - - # Extra information so we know what line we find errors at - lineNumber = 1 - - indexOfCharacter = 0 - # Parse all characters in the content of this file to search for potential errors - for c in content: - if (lastIsCurlyBrace): - lastIsCurlyBrace = False - # Test generates false positives with binary commands that take CODE as 2nd arg (e.g. findIf) - checkForSemicolon = not re.search('findIf', content, re.IGNORECASE) - - if c == '\n': # Keeping track of our line numbers - onlyWhitespace = True # reset so we can see if # is for a preprocessor command - lineNumber += 1 # so we can print accurate line number information when we detect a possible error - if (isInString): # while we are in a string, we can ignore everything else, except the end of the string - if (c == inStringType): - isInString = False - # if we are not in a comment block, we will check if we are at the start of one or count the () {} and [] - elif (isInCommentBlock == False): - - # This means we have encountered a /, so we are now checking if this is an inline comment or a comment block - if (checkIfInComment): - checkIfInComment = False - if c == '*': # if the next character after / is a *, we are at the start of a comment block - isInCommentBlock = True - elif (c == '/'): # Otherwise, will check if we are in an line comment - ignoreTillEndOfLine = True # and an line comment is a / followed by another / (//) We won't care about anything that comes after it - - if (isInCommentBlock == False): - if (ignoreTillEndOfLine): # we are in a line comment, just continue going through the characters until we find an end of line - if (c == '\n'): - ignoreTillEndOfLine = False - else: # validate brackets - if (c == '"' or c == "'"): - isInString = True - inStringType = c - elif (c == '#' and onlyWhitespace): - ignoreTillEndOfLine = True - elif (c == '/'): - checkIfInComment = True - elif (c == '('): - brackets_list.append('(') - elif (c == ')'): - if (brackets_list[-1] in ['{', '[']): - print("ERROR: Possible missing round bracket ')' detected at {0} Line number: {1}".format(filepath,lineNumber)) - bad_count_file += 1 - brackets_list.append(')') - elif (c == '['): - brackets_list.append('[') - elif (c == ']'): - if (brackets_list[-1] in ['{', '(']): - print("ERROR: Possible missing square bracket ']' detected at {0} Line number: {1}".format(filepath,lineNumber)) - bad_count_file += 1 - brackets_list.append(']') - elif (c == '{'): - brackets_list.append('{') - elif (c == '}'): - lastIsCurlyBrace = True - if (brackets_list[-1] in ['(', '[']): - print("ERROR: Possible missing curly brace '}}' detected at {0} Line number: {1}".format(filepath,lineNumber)) - bad_count_file += 1 - brackets_list.append('}') - elif (c== '\t'): - print("ERROR: Tab detected at {0} Line number: {1}".format(filepath,lineNumber)) - bad_count_file += 1 - - if (c not in [' ', '\t', '\n']): - onlyWhitespace = False - - if (checkForSemicolon): - if (c not in [' ', '\t', '\n', '/']): # keep reading until no white space or comments - checkForSemicolon = False - if (c not in 
[']', ')', '}', ';', ',', '&', '!', '|', '='] and not validKeyWordAfterCode(content, indexOfCharacter)): # , 'f', 'd', 'c', 'e', 'a', 'n', 'i']): - print("ERROR: Possible missing semicolon ';' detected at {0} Line number: {1}".format(filepath,lineNumber)) - bad_count_file += 1 - - else: # Look for the end of our comment block - if (c == '*'): - checkIfNextIsClosingBlock = True; - elif (checkIfNextIsClosingBlock): - if (c == '/'): - isInCommentBlock = False - elif (c != '*'): - checkIfNextIsClosingBlock = False - indexOfCharacter += 1 - - if brackets_list.count('[') != brackets_list.count(']'): - print("ERROR: A possible missing square bracket [ or ] in file {0} [ = {1} ] = {2}".format(filepath,brackets_list.count('['),brackets_list.count(']'))) - bad_count_file += 1 - if brackets_list.count('(') != brackets_list.count(')'): - print("ERROR: A possible missing round bracket ( or ) in file {0} ( = {1} ) = {2}".format(filepath,brackets_list.count('('),brackets_list.count(')'))) - bad_count_file += 1 - if brackets_list.count('{') != brackets_list.count('}'): - print("ERROR: A possible missing curly brace {{ or }} in file {0} {{ = {1} }} = {2}".format(filepath,brackets_list.count('{'),brackets_list.count('}'))) - bad_count_file += 1 - pattern = re.compile('\s*(/\*[\s\S]+?\*/)\s*#include') - if pattern.match(content): - print("ERROR: A found #include after block comment in file {0}".format(filepath)) - bad_count_file += 1 - if ("functions" in filepath): - if (content.startswith("#include \"script_component.hpp\"")): - print(f"ERROR: Using old script_component.hpp in {filepath}") - bad_count_file += 1 - - - - return bad_count_file - -def main(): - - print("Validating SQF") - - sqf_list = [] - bad_count = 0 - - parser = argparse.ArgumentParser() - parser.add_argument('-m','--module', help='only search specified module addon folder', required=False, default="") - args = parser.parse_args() - - for folder in ['addons', 'optionals']: - # Allow running from root directory as well as from inside the tools directory - rootDir = "../" + folder - if (os.path.exists(folder)): - rootDir = folder - - for root, dirnames, filenames in os.walk(rootDir + '/' + args.module): - for filename in fnmatch.filter(filenames, '*.sqf'): - sqf_list.append(os.path.join(root, filename)) - - for filename in sqf_list: - bad_count = bad_count + check_sqf_syntax(filename) - - - print("------\nChecked {0} files\nErrors detected: {1}".format(len(sqf_list), bad_count)) - if (bad_count == 0): - print("SQF validation PASSED") - else: - print("SQF validation FAILED") - - return bad_count - -if __name__ == "__main__": - sys.exit(main()) From 69c7a1f15a83eb2e074a0127d25cbd13f11c3b92 Mon Sep 17 00:00:00 2001 From: Brett Mayson Date: Mon, 21 Oct 2024 23:46:54 +0000 Subject: [PATCH 2/2] pass two --- tools/config_style_checker.py | 19 ++--- tools/config_validator.py | 131 ----------------------------- tools/github_privates_bot.py | 22 ----- tools/search_privates.py | 12 +-- tools/search_undefinedFunctions.py | 14 ++- tools/search_unused_privates.py | 20 ++--- tools/stringtable_sort.py | 110 ------------------------ 7 files changed, 20 insertions(+), 308 deletions(-) delete mode 100644 tools/config_validator.py delete mode 100644 tools/github_privates_bot.py delete mode 100644 tools/stringtable_sort.py diff --git a/tools/config_style_checker.py b/tools/config_style_checker.py index afa78a2d6b2..fd04c8fa990 100644 --- a/tools/config_style_checker.py +++ b/tools/config_style_checker.py @@ -3,18 +3,11 @@ import fnmatch import os import re 
-import ntpath import sys import argparse def check_config_style(filepath): bad_count_file = 0 - def pushClosing(t): - closingStack.append(closing.expr) - closing << Literal( closingFor[t[0]] ) - - def popClosing(): - closing << closingStack.pop() reIsClass = re.compile(r'^\s*class(.*)') reIsClassInherit = re.compile(r'^\s*class(.*):') @@ -42,7 +35,7 @@ def popClosing(): # We ignore everything inside a string isInString = False # Used to store the starting type of a string, so we can match that to the end of a string - inStringType = ''; + inStringType = '' lastIsCurlyBrace = False checkForSemiColumn = False @@ -109,7 +102,7 @@ def popClosing(): else: # Look for the end of our comment block if (c == '*'): - checkIfNextIsClosingBlock = True; + checkIfNextIsClosingBlock = True elif (checkIfNextIsClosingBlock): if (c == '/'): isInCommentBlock = False @@ -163,10 +156,10 @@ def main(): rootDir = folder for root, dirnames, filenames in os.walk(rootDir + '/' + args.module): - for filename in fnmatch.filter(filenames, '*.cpp'): - sqf_list.append(os.path.join(root, filename)) - for filename in fnmatch.filter(filenames, '*.hpp'): - sqf_list.append(os.path.join(root, filename)) + for filename in fnmatch.filter(filenames, '*.cpp'): + sqf_list.append(os.path.join(root, filename)) + for filename in fnmatch.filter(filenames, '*.hpp'): + sqf_list.append(os.path.join(root, filename)) for filename in sqf_list: bad_count = bad_count + check_config_style(filename) diff --git a/tools/config_validator.py b/tools/config_validator.py deleted file mode 100644 index 50722319bed..00000000000 --- a/tools/config_validator.py +++ /dev/null @@ -1,131 +0,0 @@ -#!/usr/bin/env python3 - -#by PabstMirror - python script to verify all addons using MakePbo's lint checking and extFile Checking -#Arguments (eg: `config_validator.py full`): -#full dump full deRaped config of problem -#skipExt skips checking external file references - -import os -import sys -import subprocess -import timeit -import time - -######## GLOBALS ######### -MAINPREFIX = "Z" -PREFIX = "ACE" -########################## - -def Fract_Sec(s): - temp = float() - temp = float(s) / (60*60*24) - d = int(temp) - temp = (temp - d) * 24 - h = int(temp) - temp = (temp - h) * 60 - m = int(temp) - temp = (temp - m) * 60 - sec = temp - return d,h,m,sec - -def CheckPBO(p,useMakePbo,checkExternalFiles,errors): - try: - if useMakePbo: - makePboArgs = "-PGU" - if not checkExternalFiles: - makePboArgs = "-PU" - subprocess.run([ - "makepbo", - makePboArgs, - "-@={}\\{}\\addons\\{}".format(MAINPREFIX,PREFIX.rstrip("_"),p), - p, - "{}_{}.pbo".format(PREFIX,p) - ], stdin=None, input=None, stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True) - else: - makePboArgs = "-LEP" - if not checkExternalFiles: - makePboArgs = "-LP" - subprocess.run([ - "rapify", - makePboArgs, - p - ], stdin=None, input=None, stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True) - - except subprocess.CalledProcessError as e: - print("!! 
Problem With {} ret {} !!".format(p, e.returncode)) - print(" stderr: {}".format(e.stderr)) - errors.append(p) - else: - print(" Checked ok {}".format(p)) - return - -def fullDump(p): - try: - subprocess.run([ - "makepbo", - "-PGUS", #G Check external references -S show deRap - P dont pause - "-@={}\\{}\\addons\\{}".format(MAINPREFIX,PREFIX.rstrip("_"),p), - p, - "{}_{}.pbo".format(PREFIX,p) - ], stdin=None, input=None, check=True) - except subprocess.CalledProcessError as e: - input("Press Enter to continue...") - return - -def main(argv): - print(""" - #################### - # ACE3 Config Check # - #################### -""") - - start_time = timeit.default_timer() - - addonspath = os.path.join("P:\\",MAINPREFIX,PREFIX,"addons") - - print("Switching to dir: {}".format(addonspath)) - try: - os.chdir(addonspath) - except: - raise Exception("Failed to switch to addon dir on P:") - - useMakePbo = False - checkExternalFiles = True - - if "skipExt" in argv: - print("Skipping External Files Check"); - checkExternalFiles = False - if "make" in argv: - # will check more files like RTM and RVMats but twice as slow - # This also actually builds a pbo (in same spot as build.py) - print("Using makePbo to verify all files"); - useMakePbo = True - - errors = [] - - for p in os.listdir(addonspath): - path = os.path.join(addonspath, p) - if not os.path.isdir(path): - continue - if p[0] == ".": - continue - CheckPBO(p,useMakePbo,checkExternalFiles,errors) - - - d,h,m,s = Fract_Sec(timeit.default_timer() - start_time) - print("\n# Done with {0} errrors [took: {1:2}h {2:2}m {3:4.5f}s]".format(len(errors),h,m,s)) - - if (len(errors) > 0): - if "full" in argv: - input("Dumping Full DeRap: Press Enter to continue...") - for p in errors: - fullDump(p) - else: - print("use 'full' arg to show derap") - - ret = len(errors) - print("return {}".format(ret)) - return ret - -if __name__ == "__main__": - main(sys.argv) diff --git a/tools/github_privates_bot.py b/tools/github_privates_bot.py deleted file mode 100644 index 4496147b04e..00000000000 --- a/tools/github_privates_bot.py +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env python3 - -import os -import argparse - -from pygithub3 import Github - -def main(): - gh = Github(user='acemod', repo='ACE3') - - pull_requests = gh.pull_requests.list().all() - - for request in pull_requests: - files = gh.pull_requests.list_files(request.number).all() - - for file in files: - # print file.filename - if '.sqf' in file.filename: - print file - -if __name__ == "__main__": - main() \ No newline at end of file diff --git a/tools/search_privates.py b/tools/search_privates.py index 1fe5c14887b..8f5ce68d888 100644 --- a/tools/search_privates.py +++ b/tools/search_privates.py @@ -3,8 +3,6 @@ import fnmatch import os import re -import ntpath -import sys import argparse def get_private_declare(content): @@ -44,12 +42,6 @@ def get_private_declare(content): def check_privates(filepath): bad_count_file = 0 - def pushClosing(t): - closingStack.append(closing.expr) - closing << Literal( closingFor[t[0]] ) - - def popClosing(): - closing << closingStack.pop() with open(filepath, 'r') as file: content = file.read() @@ -122,8 +114,8 @@ def main(): args = parser.parse_args() for root, dirnames, filenames in os.walk('../addons' + '/' + args.module): - for filename in fnmatch.filter(filenames, '*.sqf'): - sqf_list.append(os.path.join(root, filename)) + for filename in fnmatch.filter(filenames, '*.sqf'): + sqf_list.append(os.path.join(root, filename)) for filename in sqf_list: bad_count = bad_count + 
check_privates(filename) diff --git a/tools/search_undefinedFunctions.py b/tools/search_undefinedFunctions.py index 6789bbe93d5..99d02701253 100644 --- a/tools/search_undefinedFunctions.py +++ b/tools/search_undefinedFunctions.py @@ -3,8 +3,6 @@ import fnmatch import os import re -import ntpath -import sys import argparse # handle x64 python clipboard, ref https://forums.autodesk.com/t5/maya-programming/ctypes-bug-cannot-copy-data-to-clipboard-via-python/m-p/9197068/highlight/true#M10992 @@ -125,12 +123,12 @@ def main(): addon_base_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) for root, dirnames, filenames in os.walk(addon_base_path +"/" + 'addons' + '/' + args.module): - for filename in fnmatch.filter(filenames, '*.sqf'): - sqf_list.append(os.path.join(root, filename)) - for filename in fnmatch.filter(filenames, '*.cpp'): - sqf_list.append(os.path.join(root, filename)) - for filename in fnmatch.filter(filenames, '*.hpp'): - sqf_list.append(os.path.join(root, filename)) + for filename in fnmatch.filter(filenames, '*.sqf'): + sqf_list.append(os.path.join(root, filename)) + for filename in fnmatch.filter(filenames, '*.cpp'): + sqf_list.append(os.path.join(root, filename)) + for filename in fnmatch.filter(filenames, '*.hpp'): + sqf_list.append(os.path.join(root, filename)) for filename in sqf_list: allFunctions = allFunctions + getFunctions(filename) diff --git a/tools/search_unused_privates.py b/tools/search_unused_privates.py index 04a1fc977d7..29d44b2d46c 100644 --- a/tools/search_unused_privates.py +++ b/tools/search_unused_privates.py @@ -3,8 +3,6 @@ import fnmatch import os import re -import ntpath -import sys import argparse def get_private_declare(content): @@ -19,7 +17,7 @@ def get_private_declare(content): srch = re.compile('(? 0: print (filepath) - private_output = 'private['; + private_output = 'private[' first = True for bad_priv in unused: if first: @@ -90,7 +82,7 @@ def popClosing(): else: private_output = private_output + '", "' + bad_priv - private_output = private_output + '"];'; + private_output = private_output + '"];' print (private_output) for bad_priv in unused: @@ -115,8 +107,8 @@ def main(): args = parser.parse_args() for root, dirnames, filenames in os.walk('../addons' + '/' + args.module): - for filename in fnmatch.filter(filenames, '*.sqf'): - sqf_list.append(os.path.join(root, filename)) + for filename in fnmatch.filter(filenames, '*.sqf'): + sqf_list.append(os.path.join(root, filename)) for filename in sqf_list: bad_count = bad_count + check_privates(filename) diff --git a/tools/stringtable_sort.py b/tools/stringtable_sort.py deleted file mode 100644 index 8610f558c50..00000000000 --- a/tools/stringtable_sort.py +++ /dev/null @@ -1,110 +0,0 @@ -#!/usr/bin/env python3 - -import fnmatch -import os -import xml.etree.ElementTree as ET - -# STRINGTABLE SORTER -# Author: kymckay, johnb43, mharis001 -# --------------------- -# Sorts all stringtable.xml files in the project. -# - Sorts both the entries and each entry's attributes (languages). -# - English is placed as the first translation. -# -# Important: This deletes all comments in the stringtable.xml files! 
- -def sort_children(parent): - parent[:] = sorted(parent, key=lambda child: child.get('ID') if child.tag == 'Key' else child.get('name')) - - for child in parent: - if child.tag in ('Package','Container'): - sort_children(child) - continue - - # This part sorts languages alphabetically - if child.tag == 'Key': - elements = [] - - for element in child.iter(): - if element.tag != 'Key': - elements.append(element) - - if len(elements) == 0: - continue - - # Sort languages alphabetically - elements = sorted(elements, key=lambda element: element.tag) - - # https://stackoverflow.com/questions/51410881/python-equivalent-of-c-find-if - try: - index = next(idx for idx, n in enumerate(elements) if n.tag == 'English') - except StopIteration: - print('ERROR - English missing in "{}"'.format(child.attrib.get('ID'))) - continue - - # Move English to the top - elementEnglish = elements[index] - elements.pop(index) - elements.insert(0, elementEnglish) - - # Remove old element and append it again - for element in elements: - child.remove(element) - child.append(element) - - -def indent_pretty(parent, level=0): - parent.tail = '\n' + "".join([' '] * level) - - if len(parent) == 0: - return - - last_child = parent[len(parent) - 1] - - for child in parent: - indent_pretty(child, level+1) - - if child == last_child: - child.tail = parent.tail - - -def main(): - print("Sorting Stringtables") - print("-----------------------") - - # Allow running from root directory and tools directory - root_dir = ".." - - if os.path.exists("addons"): - root_dir = "." - - # Check all stringtable.xml files in the project directory - stringtable_files = [] - - for root, _, files in os.walk(root_dir): - for file in fnmatch.filter(files, "stringtable.xml"): - if (".hemttout" in root): - continue - - stringtable_files.append(os.path.join(root, file)) - - stringtable_files.sort() - - for filepath in stringtable_files: - tree = ET.parse(filepath) - xml_root = tree.getroot() - - # Verify that stringtable is structured as expected - if xml_root.tag != 'Project': - print('Missing "Project" root tag: {}'.format(os.path.relpath(filepath, root_dir))) - continue - - sort_children(xml_root) - indent_pretty(xml_root) - print('Sorted: {}'.format(os.path.relpath(filepath, root_dir))) - - tree.write(filepath, encoding="utf-8", xml_declaration=True, method='xml') - - -if __name__ == "__main__": - main()
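The workflow change at the top of this patch keeps the Python-based config check in CI while dropping the standalone SQF validator. A rough local equivalent of the remaining checks, run from the repository root, assuming HEMTT's own check command now covers SQF linting (an assumption; the patch itself only removes the old tool):

```sh
# Config style check still referenced by .github/workflows/arma.yml:
python3 tools/config_style_checker.py

# SQF validation, previously handled by tools/sqf_validator.py, is assumed to be covered
# by HEMTT's built-in checks; verify with `hemtt --help` or the project documentation.
hemtt check
```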