diff --git a/PantheonCMD/buildyml-generator.sh b/PantheonCMD/buildyml-generator.sh
new file mode 100644
index 0000000..4bddec2
--- /dev/null
+++ b/PantheonCMD/buildyml-generator.sh
@@ -0,0 +1,35 @@
+#!/bin/bash
+
+ROOT_REPO=$(git rev-parse --show-toplevel)
+SCRIPT_DIR=$(realpath "$(dirname "$0")")
+REPO_NAME=$(basename "$ROOT_REPO")
+
+if [[ -f "$ROOT_REPO/build.yml" ]]; then
+  read -p "build.yml already exists. Do you want to overwrite it? [y/N] " -n 1
+  echo
+  if [[ ! "$REPLY" =~ ^[yY]$ ]]; then
+    exit
+  fi
+fi
+
+cat > "$ROOT_REPO/build.yml" << EOF
+# Your repository name goes here
+repository: $REPO_NAME
+variants:
+  # Your variant name goes here
+  - name: PLACEHOLDER
+    # Path to your attributes file goes here
+    attributes:
+      - PATH/TO/_attributes.adoc
+    nav: PATH/TO/nav.yml
+    build: true
+    files:
+      # Paths to your assemblies, modules, and images go here
+      included:
+        - PATH/TO/ASSEMBLIES/*.adoc
+        - PATH/TO/MODULES/**/*.adoc
+        - PATH/TO/images/*.png
+
+EOF
+
+echo "build.yml successfully generated"
diff --git a/PantheonCMD/enki.py b/PantheonCMD/enki.py
new file mode 100644
index 0000000..27f7fa1
--- /dev/null
+++ b/PantheonCMD/enki.py
@@ -0,0 +1,29 @@
+#!/usr/bin/python3
+
+import argparse
+from pathlib import Path
+import os
+from enki_yaml_valiadtor import yaml_validation
+from enki_files_valiadtor import multi_file_validation, single_file_validation
+
+parser = argparse.ArgumentParser()
+parser.add_argument("path", type=Path)
+
+p = parser.parse_args()
+
+
+user_input = p.path
+
+if user_input.is_file():
+    file_name = os.path.basename(user_input)
+    file_path = os.path.dirname(user_input)
+    if file_name == 'build.yml':
+        yaml_validation(user_input, file_path)
+    else:
+        file_list = str(user_input).split()
+        single_file_validation(file_list)
+
+elif user_input.is_dir():
+    multi_file_validation(user_input)
+else:
+    print("ERROR: Provided path doesn't exist.")
diff --git a/PantheonCMD/pcchecks.py b/PantheonCMD/enki_checks.py
similarity index 90%
rename from PantheonCMD/pcchecks.py
rename to PantheonCMD/enki_checks.py
index 207fd6b..e9b93a5 100644
--- a/PantheonCMD/pcchecks.py
+++ b/PantheonCMD/enki_checks.py
@@ -9,6 +9,8 @@ class Tags:
     ABSTRACT = '[role="_abstract"]'
     ADD_RES = '[role="_additional-resources"]'
     EXPERIMENTAL = ':experimental:'
+    NBSP_ATT = ':nbsp:  '
+    NBSP_VAR = '{nbsp}'
     LVLOFFSET = ':leveloffset:'
     ICONS = ':icons:'
     TOC = ':toc:'
@@ -18,7 +20,8 @@ class Regex:
     """Define regular expresiions for the checks."""
 
     INCLUDE = re.compile(r'include::.*\]\n')
-    MODULE_TYPE = re.compile(r':_module-type: (PROCEDURE|CONCEPT|REFERENCE)')
+    MODULE_TYPE = re.compile(r':_content-type: (PROCEDURE|CONCEPT|REFERENCE)')
+    ASSEMBLY_TYPE = re.compile(r':_content-type: ASSEMBLY')
     PREFIX_ASSEMBLIES = re.compile(r'.*\/assembly.*\.adoc')
     PREFIX_MODULES = re.compile(r'.*\/con.*\.adoc|.*\/proc.*\.adoc|.*\/ref.*\.adoc')
     # should exclude pseudo vanilla like <>
@@ -36,6 +39,7 @@ class Regex:
     INTERNAL_IFDEF = re.compile(r'(ifdef::internal\[\])(.*\n)*?(endif::\[\])')
     CODE_BLOCK_DASHES = re.compile(r'(-{4,})(.*\n)*?(-{4,})')
     CODE_BLOCK_DOTS = re.compile(r'(\.{4,})(.*\n)*?(\.{4,})')
+    CODE_BLOCK_TWO_DASHES = re.compile(r'(-{2,})(.*\n)*?(-{2,})')
     HUMAN_READABLE_LABEL_XREF = re.compile(r'xref:.*\[]')
     HUMAN_READABLE_LABEL_LINK = re.compile(r'\b(?:https?|file|ftp|irc):\/\/[^\s\[\]<]*\[\]')
     NESTED_ASSEMBLY = re.compile(r'include.*assembly([a-z|0-9|A-Z|\-|_]+)\.adoc(\[.*\])')
@@ -62,6 +66,13 @@ def toc_check(report, stripped_file, file_path):
         report.create_report('toc attribute', file_path)
 
 
+def nbsp_check(report, stripped_file, file_path):
+    if re.findall(Tags.NBSP_ATT, stripped_file):
+        return
+    elif re.findall(Tags.NBSP_VAR, stripped_file):
+        report.create_report('`{nbsp}` attribute is used but not defined. `:nbsp:  ` attribute is not', file_path)
+
+
 def vanilla_xref_check(stripped_file):
     """Check if the file contains vanilla xrefs."""
     if re.findall(Regex.VANILLA_XREF, stripped_file):
@@ -74,12 +85,6 @@ def inline_anchor_check(stripped_file):
         return True
 
 
-def var_in_title_check(stripped_file):
-    """Check if the file contains a variable in the level 1 heading."""
-    if re.findall(Regex.VAR_IN_TITLE, stripped_file):
-        return True
-
-
 def experimental_tag_check(stripped_file):
     """Check if the experimental tag is set."""
     if stripped_file.count(Tags.EXPERIMENTAL) > 0:
@@ -121,14 +126,6 @@ def add_res_section_module_check(report, stripped_file, file_path):
     if not re.findall(Regex.ADD_RES_MODULE, stripped_file):
         report.create_report("Additional resources section for modules should be `.Additional resources`. Wrong section name was", file_path)
 
-
-# Standalone check on assemblies_found
-def nesting_in_assemblies_check(report, stripped_file, file_path):
-    """Check if file contains nested assemblies."""
-    if re.findall(Regex.NESTED_ASSEMBLY, stripped_file):
-        return report.create_report('nesting in assemblies. nesting', file_path)
-
-
 # Standalone check on assemblies_found
 def add_res_section_assembly_check(report, stripped_file, file_path):
     if re.findall(Regex.ADDITIONAL_RES, stripped_file):
@@ -142,9 +139,9 @@ def lvloffset_check(stripped_file):
         return True
 
 
-def abstarct_tag_none_or_multiple_check(stripped_file):
-    """Checks if the abstract tag is not set or set more than once."""
-    if stripped_file.count(Tags.ABSTRACT) != 1:
+def abstract_tag_multiple_check(stripped_file):
+    """Check if the abstract tag is set more than once."""
+    if stripped_file.count(Tags.ABSTRACT) > 1:
         return True
 
 
@@ -224,9 +221,6 @@ def checks(report, stripped_file, original_file, file_path):
     if inline_anchor_check(stripped_file):
         report.create_report('in-line anchors', file_path)
 
-    if var_in_title_check(stripped_file):
-        report.create_report('variable in the level 1 heading', file_path)
-
     if experimental_tag_check(stripped_file):
         report.create_report('files contain UI macros but the :experimental: tag not', file_path)
 
@@ -242,11 +236,8 @@ def checks(report, stripped_file, original_file, file_path):
     if lvloffset_check(stripped_file):
         report.create_report('unsupported use of :leveloffset:. unsupported includes', file_path)
 
-    if abstarct_tag_none_or_multiple_check(stripped_file):
-        if stripped_file.count(Tags.ABSTRACT) == 0:
-            report.create_report('abstract tag not', file_path)
-        else:
-            report.create_report('multiple abstract tags', file_path)
+    if abstract_tag_multiple_check(stripped_file):
+        report.create_report('multiple abstract tags', file_path)
 
     if abstract_tag_check(original_file):
         if re.findall(Regex.FIRST_PARA, original_file):
diff --git a/PantheonCMD/enki_files_valiadtor.py b/PantheonCMD/enki_files_valiadtor.py
new file mode 100644
index 0000000..547e40c
--- /dev/null
+++ b/PantheonCMD/enki_files_valiadtor.py
@@ -0,0 +1,130 @@
+#!/usr/bin/python3
+import os
+import sys
+from enki_checks import Regex, icons_check, toc_check, nbsp_check, checks, nesting_in_modules_check, add_res_section_module_check, add_res_section_assembly_check
+import re
+import subprocess
+from enki_yaml_valiadtor import Report, printing_build_yml_error
+
+
+def get_files_bash(file_path):
+    """Expand filepaths."""
+    command = "find " + str(file_path) + " -type f -name '*.adoc' 2>/dev/null"
+    process = subprocess.run(command, stdout=subprocess.PIPE, shell=True).stdout
+    files = process.strip().decode('utf-8').split('\n')
+
+    return files
+
+
+def sort_prefix_files(files):
+    """Get a list of assemblies, modules, and unidentified files."""
+    prefix_assembly = []
+    prefix_modules = []
+    undefined_content = []
+    attribute_file = []
+
+    for item in files:
+        file_name = os.path.basename(item)
+        file_path = os.path.dirname(item)
+
+        if file_path.startswith("_"):
+            attribute_file.append(item)
+        elif "/_" in file_path:
+            attribute_file.append(item)
+
+        if file_name.startswith('assembly'):
+            prefix_assembly.append(item)
+        elif file_name.startswith(("proc_", "con_", "ref_", "proc-", "con-", "ref-")):
+            prefix_modules.append(item)
+        elif file_name.startswith("_"):
+            attribute_file.append(item)
+        elif file_name.startswith(("snip_", "snip-")):
+            continue
+        elif file_name == 'master.adoc':
+            continue
+        else:
+            undefined_content.append(item)
+
+    return attribute_file, prefix_assembly, prefix_modules, undefined_content
+
+
+def file_validation(files):
+    """Validate all files."""
+    attribute_file, prefix_assembly, prefix_modules, undefined_content = sort_prefix_files(files)
+
+    all_files = attribute_file + prefix_assembly + prefix_modules + undefined_content
+
+    undetermined_file_type = []
+    confused_files = []
+
+    report = Report()
+
+    for path in all_files:
+        with open(path, 'r') as file:
+            original = file.read()
+            stripped = Regex.MULTI_LINE_COMMENT.sub('', original)
+            stripped = Regex.SINGLE_LINE_COMMENT.sub('', stripped)
+            stripped = Regex.CODE_BLOCK_DASHES.sub('', stripped)
+            stripped = Regex.CODE_BLOCK_TWO_DASHES.sub('', stripped)
+            stripped = Regex.CODE_BLOCK_DOTS.sub('', stripped)
+            stripped = Regex.INTERNAL_IFDEF.sub('', stripped)
+
+            icons_check(report, stripped, path)
+            toc_check(report, stripped, path)
+
+            if path in attribute_file:
+                nbsp_check(report, stripped, path)
+            else:
+                checks(report, stripped, original, path)
+
+            if path in undefined_content:
+                if re.findall(Regex.MODULE_TYPE, stripped):
+                    nesting_in_modules_check(report, stripped, path)
+                    add_res_section_module_check(report, stripped, path)
+
+                elif re.findall(Regex.ASSEMBLY_TYPE, stripped):
+                    add_res_section_assembly_check(report, stripped, path)
+
+                else:
+                    undetermined_file_type.append(path)
+
+            if path in prefix_assembly:
+                if re.findall(Regex.MODULE_TYPE, stripped):
+                    confused_files.append(path)
+                    nesting_in_modules_check(report, stripped, path)
+                    add_res_section_module_check(report, stripped, path)
+                else:
+                    add_res_section_assembly_check(report, stripped, path)
+
+            if path in prefix_modules:
+                if re.findall(Regex.ASSEMBLY_TYPE, stripped):
+                    confused_files.append(path)
+                    add_res_section_assembly_check(report, stripped, path)
+                else:
+                    nesting_in_modules_check(report, stripped, path)
+                    add_res_section_module_check(report, stripped, path)
+
+    if confused_files:
+        printing_build_yml_error("files that have mismatched name prefix and content type tag. Content type tag takes precedence. The files were checked according to the tag", confused_files)
+
+    if undetermined_file_type:
+        printing_build_yml_error('files that can not be classified as modules or assemblies', undetermined_file_type)
+
+    return report
+
+
+def multi_file_validation(file_path):
+    files = get_files_bash(file_path)
+    validation = file_validation(files)
+
+    if validation.count != 0:
+        validation.print_report()
+        sys.exit(2)
+
+
+def single_file_validation(files):
+    validation = file_validation(files)
+
+    if validation.count != 0:
+        validation.print_report()
+        sys.exit(2)
diff --git a/PantheonCMD/enki_yaml_valiadtor.py b/PantheonCMD/enki_yaml_valiadtor.py
new file mode 100644
index 0000000..77be410
--- /dev/null
+++ b/PantheonCMD/enki_yaml_valiadtor.py
@@ -0,0 +1,351 @@
+#!/usr/bin/python3
+
+import os
+import sys
+import yaml
+from cerberus import Validator, errors
+from cerberus.errors import BasicErrorHandler
+from enki_checks import Regex, icons_check, toc_check, nbsp_check, checks, nesting_in_modules_check, add_res_section_module_check, add_res_section_assembly_check
+import re
+import subprocess
+
+
+class CustomErrorHandler(BasicErrorHandler):
+    """Custom error messages."""
+
+    messages = errors.BasicErrorHandler.messages.copy()
+    messages[errors.REQUIRED_FIELD.code] = "key is missing"
+    messages[errors.UNKNOWN_FIELD.code] = "unsupported key"
+    messages[errors.NOT_NULLABLE.code] = "value can't be empty"
+
+
+def printing_build_yml_error(msg, *files):
+    """Print error message."""
+    print('\nERROR: Your build.yml contains the following {}:\n'.format(msg))
+    for file in files:
+        # callers pass either a single path or a list of paths
+        if isinstance(file, (list, tuple)):
+            for item in file:
+                print('\t', item)
+        elif file:
+            print('\t', file)
+
+
+class Report():
+    """Create and print report. thank u J."""
+
+    def __init__(self):
+        """Create placeholder for problem description."""
+        self.report = {}
+        self.count = 0
+
+    def create_report(self, category, file_path):
+        """Generate report."""
+        self.count += 1
+        if category not in self.report:
+            self.report[category] = []
+        self.report[category].append(file_path)
+
+    def print_report(self):
+        """Print report."""
+        separator = "\n\t"
+
+        for category, files in self.report.items():
+            print("\nERROR: {} found in the following files:".format(category))
+            print('\t' + separator.join(files))
+
+
+def get_yaml_size(yaml_file):
+    """Test if build.yml is empty."""
+    if os.path.getsize(yaml_file) == 0:
+        print("\nYour build.yml file is empty; exiting...")
+        sys.exit(2)
+
+
+def load_doc(yaml_file):
+    """Load build.yml and test for syntax errors."""
+    with open(yaml_file, 'r') as file:
+        try:
+            return yaml.safe_load(file)
+        except yaml.YAMLError:
+            print("There's a syntax error in your build.yml file. Please fix it and try again.\nTo detect an error try running yaml lint on your build.yml file.")
+            sys.exit(2)
+
+
+def get_yaml_errors(yaml_schema, yaml_doc):
+    """Validate build.yml against a schema and report errors."""
+    # load validator with custom error handler
+    v = Validator(yaml_schema, error_handler=CustomErrorHandler())
+    # validate the build.yml with schema
+    v.validate(yaml_doc, yaml_schema)
+
+    if v.errors:
+        print("ERROR: there is an error in your yaml file:")
+        for key in v.errors.keys():
+            print("\n\t'{}' {}".format(key, ', '.join(str(item) for item in v.errors[key])))
+        sys.exit(2)
+
+
+def get_attribute_files(yaml_doc):
+    """Get attribute files specified in build.yml."""
+    attribute_files = []
+
+    for variant in yaml_doc['variants']:
+        for item in variant['attributes']:
+            attribute_files.append(item)
+
+    return attribute_files
+
+
+def get_existence(path, files):
+    """Return a list of found files and a list of not found files."""
+    missing_files = []
+    existing_files = []
+
+    for item in files:
+        file = path + '/' + item
+        if os.path.exists(file):
+            existing_files.append(item)
+        else:
+            missing_files.append(item)
+
+    return existing_files, missing_files
+
+
+def get_files_bash(yaml_path, file_path):
+    """Expand filepaths."""
+    command = ("find " + yaml_path + '/' + file_path + " -type f 2>/dev/null")
+    process = subprocess.run(command, stdout=subprocess.PIPE, shell=True).stdout
+    files = process.strip().decode('utf-8').split('\n')
+
+    return files
+
+
+def get_files(yaml_path, yaml_doc, var):
+    """Get files listed in the build.yml."""
+    content_list = []
+    missing_files = []
+
+    for yaml_dict in yaml_doc['variants']:
+        for subkey in yaml_dict['files']:
+            if subkey != var:
+                continue
+
+            for include in yaml_dict['files'][var]:
+                content = get_files_bash(yaml_path, include)
+                if not content:
+                    continue
+
+                if '' in content:
+                    missing_files.append(include)
+                    continue
+
+                for i in content:
+                    if i not in content_list:
+                        content_list.append(i)
+
+    return content_list, missing_files
+
+
+def validate_attribute_files(path, attribute_files):
+    """Validate attributes file."""
+    report = Report()
+
+    for item in attribute_files:
+        file = path + '/' + item
+        with open(file, 'r') as f:
+            original = f.read()
+            stripped = Regex.MULTI_LINE_COMMENT.sub('', original)
+            stripped = Regex.SINGLE_LINE_COMMENT.sub('', stripped)
+
+            icons_check(report, stripped, item)
+            toc_check(report, stripped, item)
+            nbsp_check(report, stripped, item)
+
+    return report
+
+
+def get_attribute_file_errors(yaml_doc, path):
+    """Report errors found with attribute files."""
+    attribute_files = get_attribute_files(yaml_doc)
+    existing_attribute_files, missing_attribute_files = get_existence(path, attribute_files)
+
+    if missing_attribute_files:
+        printing_build_yml_error("attribute files that do not exist in your repository", missing_attribute_files)
+
+    if existing_attribute_files:
+        for item in existing_attribute_files:
+            file_name = os.path.basename(item)
+            file_path = os.path.dirname(item)
+            if not file_path.startswith("_"):
+                if "/_" not in file_path:
+                    if not file_name.startswith("_"):
+                        printing_build_yml_error("files or directories that do not follow the attribute naming conventions. Attribute files, or the directory they are stored in, should start with an underscore", item)
+
+        attribute_validation = validate_attribute_files(path, existing_attribute_files)
+
+        if attribute_validation.count != 0:
+            attribute_validation.print_report()
+
+
+def get_realpath(files):
+    """Get a unique file list of content, excluding attributes."""
+    # get unique file list through realpath
+    unique_files = []
+
+    for file in files:
+        if file.endswith('adoc'):
+            real_path = os.path.realpath(file)
+            if real_path not in unique_files:
+                unique_files.append(real_path)
+
+    # convert realpath back to relative path
+    relative_path_files = []
+
+    pwd = os.getcwd()
+    for i in unique_files:
+        relative_path = os.path.relpath(i, pwd)
+        relative_path_files.append(relative_path)
+
+    # remove attribute files from list
+    files = []
+
+    for item in relative_path_files:
+        file_name = os.path.basename(item)
+        file_path = os.path.dirname(item)
+        if file_path.startswith("_"):
+            continue
+        elif "/_" in file_path:
+            continue
+        elif file_name.startswith("_"):
+            continue
+        else:
+            files.append(item)
+
+    return files
+
+
+def get_content_list(yaml_path, yaml_doc):
+    """Get a unique list of included files with removed excludes."""
+    included, fake_path_includes = get_files(yaml_path, yaml_doc, 'included')
+    excluded, fake_path_excludes = get_files(yaml_path, yaml_doc, 'excluded')
+
+    unique_includes = get_realpath(included)
+    unique_excludes = get_realpath(excluded)
+
+    # drop any includes that are also listed as excludes
+    unique_includes = [item for item in unique_includes if item not in unique_excludes]
+
+    return unique_includes
+
+
+def get_fake_path_files(yaml_path, yaml_doc):
+    """Error out on fake filepaths in build.yml."""
+    included, fake_path_includes = get_files(yaml_path, yaml_doc, 'included')
+    excluded, fake_path_excludes = get_files(yaml_path, yaml_doc, 'excluded')
+
+    missing_files = fake_path_excludes + fake_path_includes
+    if missing_files:
+        printing_build_yml_error("files or directories that do not exist in your repository", missing_files)
+
+
+def sort_prefix_files(yaml_path, yaml_doc):
+    """Get a list of assemblies, modules, and unidentified files."""
+    prefix_assembly = []
+    prefix_modules = []
+    undefined_content = []
+
+    content_list = get_content_list(yaml_path, yaml_doc)
+
+    for item in content_list:
+        if item.endswith('.adoc'):
+            file_name = os.path.basename(item)
+            if file_name.startswith('assembly'):
+                prefix_assembly.append(item)
+            elif file_name.startswith(("proc_", "con_", "ref_", "proc-", "con-", "ref-")):
+                prefix_modules.append(item)
+            elif file_name.startswith(("snip_", "snip-")):
+                continue
+            else:
+                undefined_content.append(item)
+
+    return prefix_assembly, prefix_modules, undefined_content
+
+
+def file_validation(yaml_path, yaml_doc):
+    """Validate all files."""
+    prefix_assembly, prefix_modules, undefined_content = sort_prefix_files(yaml_path, yaml_doc)
+
+    all_files = prefix_assembly + prefix_modules + undefined_content
+
+    undetermined_file_type = []
+    confused_files = []
+
+    report = Report()
+
+    for path in all_files:
+        with open(path, 'r') as file:
+            original = file.read()
+            stripped = Regex.MULTI_LINE_COMMENT.sub('', original)
+            stripped = Regex.SINGLE_LINE_COMMENT.sub('', stripped)
+            stripped = Regex.CODE_BLOCK_DASHES.sub('', stripped)
+            stripped = Regex.CODE_BLOCK_TWO_DASHES.sub('', stripped)
+            stripped = Regex.CODE_BLOCK_DOTS.sub('', stripped)
+            stripped = Regex.INTERNAL_IFDEF.sub('', stripped)
+
+            checks(report, stripped, original, path)
+            icons_check(report, stripped, path)
+            toc_check(report, stripped, path)
+
+            if path in undefined_content:
+                if re.findall(Regex.MODULE_TYPE, stripped):
+                    nesting_in_modules_check(report, stripped, path)
+                    add_res_section_module_check(report, stripped, path)
+
+                elif re.findall(Regex.ASSEMBLY_TYPE, stripped):
+                    add_res_section_assembly_check(report, stripped, path)
+
+                else:
+                    undetermined_file_type.append(path)
+
+            if path in prefix_assembly:
+                if re.findall(Regex.MODULE_TYPE, stripped):
+                    confused_files.append(path)
+                    nesting_in_modules_check(report, stripped, path)
+                    add_res_section_module_check(report, stripped, path)
+                else:
+                    add_res_section_assembly_check(report, stripped, path)
+
+            if path in prefix_modules:
+                if re.findall(Regex.ASSEMBLY_TYPE, stripped):
+                    confused_files.append(path)
+                    add_res_section_assembly_check(report, stripped, path)
+                else:
+                    nesting_in_modules_check(report, stripped, path)
+                    add_res_section_module_check(report, stripped, path)
+
+    if confused_files:
+        printing_build_yml_error("files that have mismatched name prefix and content type tag. Content type tag takes precedence. The files were checked according to the tag", confused_files)
+
+    if undetermined_file_type:
+        printing_build_yml_error('files that can not be classified as modules or assemblies', undetermined_file_type)
+
+    return report
+
+
+def yaml_validation(yaml_file, path_to_yaml):
+    """Run yaml and file validation and report errors."""
+    # define path to script
+    path_to_script = os.path.dirname(os.path.realpath(__file__))
+    # load schema
+    schema = eval(open(path_to_script + '/schema.py', 'r').read())
+    # make sure build.yml is not empty before loading it
+    get_yaml_size(yaml_file)
+    # load build.yml
+    loaded_yaml = load_doc(yaml_file)
+
+    get_yaml_errors(schema, loaded_yaml)
+    get_attribute_file_errors(loaded_yaml, path_to_yaml)
+    get_fake_path_files(path_to_yaml, loaded_yaml)
+    validation = file_validation(path_to_yaml, loaded_yaml)
+
+    if validation.count != 0:
+        validation.print_report()
+        sys.exit(2)
diff --git a/PantheonCMD/generate-pv2-yml.sh b/PantheonCMD/generate-pv2-yml.sh
deleted file mode 100644
index e0b4d77..0000000
--- a/PantheonCMD/generate-pv2-yml.sh
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/bin/bash
-
-#define parameters
-repo_name=$(basename $PWD)
-
-find_images_dir=$(find . -type d -name "images")
-
-#if images dir is detected record, otherwise create a placeholder
-if [ ! -z "$find_images_dir" ]; then
-z "$find_images_dir" ]; then - images_dir=$(for i in $find_images_dir; do echo " - ${i/\.\/}/*.png" && echo " - ${i/\.\/}/*.svg"; done) -else - images_dir=$(printf ' - PATH/TO/YOUR/IMAGES/DIRECTORY/*.png\n - PATH/TO/YOUR/IMAGES/DIRECTORY/*.svg') -fi - -#define the template -cat << EOF -server: https://pantheon.corp.redhat.com -# Your repository name goes here -repository: $repo_name -variants: - # Your chosen name goes here - - name: PLACEHOLDER - # Path to your attributes file goes here - path: PATH/TO/attributes.adoc - canonical: true - -assemblies: - # Your assemblies go here - - PATH/TO/ASSEMBLIES/assembly_TEMPLATE-ASSEMBLY.adoc - - -modules: - # Your modules go here - - PATH/TO/MODULES/con_TEMPLATE_CONCEPT.adoc - - PATH/TO/MODULES/proc_TEMPLATE_PROCEDURE.adoc - - PATH/TO/MODULES/ref_TEMPLATE_REFERENCE.adoc - - -resources: - # Path to your images directory goes here -$images_dir -EOF diff --git a/PantheonCMD/pcmd.py b/PantheonCMD/pcmd.py index 35dda17..e218dde 100644 --- a/PantheonCMD/pcmd.py +++ b/PantheonCMD/pcmd.py @@ -9,7 +9,7 @@ from pcutil import PantheonRepo, get_not_exist, get_exist, is_pantheon_repo from pcvalidator import validation -from pcyamlchecks import yaml_validation +from enki_yaml_valiadtor import yaml_validation from subprocess import call from pcprvalidator import get_changed_files, get_all_modules, get_all_assemblies, get_undetermined_files, get_no_prefix_files @@ -42,14 +42,14 @@ def parse_args(): parser_b = subparsers.add_parser('clean', help='Clean the build directory.') # 'Duplicates' command - parser_c = subparsers.add_parser('duplicates', help='Enumerate duplicate entries in your pantheon2.yml file.') + parser_c = subparsers.add_parser('duplicates', help='Enumerate duplicate entries in your build.yml file.') # 'Validate' command - parser_d = subparsers.add_parser('validate', help='Validate entries in your pantheon2.yml file.') + parser_d = subparsers.add_parser('validate', help='Validate entries in your build.yml file.') parser_d.add_argument('--mr', action='store_true', help='Validate files commited on a merge request.') # 'Generate' command - parser_e = subparsers.add_parser('generate', help='Generate pantheon2.yml file from a template.') + parser_e = subparsers.add_parser('generate', help='Generate build.yml file from a template.') return parser.parse_args() @@ -65,10 +65,10 @@ def parse_args(): repo_location = is_pantheon_repo() - # Action - generate a pantheon2.yml file + # Action - generate a build.yml file if args.command == 'generate': path_to_script = os.path.dirname(os.path.realpath(__file__)) - call("sh " + path_to_script + "/pv2yml-generator.sh", shell=True) + call("bash " + path_to_script + "/buildyml-generator.sh", shell=True) sys.exit(0) # Action - validate yaml syntax, validate yaml keys and values @@ -107,40 +107,11 @@ def parse_args(): pantheon_repo = PantheonRepo(repo_location) - if os.path.exists('pantheon2.yml'): + if os.path.exists('build.yml'): # call yaml file validation + attribute file validation - yaml_validation('pantheon2.yml') + yaml_validation('build.yml') - exists = get_not_exist(pantheon_repo.get_content()) - - if exists: - - print("\nYour pantheon2.yml contains the following files that do not exist in your repository:\n") - - for exist in exists: - - print('\t' + exist) - - print("\nTotal: ", str(len(exists))) - - files_found = get_exist(pantheon_repo.get_content()) - modules_found = pantheon_repo.get_existing_content("modules") - assemblies_found = pantheon_repo.get_existing_content("assemblies") - - validate = 
-
-            if validate.count != 0:
-                print("\nYour pantheon2.yml contains the following files that did not pass validation:\n")
-                validate.print_report()
-                sys.exit(2)
-            else:
-                print("All files passed validation.")
-
-        else:
-
-            print("ERROR: You must run this command from the same directory as the pantheon2.yml file.\n")
-            sys.exit(1)
 
     # Exit if not a Pantheon V2 repository
     if repo_location:
@@ -154,15 +125,11 @@ def parse_args():
     # Action - preview
     if args.command == 'preview':
-        # Validate the pantheon2.yml file
-        yaml_validation(pantheon_repo.yaml_file_location)
-
-
         # Set the output format
         if args.format == 'pdf':
             output_format = 'pdf'
         else:
             output_format = 'html'
-
+
         # Did a user specify a set of files? If so, only build those.
         if args.files:
             # Handle different interpretations of directories
@@ -183,9 +150,9 @@ def parse_args():
                 pcbuild.prepare_build_directory()
                 pcbuild.copy_resources(pantheon_repo.get_existing_content("resources"))
                 pcbuild.build_content(content_subset, args.lang, output_format, pantheon_repo.repo_location, pantheon_repo.yaml_file_location)
-        # Otherwise, attempt to build all files in the pantheon2.yml file.
+        # Otherwise, attempt to build all files in the build.yml file.
         else:
-            if os.path.exists('pantheon2.yml'):
+            if os.path.exists('build.yml'):
                 content_types = ['assemblies','modules']
 
                 continue_run = True
@@ -197,7 +164,7 @@ def parse_args():
                         print("Building %s...\n" % content_type)
                         continue_run = pcbuild.build_content(pantheon_repo.get_existing_content(content_type), args.lang, output_format, pantheon_repo.repo_location, pantheon_repo.yaml_file_location)
             else:
-                print("ERROR: You must run this command from the same directory as the pantheon2.yml file.\n")
+                print("ERROR: You must run this command from the same directory as the build.yml file.\n")
                 sys.exit(1)
 
     # Action - clean
@@ -216,7 +183,7 @@ def parse_args():
 
         if duplicates:
 
-            print("Your pantheon2.yml contains the following duplicate entries:\n")
+            print("Your build.yml contains the following duplicate entries:\n")
 
             for duplicate in duplicates:
                 print(duplicate)
diff --git a/PantheonCMD/pcprvalidator.py b/PantheonCMD/pcprvalidator.py
index adc7873..a546081 100644
--- a/PantheonCMD/pcprvalidator.py
+++ b/PantheonCMD/pcprvalidator.py
@@ -6,7 +6,7 @@
 import sys
 import subprocess
 import re
-from pcchecks import Regex
+from enki_checks import Regex
 
 
 if subprocess.call(["git", "branch"], stderr=subprocess.STDOUT, stdout=open(os.devnull, 'w')) != 0:
diff --git a/PantheonCMD/pcutil.py b/PantheonCMD/pcutil.py
index a91274b..783f972 100644
--- a/PantheonCMD/pcutil.py
+++ b/PantheonCMD/pcutil.py
@@ -15,16 +15,16 @@ def __init__(self, repo_location):
         """Default constructor; accepts repo location and initializes YAML file location."""
 
         self.repo_location = repo_location
-        self.yaml_file_location = repo_location + "pantheon2.yml"
+        self.yaml_file_location = repo_location + "build.yml"
 
     def count_content(self):
-        """Counts the number of assemblies and modules in a pantheon2.yml file."""
+        """Counts the number of assemblies and modules in a build.yml file."""
 
         # Initialize dictionary
         content_counts = {'assemblies': 0, 'modules': 0}
 
         # Parse the main YAML file
-        with open(self.yaml_file_location + 'pantheon2.yml', 'r') as f:
+        with open(self.yaml_file_location, 'r') as f:
             yaml_file = yaml.safe_load(f)
 
         # Count assemblies
@@ -39,7 +39,7 @@ def count_content(self):
 
     def get_content(self):
-        """Returns a sorted list of the modules and assemblies specified in a pantheon2.yml file."""
file.""" + """Returns a sorted list of the modules and assemblies specified in a build.yml file.""" with open(self.yaml_file_location, 'r') as f: yaml_file = yaml.safe_load(f) content_list, content_duplicates = self.get_files(yaml_file, "assemblies", "modules") @@ -48,7 +48,7 @@ def get_content(self): def get_duplicates(self): - """Returns duplicate entries of modules and assemblies found in a pantheon2.yml file.""" + """Returns duplicate entries of modules and assemblies found in a build.yml file.""" with open(self.yaml_file_location, 'r') as f: yaml_file = yaml.safe_load(f) content_list, content_duplicates = self.get_files(yaml_file, "assemblies", "modules") @@ -57,7 +57,7 @@ def get_duplicates(self): def get_existing_content(self, content_type): - """Returns content found in a pantheon2.yml file that exist as files.""" + """Returns content found in a build.yml file that exist as files.""" content_found = [] with open(self.yaml_file_location, 'r') as f: @@ -73,7 +73,7 @@ def get_existing_content(self, content_type): def get_files(self, main_yaml_file, *arguments): - """Returns a sorted list of the modules and assemblies specified in a pantheon2.yml file.""" + """Returns a sorted list of the modules and assemblies specified in a build.yml file.""" content_files = [] content_list = [] content_duplicates = [] @@ -166,7 +166,7 @@ def is_pantheon_repo(): repo_location = None while path_components: - if os.path.exists(os.sep.join(path_components) + os.sep + 'pantheon2.yml'): + if os.path.exists(os.sep.join(path_components) + os.sep + 'build.yml'): repo_location = os.sep.join(path_components) + os.sep break path_components.pop() diff --git a/PantheonCMD/pcvalidator.py b/PantheonCMD/pcvalidator.py deleted file mode 100644 index 31f32b4..0000000 --- a/PantheonCMD/pcvalidator.py +++ /dev/null @@ -1,70 +0,0 @@ -#!/usr/bin/python3 - -from pcchecks import Regex, checks, nesting_in_modules_check, nesting_in_assemblies_check, add_res_section_module_check, add_res_section_assembly_check, icons_check, toc_check -import sys - - -class Report(): - """Create and print report. 
thank u J.""" - - def __init__(self): - """Create placeholder for problem description.""" - self.report = {} - self.count = 0 - - def create_report(self, category, file_path): - """Generate report.""" - self.count += 1 - if not category in self.report: - self.report[category] = [] - self.report[category].append(file_path) - - def print_report(self): - - """Print report.""" - separator = "\n\t" - - for category, files in self.report.items(): - print("\nERROR: {} found in the following files:".format(category)) - print('\t' + separator.join(files)) - - -def validation(files_found, modules_found, assemblies_found): - """Validate files.""" - report = Report() - - for path in files_found: - with open(path, "r") as file: - original = file.read() - stripped = Regex.MULTI_LINE_COMMENT.sub('', original) - stripped = Regex.SINGLE_LINE_COMMENT.sub('', stripped) - stripped = Regex.CODE_BLOCK_DASHES.sub('', stripped) - stripped = Regex.CODE_BLOCK_DOTS.sub('', stripped) - stripped = Regex.INTERNAL_IFDEF.sub('', stripped) - checks(report, stripped, original, path) - icons_check(report, stripped, path) - toc_check(report, stripped, path) - - for path in modules_found: - with open(path, "r") as file: - original = file.read() - stripped = Regex.MULTI_LINE_COMMENT.sub('', original) - stripped = Regex.SINGLE_LINE_COMMENT.sub('', stripped) - stripped = Regex.CODE_BLOCK_DASHES.sub('', stripped) - stripped = Regex.CODE_BLOCK_DOTS.sub('', stripped) - stripped = Regex.INTERNAL_IFDEF.sub('', stripped) - nesting_in_modules_check(report, stripped, path) - add_res_section_module_check(report, stripped, path) - - for path in assemblies_found: - with open(path, "r") as file: - original = file.read() - stripped = Regex.MULTI_LINE_COMMENT.sub('', original) - stripped = Regex.SINGLE_LINE_COMMENT.sub('', stripped) - stripped = Regex.CODE_BLOCK_DASHES.sub('', stripped) - stripped = Regex.CODE_BLOCK_DOTS.sub('', stripped) - stripped = Regex.INTERNAL_IFDEF.sub('', stripped) - nesting_in_assemblies_check(report, stripped, path) - add_res_section_assembly_check(report, stripped, path) - - return report diff --git a/PantheonCMD/pcyamlchecks.py b/PantheonCMD/pcyamlchecks.py deleted file mode 100644 index 4df0687..0000000 --- a/PantheonCMD/pcyamlchecks.py +++ /dev/null @@ -1,104 +0,0 @@ -#!/usr/bin/python3 - -import os -import sys -import yaml -from cerberus import Validator, errors -from cerberus.errors import BasicErrorHandler -from pcchecks import Regex, icons_check, toc_check -from pcvalidator import Report -import glob - - -class CustomErrorHandler(BasicErrorHandler): - """Custom error messages.""" - messages = errors.BasicErrorHandler.messages.copy() - messages[errors.REQUIRED_FIELD.code] = "key is missing" - messages[errors.UNKNOWN_FIELD.code] = "unsupported key" - messages[errors.NOT_NULLABLE.code] = "value can't be empty" - - -def get_yaml_size(yaml_file): - """Test if pv2.yml is empty.""" - if os.path.getsize(yaml_file) == 0: - print("\nYour pantheon2.yml file is empty; exiting...") - sys.exit(2) - - -def load_doc(yaml_file): - """Load pv2.yml and test for syntax errors.""" - with open(yaml_file, 'r') as file: - try: - return yaml.safe_load(file) - except yaml.YAMLError: - print("There's a syntax error in your pantheon2.yml file. 
-            sys.exit(2)
-
-
-def get_attribute_file_validation_results(attribute_file):
-    """Validate attributes file."""
-    report = Report()
-
-    for path in attribute_file:
-        with open(path, 'r') as file:
-            original = file.read()
-            stripped = Regex.MULTI_LINE_COMMENT.sub('', original)
-            stripped = Regex.SINGLE_LINE_COMMENT.sub('', stripped)
-
-            icons_check(report, stripped, path)
-            toc_check(report, stripped, path)
-
-    return report
-
-
-def get_yaml_errors(yaml_schema, yaml_doc):
-    # load validator with custom error handler
-    v = Validator(yaml_schema, error_handler=CustomErrorHandler())
-    # validate the pv2.yml with schema
-    v.validate(yaml_doc, yaml_schema)
-
-    if v.errors:
-        print("FAIL: there is an error in your yaml file:")
-        for key in v.errors.keys():
-            print("\n\t'{}' {}".format(key, ', '.join(str(item) for item in v.errors[key])))
-        sys.exit(2)
-
-    else:
-
-        path_does_not_exist = []
-        path_exists = []
-
-        for item in yaml_doc['resources']:
-            path_to_images_dir = os.path.split(item)[0]
-            if not glob.glob(path_to_images_dir):
-                path_does_not_exist.append(item)
-
-        for variant in yaml_doc['variants']:
-            if not os.path.exists(variant['path']):
-                path_does_not_exist.append(variant['path'])
-            else:
-                path_exists.append(variant['path'])
-
-        if path_does_not_exist:
-            print('FAIL: Your pantheon2.yml contains the following files or directories that do not exist in your repository:\n')
-            for path in path_does_not_exist:
-                print('\t', path)
-            sys.exit(2)
-        else:
-            attribute_file_validation = get_attribute_file_validation_results(path_exists)
-            if attribute_file_validation.count != 0:
-                print("Your attributes file has the following errors:\n")
-                attribute_file_validation.print_report()
-
-
-def yaml_validation(yaml_file):
-    """Validate pv2.yml; get path to attributes while we're at it."""
-    # define path to script
-    path_to_script = os.path.dirname(os.path.realpath(__file__))
-    # load schema
-    schema = eval(open(path_to_script + '/schema.py', 'r').read())
-    # load pv2.yml
-    loaded_yaml = load_doc(yaml_file)
-
-    get_yaml_size(yaml_file)
-    get_yaml_errors(schema, loaded_yaml)
diff --git a/PantheonCMD/pv2yml-generator.sh b/PantheonCMD/pv2yml-generator.sh
deleted file mode 100644
index 0b94021..0000000
--- a/PantheonCMD/pv2yml-generator.sh
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/bin/bash
-
-SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
-
-# a simple yay/nay prompt for users
-while true; do
-    read -p "Are you at the root of your repository where you want to create a pantheo2.yml? [y/N] " yn
-    case $yn in
-        [Yy]*)
-            read -p "Do you want to create the pantheon2.yml in $PWD directory? [y/N] " yn
-            case $yn in
-                [Yy]*)
-                    if [ ! -f "pantheon2.yml" ]; then
-                        sh $SCRIPT_DIR/generate-pv2-yml.sh > pantheon2.yml
-                        echo "pantheon2.yml succsessfully generated";
-                    else
-                        read -p "pantheon2.yml already exists. Do you want to overwrite it? [y/N] " yn
[y/N] " yn - case $yn in - [Yy]*) - sh $SCRIPT_DIR/generate-pv2-yml.sh > pantheon2.yml && echo "pantheon2.yml succsessfully generated"; exit;; - [Nn]*) - echo "exiting..."; exit;; - *) - echo "Please answer yes or no.";; - esac - fi - exit;; - [Nn]*) - echo "exiting..."; exit;; - *) - echo "Please answer yes or no.";; - esac;; - [Nn]*) - echo "Please navigate to the root of the repository where you want to create the pantheo2.yml."; exit;; - *) - echo "Please answer yes or no.";; - esac -done diff --git a/PantheonCMD/schema.py b/PantheonCMD/schema.py index 2ac401a..61a72a6 100644 --- a/PantheonCMD/schema.py +++ b/PantheonCMD/schema.py @@ -1,8 +1,4 @@ { - 'server': { - 'required': True, - 'type': 'string' - }, 'repository': { 'required': True, 'type': 'string' @@ -17,27 +13,33 @@ 'required': True, 'type': 'string' }, - 'path': { + 'attributes': { 'required': True, + 'type': 'list' + }, + 'nav': { + 'required': False, 'type': 'string' }, - 'canonical': { + 'build': { 'required': True, 'allowed': [True, False] + }, + 'files': { + 'required': True, + 'type': 'dict', + 'schema': { + 'included': { + 'required': True, + 'type': 'list' + }, + 'excluded': { + 'required': False, + 'type': 'list' + } + } } } } - }, - 'assemblies': { - 'required': True, - 'type': 'list' - }, - 'modules': { - 'required': True, - 'type': 'list' - }, - 'resources': { - 'required': True, - 'type': 'list' } }