diff --git a/requirements-dev.txt b/requirements-dev.txt
index 4910009..dd0343f 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -1,5 +1,7 @@
+hypothesis==4.31.0
 black==19.3b0
 coveralls
 pytest==4.4.1
 pytest-cov==2.7.1
 responses==0.5.1
+
diff --git a/swaggercheck/__main__.py b/swaggercheck/__main__.py
index edd1e69..b4386fd 100644
--- a/swaggercheck/__main__.py
+++ b/swaggercheck/__main__.py
@@ -31,6 +31,14 @@ def main():
         help="continue on error",
     )
 
+    parser.add_argument(
+        "-v",
+        "--verbose",
+        dest="get_report",
+        action="store_true",
+        help="get a report after the execution",
+    )
+
     parser.add_argument(
         "-u", "--username", help="username (implies 'basic' auth)"
     )
@@ -57,6 +65,9 @@ def main():
         "cont_on_err": (
             parsed_args.cont_on_err or environ.get("SC_CONTINUE_ON_ERROR")
         ),
+        "get_report": (
+            parsed_args.get_report or environ.get("SC_GET_REPORT")
+        ),
         "username": parsed_args.username or environ.get("SC_BASIC_USERNAME"),
         "password": parsed_args.password or environ.get("SC_BASIC_PASSWORD"),
         "token": parsed_args.token or environ.get("SC_API_TOKEN"),
diff --git a/swaggercheck/_basictests.py b/swaggercheck/_basictests.py
index 105710a..a6852cb 100644
--- a/swaggercheck/_basictests.py
+++ b/swaggercheck/_basictests.py
@@ -14,6 +14,7 @@ def api_conformance_test(
     schema_path,
     num_tests_per_op=20,
     cont_on_err=True,
+    get_report=True,
     username=None,
     password=None,
     token=None,
@@ -22,6 +23,7 @@
     init()
 
+    log_filename = "log.txt"
     print(Fore.BLUE + "Connecting to {}".format(schema_path) + Style.RESET_ALL)
@@ -48,9 +50,19 @@ def api_conformance_test(
         Fore.BLUE + "Swagger client... " + Fore.GREEN + " ok" + Style.RESET_ALL
     )
 
+    method = " basic"
+    if username is not None and password is not None:
+        method = " authenticated"
+
+    print(
+        Fore.BLUE + "Authentication method : " + Fore.GREEN + method + Style.RESET_ALL
+    )
+
     fd, watchdog_filename = tempfile.mkstemp()
     os.close(fd)
     os.remove(watchdog_filename)
+    if os.path.isfile(log_filename):
+        os.remove(log_filename)
 
     for operation in client.api.operations():
         try:
@@ -60,6 +72,8 @@
                 num_tests_per_op,
                 cont_on_err,
                 watchdog_filename,
+                get_report,
+                log_filename,
             )
         except ValueError as exc:
             print(
@@ -70,9 +84,68 @@
             )
             sys.exit(1)
+    if get_report:
+        with open(log_filename, "r") as myfile:
+            dic = {}
+            nb_error = 0
+            first = True
+            for line in myfile:
+                line = line.split(" ")
+                line[-1] = line[-1].split("\n")[0]
+                if line[0] == "test":
+                    if not first:
+                        print_report(dic, nb_error)
+                        dic = {}
+                        nb_error = 0
+
+                    print(
+                        Fore.BLUE
+                        + "\n["
+                        + Fore.YELLOW
+                        + line[1]
+                        + Fore.BLUE
+                        + "] "
+                        + Fore.CYAN
+                        + line[2]
+                        + Style.RESET_ALL
+                    )
+                    first = False
+
+                elif line[0] == "ok":
+                    if line[1] not in dic:
+                        dic[line[1]] = 1
+                    else:
+                        dic[line[1]] += 1
+
+                elif line[0] == "fail":
+                    dic[nb_error] = line[1] + "\t" + " ".join(line[2:])
+                    nb_error += 1
+
+            print_report(dic, nb_error)
+
+
+def print_report(dic, nb_error):
+    for k, v in dic.items():
+        if isinstance(k, str):
+            print(
+                "[ SUCCESS "
+                + Fore.MAGENTA
+                + "Code: {0} \ttests : {1}".format(k, v)
+                + Style.RESET_ALL
+                + " ] "
+            )
+    for i in range(0, nb_error):
+        tmp = dic[i].split("\t")
+        print(
+            "[ FAIL "
+            + Fore.RED
+            + "\n\tResponse code {} not in documented codes: {}".format(tmp[0], tmp[1])
+            + Style.RESET_ALL
+            + " ] "
+        )
 
 
 def operation_conformance_test(
-    client, operation, num_tests, cont_on_err, watchdog_filename
+    client, operation, num_tests, cont_on_err, watchdog_filename, get_report, log_filename
 ):
     success = "\t[" + Fore.GREEN + " ok " + Style.RESET_ALL + "] "
     failed = "\t[" + Fore.RED + " fail " + Style.RESET_ALL + "] "
@@ -90,6 +163,10 @@ def operation_conformance_test(
         + Style.RESET_ALL
     )
 
+    if get_report:
+        with open(log_filename, "a+") as myfile:
+            myfile.write("test [" + str(operation.method) + "] " + operation.path + "\n")
+
     for name, op in operation._parameters.items():
         if not op.type:
             url = "https://github.com/adimian/swagger-check/labels/types%20support"
@@ -109,9 +186,8 @@
     )
     @hypothesis.given(strategy)
     def single_operation_test(
-        client, operation, cont_on_err, watchdog_filename, params
+        client, operation, cont_on_err, get_report, log_filename, watchdog_filename, params
     ):
-
         root = "Testing with params: {}".format(params) + Style.RESET_ALL
 
         result = client.request(operation, params)
@@ -125,6 +201,9 @@ def single_operation_test(
         if result.status in operation.response_codes:
             print(success + status_code + root)
+            if get_report:
+                with open(log_filename, "a+") as myfile:
+                    myfile.write("ok " + str(result.status) + "\n")
         else:
             outcome = (
                 Fore.RED
                 + "Response code {} not in documented codes: {}".format(
                     result.status, operation.response_codes
                 )
                 + Style.RESET_ALL
             )
             print(failed + status_code + root + outcome)
+            if get_report:
+                with open(log_filename, "a+") as myfile:
+                    myfile.write("fail " + str(result.status) + " " + str(operation.response_codes) + "\n")
+
             if not cont_on_err:
                 # we use a file as a signal between inside and outside of
                 # hypothesis since otherwise we'd see hypothesis extended help
@@ -141,7 +224,7 @@ def single_operation_test(
             with open(watchdog_filename, "w"):
                 pass
 
-    single_operation_test(client, operation, cont_on_err, watchdog_filename)
+    single_operation_test(client, operation, cont_on_err, get_report, log_filename, watchdog_filename)
 
     if os.path.isfile(watchdog_filename):
         print(Fore.RED + "Stopping after first failure" + Style.RESET_ALL)