diff --git a/analyzer.py b/analyzer.py index 81e51b186..eccb8d78d 100644 --- a/analyzer.py +++ b/analyzer.py @@ -29,7 +29,10 @@ def slither_analyzer(output:str) -> dict[list[Finding]]: if not re.match(r"[0-9]{40}", address): address = path findings.append(Finding(address, fname, ",".join([str(l) for l in element['source_mapping']['lines']]))) - result[detector_result['check']] = findings + if detector_result['check'] in result: + result[detector_result['check']] += findings + else: + result[detector_result['check']] = findings elif not 'results' in output: raise SlitherOutError('no results') return result diff --git a/runner.py b/runner.py index 2a64ff139..ead1aa9e9 100644 --- a/runner.py +++ b/runner.py @@ -14,6 +14,9 @@ from storage import Storage from config import LOGGING_LEVEL +CONTRACT_STAT_TYPE_NAME = 'by_contract' +FINDING_STAT_TYPE_NAME = 'by_finding' + def process_file(contract: Contract, use_slither: bool = False) -> tuple[Contract, dict[str, list]]: """Run subproccess contract processing Args: @@ -77,7 +80,7 @@ def main(output, extra_output, input, skip_duplicates, skip_libs, new_contracts, detectors = DETECTORS # Use multiprocessing Pool to run slitherin in parallel logger.info("starting pool on %d cores contract", pool) - detector_statistics = Counter() + detector_statistics = { CONTRACT_STAT_TYPE_NAME: Counter(), FINDING_STAT_TYPE_NAME: Counter() } start_time = time.time() storage = Storage() with Pool(pool) as pool: @@ -95,25 +98,34 @@ def main(output, extra_output, input, skip_duplicates, skip_libs, new_contracts, f_extra.write(f"{finding.address};{finding.filename};{detector};\"{finding.lines}\"\n") if count_files: increment = len(files_counter) - detector_statistics[detector] += increment - if not count_files: - detector_statistics['total'] += 1 - else: - detector_statistics['total'] += count_sol_files(contract.filename) - + if not count_files: + detector_statistics[CONTRACT_STAT_TYPE_NAME][detector] += increment + count_findings = len(findings) 
+ if count_findings > 0: + detector_statistics[FINDING_STAT_TYPE_NAME][detector] += count_findings + sol_files = count_sol_files(contract.filename) + for stat_type in detector_statistics: + if count_files and stat_type == CONTRACT_STAT_TYPE_NAME: + continue + detector_statistics[stat_type]['files'] += sol_files + detector_statistics[stat_type]['contracts'] += 1 + for detector in contract.detectors: storage.set_contract_checked(contract.address, contract.chain_id, detector) if timeout is not None and time.time() - start_time > timeout: - logger.info("timeout stop, processed %d tasks", detector_statistics['total']) + logger.info("timeout stop, processed %d tasks", detector_statistics[FINDING_STAT_TYPE_NAME]['contracts']) break + for stat_type in list(detector_statistics.keys()): + if len(detector_statistics[stat_type]) == 0: + del detector_statistics[stat_type] logger.info("completed pool in %s", str(timedelta(seconds=round(time.time()-start_time)))) df = pd.DataFrame.from_dict(detector_statistics, orient='index') print(df.to_markdown()) if output is not None: logger.info("Save stats to file %s", output) with open(output, 'w') as f: - f.write(df.to_csv(sep=';')) + f.write(df.to_csv(sep=';', float_format='%.0f')) if __name__ == "__main__": main() diff --git a/save_sheet.py b/save_sheet.py index 4d3ee90fa..1ddc550d9 100644 --- a/save_sheet.py +++ b/save_sheet.py @@ -2,8 +2,10 @@ import time import click import subprocess +import logging from sheet import Sheet +from config import LOGGING_LEVEL DETECTOR_COL_NUM = 2 @@ -15,13 +17,10 @@ def get_slitherin_version(): except Exception as e: return '' -@click.command() -@click.option('-i', '--input', help="file with benchmark results", required=True) -@click.option('-sa', '--service-account', help="google service account json file", required=True) -@click.option('-si', '--sheet-id', help="google sheet id", required=True) -@click.option('-ln', '--list-name', help="google list name", required=True) -@click.option('-sv', '--slitherin-version', help="slitherin version, default value taken from slitherin --version command", 
required=False, default=get_slitherin_version()) -def main(input, service_account, sheet_id, list_name, slitherin_version): +def save_sheet(results, service_account, sheet_id, list_name, slitherin_version): + logger = logging.getLogger() + logger.info("save results for %d detectors to list %s", len(results), list_name) + logger.debug("results: %s", results) sheet = Sheet(service_account, sheet_id, list_name) values = sheet.get_rows("A1:ZZZ") detector_names = values[0][DETECTOR_COL_NUM:] @@ -29,15 +28,11 @@ def main(input, service_account, sheet_id, list_name, slitherin_version): detector_col_by_name = {detector_names[i]:(i+DETECTOR_COL_NUM) for i in range(0, len(detector_names))} new_row = [round(time.time()), slitherin_version] + ['']*len(detector_names) new_columns = [] - with open(input, 'r') as f: - for line in f: - detector_name, c = line.split(';') - if detector_name == '': - continue - if detector_name in detector_col_by_name: - new_row[detector_col_by_name[detector_name]] = c - else: - new_columns.append([detector_name]+['']*(len(values)-1)+[c]) + for detector_name in results: + if detector_name in detector_col_by_name: + new_row[detector_col_by_name[detector_name]] = results[detector_name] + else: + new_columns.append([detector_name]+['']*(len(values)-1)+[results[detector_name]]) response = sheet.add_row(new_row) last_column_num = DETECTOR_COL_NUM + len(detector_names) - 1 @@ -45,6 +40,32 @@ last_column_num += 1 sheet.add_column(last_column_num, column) +@click.command() +@click.option('-i', '--input', help="file with benchmark results", required=True) +@click.option('-sa', '--service-account', help="google service account json file", required=True) +@click.option('-si', '--sheet-id', help="google sheet id", required=True) +@click.option('-ln', '--list-name', help="google list name", required=True) +@click.option('-sv', '--slitherin-version', help="slitherin version, default value taken from slitherin 
--version command", required=False, default=get_slitherin_version()) +def main(input, service_account, sheet_id, list_name, slitherin_version): + handler = logging.StreamHandler() + handler.setFormatter(logging.Formatter("%(levelname)s: %(asctime)s - %(process)s - %(message)s")) + + logger = logging.getLogger() + logger.setLevel(LOGGING_LEVEL) + logger.addHandler(handler) + with open(input, 'r') as f: + detector_results = {} + for line in f: + line_list = line.strip().split(';') + stat_type, c = line_list[0], line_list[1:] + + if stat_type == '': + detector_names = c + continue + detector_results[stat_type] = dict(zip(detector_names, c)) + for stat_type in detector_results: + save_sheet(detector_results[stat_type], service_account, sheet_id, f"{list_name}_{stat_type}", slitherin_version) + if __name__ == "__main__": main()