diff --git a/package.json b/package.json
index 8c1bac25..4eb4a045 100644
--- a/package.json
+++ b/package.json
@@ -10,12 +10,14 @@
   "license": "MIT",
   "homepage": "https://github.com/Webperf-se/webperf_core/",
   "dependencies": {
-    "lighthouse": "12.3.0",
     "pa11y": "8.0.0",
     "sitespeed.io": "35.7.5",
     "stylelint": "16.12.0",
     "vnu-jar": "23.4.11",
-    "yellowlabtools": "3.0.1"
+    "yellowlabtools": "3.0.1",
+    "@sitespeed.io/plugin-lighthouse": "12.1.0",
+    "@sitespeed.io/plugin": "0.0.6",
+    "webperf-sitespeedio-plugin": "2025.1.2"
   },
   "engines": {
     "node": "20.x"
diff --git a/tests/lighthouse_base.py b/tests/lighthouse_base.py
index 02f33043..e6f38619 100644
--- a/tests/lighthouse_base.py
+++ b/tests/lighthouse_base.py
@@ -5,6 +5,7 @@
 from datetime import datetime, timedelta
 import subprocess
 from helpers.models import Rating
+from tests.sitespeed_base import get_result
 from tests.utils import is_file_older_than,\
     get_cache_path_for_rule,\
     get_translation
@@ -64,8 +65,7 @@ def run_test(url, strategy, category, silance, lighthouse_translations):
     json_content = get_json_result(
         lang_code,
         url,
-        strategy,
-        category
+        strategy
     )
 
     return_dict = {}
@@ -459,50 +459,49 @@ def get_json_result_using_caching(lang_code, url, strategy):
     Returns:
         dict: The JSON result of the audit, either from the cache or a new audit.
     """
-    cache_key_rule = 'lighthouse-{0}'
-    cache_path = get_cache_path_for_rule(url, cache_key_rule)
-
-    if not os.path.exists(cache_path):
-        os.makedirs(cache_path)
-
-    result_file = os.path.join(cache_path, 'result.json')
-    command = (
-        f"node node_modules{os.path.sep}lighthouse{os.path.sep}cli{os.path.sep}index.js"
-        f" --output json --output-path {result_file} --locale {lang_code}"
-        f" --form-factor {strategy} --chrome-flags=\"--headless\" --quiet")
-
-    artifacts_file = os.path.join(cache_path, 'artifacts.json')
-    if os.path.exists(result_file) and \
-            not is_file_older_than(result_file, timedelta(minutes=get_config('general.cache.max-age'))):
-
-        file_created_timestamp = os.path.getctime(result_file)
-        file_created_date = time.ctime(file_created_timestamp)
-        print((f'Cached entry found from {file_created_date},'
-               ' using it instead of calling website again.'))
-        with open(result_file, 'r', encoding='utf-8', newline='') as file:
-            return str_to_json('\n'.join(file.readlines()), url)
-    elif os.path.exists(artifacts_file) and \
-            not is_file_older_than(
-                artifacts_file,
-                timedelta(minutes=get_config('general.cache.max-age'))):
-
-        file_created_timestamp = os.path.getctime(artifacts_file)
-        file_created_date = time.ctime(file_created_timestamp)
-        print((
-            f'Cached entry found from {file_created_date},'
-            ' using it instead of calling website again.'))
-        command += f" -A={cache_path}"
-    else:
-        command += f" -GA={cache_path} {url}"
-
-    with subprocess.Popen(command.split(), stdout=subprocess.PIPE) as process:
-        _, _ = process.communicate(timeout=get_config('general.request.timeout') * 10)
-    with open(result_file, 'r', encoding='utf-8', newline='') as file:
-        return str_to_json('\n'.join(file.readlines()), url)
-
-
-def get_json_result(lang_code, url, strategy, category):
+    # TODO: re-add lang code logic
+    # TODO: re-add strategy logic
+
+    # We don't need extra iterations for what we are using the result for
+    sitespeed_iterations = 1
+    # Skip screenshots, video and visual metrics; we only need the HAR and the Lighthouse report
+    sitespeed_arg = (
+        '--shm-size=1g -b chrome '
+        '--plugins.remove screenshot --plugins.remove html --plugins.remove metrics '
+        '--browsertime.screenshot false --screenshot false --screenshotLCP false '
+        '--browsertime.screenshotLCP false --chrome.cdp.performance false '
+        '--browsertime.chrome.timeline false --videoParams.createFilmstrip false '
+        '--visualMetrics false --visualMetricsPerceptual false '
+        '--visualMetricsContentful false --browsertime.headless true '
+        '--browsertime.chrome.includeResponseBodies all --utc true '
+        '--browsertime.chrome.args ignore-certificate-errors '
+        f'-n {sitespeed_iterations}')
+    if get_config('tests.sitespeed.xvfb'):
+        sitespeed_arg += ' --xvfb'
+    (_, filename) = get_result(
+        url,
+        get_config('tests.sitespeed.docker.use'),
+        sitespeed_arg,
+        get_config('tests.sitespeed.timeout'))
+
+    # TODO: should we add logic to run lighthouse with a different URL if the HAR file doesn't exist?
+    if not os.path.exists(filename):
+        return {}
+
+    result_file = filename.replace('.har', '-lighthouse-lhr.json')
+    if not os.path.exists(result_file):
+        # TODO: should we add logic to run lighthouse with a different URL if the result file doesn't exist?
+        return {}
+
+    if is_file_older_than(result_file, timedelta(minutes=get_config('general.cache.max-age'))):
+        return {}
+
+    with open(result_file, 'r', encoding='utf-8', newline='') as file:
+        return str_to_json('\n'.join(file.readlines()), url)
+
+
+def get_json_result(lang_code, url, strategy):
     """
     Retrieves the JSON result of a Lighthouse audit
     for a specific URL.
     This function uses either the Google Pagespeed API or
@@ -516,33 +514,10 @@
         url (str): The URL to audit.
         strategy (str):
             The form factor to use for the audit (e.g., 'mobile' or 'desktop').
-        category (str):
-            The category of audits to perform (e.g., 'performance' or 'accessibility').
 
     Returns:
         dict: The JSON result of the audit.
     """
-    json_content = {}
     check_url = url.strip()
-    if get_config('general.cache.use'):
-        return get_json_result_using_caching(lang_code, check_url, strategy)
-
-    command = (
-        f"node node_modules{os.path.sep}lighthouse{os.path.sep}cli{os.path.sep}index.js"
-        f" {check_url} --output json --output-path stdout --locale {lang_code}"
-        f" --only-categories {category} --form-factor {strategy}"
-        " --chrome-flags=\"--headless\" --quiet")
-
-    if get_config('tests.lighthouse.disable-sandbox'):
-        command += (
-            " --chrome-flags=\"--no-sandbox\""
-        )
-
-
-    with subprocess.Popen(command.split(), stdout=subprocess.PIPE) as process:
-        output, _ = process.communicate(timeout=get_config('general.request.timeout') * 10)
-        get_content = output
-    json_content = str_to_json(get_content, check_url)
-
-    return json_content
+    return get_json_result_using_caching(lang_code, check_url, strategy)
 
diff --git a/tests/sitespeed_base.py b/tests/sitespeed_base.py
index 5f56ed1e..f47cc9d1 100644
--- a/tests/sitespeed_base.py
+++ b/tests/sitespeed_base.py
@@ -60,8 +60,10 @@ def get_result(url, sitespeed_use_docker, sitespeed_arg, timeout):
 
     result_folder_name = os.path.join(folder, hostname, f'{str(uuid.uuid4())}')
 
-    sitespeed_arg += (' --postScript chrome-cookies.cjs --postScript chrome-versions.cjs '
-                      f'--outputFolder {result_folder_name} {url}')
+    sitespeed_arg += (' --postScript chrome-cookies.cjs --postScript chrome-versions.cjs'
+                      f' --outputFolder {result_folder_name}'
+                      ' --plugins.add ../../../@sitespeed.io/plugin-lighthouse/index.js'
+                      f' --plugins.add ../../../webperf-sitespeedio-plugin/index.js --sustainable.enable=true {url}')
 
     filename = ''
     # Should we use cache when available?
@@ -193,7 +195,22 @@ def cleanup_results_dir(browsertime_path, path):
         path (str): The path to the directory to be removed.
    """
     correct_path = f'{path}.har'
-    os.rename(browsertime_path, correct_path)
+    coach_path = browsertime_path.replace('browsertime.har', 'coach.json')
+    correct_coach_path = f'{path}-coach.json'
+    sustainable_path = browsertime_path.replace('browsertime.har', 'sustainable.json')
+    correct_sustainable_path = f'{path}-sustainable.json'
+    lighthouse_path = browsertime_path.replace('browsertime.har', 'lighthouse-lhr.json')
+    correct_lighthouse_path = f'{path}-lighthouse-lhr.json'
+
+    # Move the HAR and the plugin results out of the folder before it is removed
+    if os.path.exists(browsertime_path):
+        os.rename(browsertime_path, correct_path)
+    if os.path.exists(coach_path):
+        os.rename(coach_path, correct_coach_path)
+    if os.path.exists(sustainable_path):
+        os.rename(sustainable_path, correct_sustainable_path)
+    if os.path.exists(lighthouse_path):
+        os.rename(lighthouse_path, correct_lighthouse_path)
     shutil.rmtree(path)
 
 def get_result_using_no_cache(sitespeed_use_docker, arg, timeout):