Lighthouse based Tests - use lighthouse plugin inside sitespeed
7h3Rabbit committed Jan 10, 2025
1 parent 2c06f06 commit a1d5c56
Showing 3 changed files with 67 additions and 74 deletions.
package.json: 6 changes (4 additions, 2 deletions)

@@ -10,12 +10,14 @@
"license": "MIT",
"homepage": "https://github.com/Webperf-se/webperf_core/",
"dependencies": {
"lighthouse": "12.3.0",
"pa11y": "8.0.0",
"sitespeed.io": "35.7.5",
"stylelint": "16.12.0",
"vnu-jar": "23.4.11",
"yellowlabtools": "3.0.1"
"yellowlabtools": "3.0.1",
"@sitespeed.io/plugin-lighthouse": "12.1.0",
"@sitespeed.io/plugin": "0.0.6",
"webperf-sitespeedio-plugin": "2025.1.2"
},
"engines": {
"node": "20.x"
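Note (editorial, not part of the commit): @sitespeed.io/plugin-lighthouse is the sitespeed.io plugin that runs Lighthouse inside a sitespeed.io run, which is presumably why the direct lighthouse dependency can be dropped; the plugin pulls in its own copy. The --plugins.add arguments added in tests/sitespeed_base.py below expect each of these packages to expose an index.js under node_modules. A pre-flight check sketch under that assumption; missing_plugins and the node_modules default are illustrative, not repo code:

import os

# Pre-flight sketch (illustrative, not repo code): verify that the packages
# added above are installed where the --plugins.add paths used in
# tests/sitespeed_base.py expect an index.js.
REQUIRED_PLUGINS = (
    '@sitespeed.io/plugin-lighthouse',
    'webperf-sitespeedio-plugin',
)

def missing_plugins(node_modules='node_modules'):
    return [name for name in REQUIRED_PLUGINS
            if not os.path.isfile(os.path.join(node_modules, name, 'index.js'))]

if __name__ == '__main__':
    for name in missing_plugins():
        print(f'plugin not installed (no index.js found): {name}')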
tests/lighthouse_base.py: 113 changes (44 additions, 69 deletions)

@@ -5,6 +5,7 @@
 from datetime import datetime, timedelta
 import subprocess
 from helpers.models import Rating
+from tests.sitespeed_base import get_result
 from tests.utils import is_file_older_than,\
     get_cache_path_for_rule,\
     get_translation
@@ -64,8 +65,7 @@ def run_test(url, strategy, category, silance, lighthouse_translations):
     json_content = get_json_result(
         lang_code,
         url,
-        strategy,
-        category
+        strategy
     )

     return_dict = {}
@@ -459,50 +459,48 @@ def get_json_result_using_caching(lang_code, url, strategy):
     Returns:
         dict: The JSON result of the audit, either from the cache or a new audit.
     """
-    cache_key_rule = 'lighthouse-{0}'
-    cache_path = get_cache_path_for_rule(url, cache_key_rule)
-
-    if not os.path.exists(cache_path):
-        os.makedirs(cache_path)
-
-    result_file = os.path.join(cache_path, 'result.json')
-    command = (
-        f"node node_modules{os.path.sep}lighthouse{os.path.sep}cli{os.path.sep}index.js"
-        f" --output json --output-path {result_file} --locale {lang_code}"
-        f" --form-factor {strategy} --chrome-flags=\"--headless\" --quiet")
-
-    artifacts_file = os.path.join(cache_path, 'artifacts.json')
-    if os.path.exists(result_file) and \
-            not is_file_older_than(result_file, timedelta(minutes=get_config('general.cache.max-age'))):
-
-        file_created_timestamp = os.path.getctime(result_file)
-        file_created_date = time.ctime(file_created_timestamp)
-        print((f'Cached entry found from {file_created_date},'
-               ' using it instead of calling website again.'))
-        with open(result_file, 'r', encoding='utf-8', newline='') as file:
-            return str_to_json('\n'.join(file.readlines()), url)
-    elif os.path.exists(artifacts_file) and \
-            not is_file_older_than(
-                artifacts_file,
-                timedelta(minutes=get_config('general.cache.max-age'))):
-
-        file_created_timestamp = os.path.getctime(artifacts_file)
-        file_created_date = time.ctime(file_created_timestamp)
-        print((
-            f'Cached entry found from {file_created_date},'
-            ' using it instead of calling website again.'))
-        command += f" -A={cache_path}"
-    else:
-        command += f" -GA={cache_path} {url}"
-
-    with subprocess.Popen(command.split(), stdout=subprocess.PIPE) as process:
-        _, _ = process.communicate(timeout=get_config('general.request.timeout') * 10)
-    with open(result_file, 'r', encoding='utf-8', newline='') as file:
-        return str_to_json('\n'.join(file.readlines()), url)
+    # TODO: re add lang code logic
+    # TODO: re add strategy logic
+
+    # We don't need extra iterations for what we are using it for
+    sitespeed_iterations = 1
+    sitespeed_arg = (
+        '--shm-size=1g -b chrome '
+        '--plugins.remove screenshot --plugins.remove html --plugins.remove metrics '
+        '--browsertime.screenshot false --screenshot false --screenshotLCP false '
+        '--browsertime.screenshotLCP false --chrome.cdp.performance false '
+        '--browsertime.chrome.timeline false --videoParams.createFilmstrip false '
+        '--visualMetrics false --visualMetricsPerceptual false '
+        '--visualMetricsContentful false --browsertime.headless true '
+        '--browsertime.chrome.includeResponseBodies all --utc true '
+        '--browsertime.chrome.args ignore-certificate-errors '
+        f'-n {sitespeed_iterations}')
+    if get_config('tests.sitespeed.xvfb'):
+        sitespeed_arg += ' --xvfb'
+    (_, filename) = get_result(
+        url,
+        get_config('tests.sitespeed.docker.use'),
+        sitespeed_arg,
+        get_config('tests.sitespeed.timeout'))
+
+    # TODO: should we add logic to run lighthouse with different url if file doesn't exist?
+    if not os.path.exists(filename):
+        return {}
+
+    result_file = filename.replace('.har', '-lighthouse-lhr.json')
+    if not os.path.exists(result_file):
+        # TODO: should we add logic to run lighthouse with different url if file doesn't exist?
+        return {}
+
+    if is_file_older_than(result_file, timedelta(minutes=get_config('general.cache.max-age'))):
+        return {}
+
+    with open(result_file, 'r', encoding='utf-8', newline='') as file:
+        return str_to_json('\n'.join(file.readlines()), url)


-def get_json_result(lang_code, url, strategy, category):
+def get_json_result(lang_code, url, strategy):
     """
     Retrieves the JSON result of a Lighthouse audit for a specific URL.
     This function uses either the Google Pagespeed API or
@@ -516,33 +514,10 @@ def get_json_result(lang_code, url, strategy, category):
         url (str): The URL to audit.
         strategy (str):
             The form factor to use for the audit (e.g., 'mobile' or 'desktop').
-        category (str):
-            The category of audits to perform (e.g., 'performance' or 'accessibility').
     Returns:
         dict: The JSON result of the audit.
     """
     json_content = {}
     check_url = url.strip()

-    if get_config('general.cache.use'):
-        return get_json_result_using_caching(lang_code, check_url, strategy)
-
-    command = (
-        f"node node_modules{os.path.sep}lighthouse{os.path.sep}cli{os.path.sep}index.js"
-        f" {check_url} --output json --output-path stdout --locale {lang_code}"
-        f" --only-categories {category} --form-factor {strategy}"
-        " --chrome-flags=\"--headless\" --quiet")
-
-    if get_config('tests.lighthouse.disable-sandbox'):
-        command += (
-            " --chrome-flags=\"--no-sandbox\""
-        )
-
-    with subprocess.Popen(command.split(), stdout=subprocess.PIPE) as process:
-        output, _ = process.communicate(timeout=get_config('general.request.timeout') * 10)
-    get_content = output
-    json_content = str_to_json(get_content, check_url)
-
-    return json_content
+    return get_json_result_using_caching(lang_code, check_url, strategy)
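Note (editorial, not part of the commit): after this change Lighthouse is no longer started through its own CLI. get_json_result_using_caching asks sitespeed_base.get_result for the HAR file of a sitespeed.io run and then reads the Lighthouse result (LHR) JSON that the plugin wrote next to it. A minimal sketch of that lookup, assuming the renamed file layout produced by cleanup_results_dir below; load_lighthouse_result and the example path are illustrative, not repo code:

import json
import os

def load_lighthouse_result(har_filename):
    # Mirrors the lookup above: cleanup_results_dir (below) renames the
    # plugin's output to '<base>-lighthouse-lhr.json' next to the HAR file.
    result_file = har_filename.replace('.har', '-lighthouse-lhr.json')
    if not os.path.exists(result_file):
        return {}
    with open(result_file, 'r', encoding='utf-8') as file:
        return json.load(file)

# Illustrative usage; the path is hypothetical.
lhr = load_lighthouse_result('cache/example.com/1b2c3d4e.har')
if lhr:
    # 'categories' and per-category 'score' are standard LHR fields.
    print(lhr['categories']['performance']['score'])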
tests/sitespeed_base.py: 22 changes (19 additions, 3 deletions)

@@ -60,8 +60,10 @@ def get_result(url, sitespeed_use_docker, sitespeed_arg, timeout):

     result_folder_name = os.path.join(folder, hostname, f'{str(uuid.uuid4())}')

-    sitespeed_arg += (' --postScript chrome-cookies.cjs --postScript chrome-versions.cjs '
-                      f'--outputFolder {result_folder_name} {url}')
+    sitespeed_arg += (' --postScript chrome-cookies.cjs --postScript chrome-versions.cjs'
+                      f' --outputFolder {result_folder_name}'
+                      f' --plugins.add ../../../@sitespeed.io/plugin-lighthouse/index.js'
+                      f' --plugins.add ../../../webperf-sitespeedio-plugin/index.js --sustainable.enable=true {url}')

     filename = ''
     # Should we use cache when available?
@@ -193,7 +195,21 @@ def cleanup_results_dir(browsertime_path, path):
         path (str): The path to the directory to be removed.
     """
     correct_path = f'{path}.har'
-    os.rename(browsertime_path, correct_path)
+    coach_path = browsertime_path.replace('browsertime.har', 'coach.json')
+    correct_coach_path = f'{path}-coach.json'
+    sustainable_path = browsertime_path.replace('browsertime.har', 'sustainable.json')
+    correct_sustainable_path = f'{path}-sustainable.json'
+    lighthouse_path = browsertime_path.replace('browsertime.har', 'lighthouse-lhr.json')
+    correct_lighthouse_path = f'{path}-lighthouse-lhr.json'
+
+    if os.path.exists(browsertime_path):
+        os.rename(browsertime_path, correct_path)
+    if os.path.exists(coach_path):
+        os.rename(coach_path, correct_coach_path)
+    if os.path.exists(sustainable_path):
+        os.rename(sustainable_path, correct_sustainable_path)
+    if os.path.exists(lighthouse_path):
+        os.rename(lighthouse_path, correct_lighthouse_path)
     shutil.rmtree(path)

 def get_result_using_no_cache(sitespeed_use_docker, arg, timeout):
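Design note (editorial, not part of the commit): the four rename blocks added to cleanup_results_dir follow one pattern, so the same behavior can be expressed with a suffix table; a sketch, with SIDECAR_FILES as an illustrative name:

import os
import shutil

# Illustrative refactor (same behavior as the committed code, not repo code):
# map each sitespeed.io output file to the suffix it is renamed to.
SIDECAR_FILES = {
    'browsertime.har': '.har',
    'coach.json': '-coach.json',
    'sustainable.json': '-sustainable.json',
    'lighthouse-lhr.json': '-lighthouse-lhr.json',
}

def cleanup_results_dir(browsertime_path, path):
    for source_name, target_suffix in SIDECAR_FILES.items():
        source = browsertime_path.replace('browsertime.har', source_name)
        if os.path.exists(source):
            os.rename(source, f'{path}{target_suffix}')
    shutil.rmtree(path)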
